diff --git a/gam.py b/gam.py index da77472f..8a389e40 100644 --- a/gam.py +++ b/gam.py @@ -1044,6 +1044,10 @@ def doDelegates(users): if delete_alias: doDeleteAlias(alias_email=use_delegate_address) +def gen_sha512_hash(password): + from passlib.hash import sha512_crypt + return sha512_crypt(password) + def getDelegates(users): emailsettings = getEmailSettingsObject() csv_format = False @@ -3500,10 +3504,10 @@ def doCreateUser(): if need_password: body[u'password'] = u''.join(random.sample(u'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789~`!@#$%^&*()-=_+:;"\'{}[]\\|', 25)) if need_to_hash_password: - newhash = hashlib.sha1() - newhash.update(body[u'password']) - body[u'password'] = newhash.hexdigest() - body[u'hashFunction'] = u'SHA-1' + #newhash = hashlib.sha1() + #newhash.update(body[u'password']) + body[u'password'] = gen_sha512_hash(body[u'password']) + body[u'hashFunction'] = u'crypt' print u"Creating account for %s" % body[u'primaryEmail'] callGAPI(service=cd.users(), function='insert', body=body) if do_admin: @@ -3956,10 +3960,10 @@ def doUpdateUser(users): print u'Error: didn\'t expect %s command at position %s' % (sys.argv[i], i) sys.exit(2) if gotPassword and not (isSHA1 or isMD5 or isCrypt or nohash): - newhash = hashlib.sha1() - newhash.update(body[u'password']) - body[u'password'] = newhash.hexdigest() - body[u'hashFunction'] = u'SHA-1' + #newhash = hashlib.sha1() + #newhash.update(body[u'password']) + body[u'password'] = gen_sha512_hash(body[u'password']) + body[u'hashFunction'] = u'crypt' for user in users: if user[:4].lower() == u'uid:': user = user[4:] diff --git a/passlib/__init__.py b/passlib/__init__.py new file mode 100644 index 00000000..aeea1fc2 --- /dev/null +++ b/passlib/__init__.py @@ -0,0 +1,3 @@ +"""passlib - suite of password hashing & generation routinges""" + +__version__ = '1.6.2' diff --git a/passlib/_setup/__init__.py b/passlib/_setup/__init__.py new file mode 100644 index 00000000..38819437 --- /dev/null +++ b/passlib/_setup/__init__.py @@ -0,0 +1 @@ +"""passlib.setup - helpers used by passlib's setup.py script""" diff --git a/passlib/_setup/docdist.py b/passlib/_setup/docdist.py new file mode 100644 index 00000000..dadb4b53 --- /dev/null +++ b/passlib/_setup/docdist.py @@ -0,0 +1,87 @@ +"custom command to build doc.zip file" +#============================================================================= +# imports +#============================================================================= +# core +import os +from distutils import dir_util +from distutils.cmd import Command +from distutils.errors import * +from distutils.spawn import spawn +# local +__all__ = [ + "docdist" +] +#============================================================================= +# command +#============================================================================= +class docdist(Command): + + description = "create zip file containing standalone html docs" + + user_options = [ + ('build-dir=', None, 'Build directory'), + ('dist-dir=', 'd', + "directory to put the source distribution archive(s) in " + "[default: dist]"), + ('format=', 'f', + "archive format to create (tar, ztar, gztar, zip)"), + ('sign', 's', 'sign files using gpg'), + ('identity=', 'i', 'GPG identity used to sign files'), + ] + + def initialize_options(self): + self.build_dir = None + self.dist_dir = None + self.format = None + self.keep_temp = False + self.sign = False + self.identity = None + + def finalize_options(self): + if self.identity and not self.sign: + raise DistutilsOptionError( 
+ "Must use --sign for --identity to have meaning" + ) + if self.build_dir is None: + cmd = self.get_finalized_command('build') + self.build_dir = os.path.join(cmd.build_base, 'docdist') + if not self.dist_dir: + self.dist_dir = "dist" + if not self.format: + self.format = "zip" + + def run(self): + # call build sphinx to build docs + self.run_command("build_sphinx") + cmd = self.get_finalized_command("build_sphinx") + source_dir = cmd.builder_target_dir + + # copy to directory with appropriate name + dist = self.distribution + arc_name = "%s-docs-%s" % (dist.get_name(), dist.get_version()) + tmp_dir = os.path.join(self.build_dir, arc_name) + if os.path.exists(tmp_dir): + dir_util.remove_tree(tmp_dir, dry_run=self.dry_run) + self.copy_tree(source_dir, tmp_dir, preserve_symlinks=True) + + # make archive from dir + arc_base = os.path.join(self.dist_dir, arc_name) + self.arc_filename = self.make_archive(arc_base, self.format, + self.build_dir) + + # Sign if requested + if self.sign: + gpg_args = ["gpg", "--detach-sign", "-a", self.arc_filename] + if self.identity: + gpg_args[2:2] = ["--local-user", self.identity] + spawn(gpg_args, + dry_run=self.dry_run) + + # cleanup + if not self.keep_temp: + dir_util.remove_tree(tmp_dir, dry_run=self.dry_run) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/_setup/stamp.py b/passlib/_setup/stamp.py new file mode 100644 index 00000000..dfa62bc4 --- /dev/null +++ b/passlib/_setup/stamp.py @@ -0,0 +1,57 @@ +"update version string during build" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import os +import re +import time +from distutils.dist import Distribution +# pkg +# local +__all__ = [ + "stamp_source", + "stamp_distutils_output", +] +#============================================================================= +# helpers +#============================================================================= +def get_command_class(opts, name): + return opts['cmdclass'].get(name) or Distribution().get_command_class(name) + +def stamp_source(base_dir, version, dry_run=False): + "update version string in passlib dist" + path = os.path.join(base_dir, "passlib", "__init__.py") + with open(path) as fh: + input = fh.read() + output, count = re.subn('(?m)^__version__\s*=.*$', + '__version__ = ' + repr(version), + input) + assert count == 1, "failed to replace version string" + if not dry_run: + os.unlink(path) # sdist likes to use hardlinks + with open(path, "w") as fh: + fh.write(output) + +def stamp_distutils_output(opts, version): + + # subclass buildpy to update version string in source + _build_py = get_command_class(opts, "build_py") + class build_py(_build_py): + def build_packages(self): + _build_py.build_packages(self) + stamp_source(self.build_lib, version, self.dry_run) + opts['cmdclass']['build_py'] = build_py + + # subclass sdist to do same thing + _sdist = get_command_class(opts, "sdist") + class sdist(_sdist): + def make_release_tree(self, base_dir, files): + _sdist.make_release_tree(self, base_dir, files) + stamp_source(base_dir, version, self.dry_run) + opts['cmdclass']['sdist'] = sdist + +#============================================================================= +# eof 
+#============================================================================= diff --git a/passlib/apache.py b/passlib/apache.py new file mode 100644 index 00000000..8497ca2e --- /dev/null +++ b/passlib/apache.py @@ -0,0 +1,1037 @@ +"""passlib.apache - apache password support""" +# XXX: relocate this to passlib.ext.apache? +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from hashlib import md5 +import logging; log = logging.getLogger(__name__) +import os +import sys +from warnings import warn +# site +# pkg +from passlib.context import CryptContext +from passlib.exc import ExpectedStringError +from passlib.hash import htdigest +from passlib.utils import consteq, render_bytes, to_bytes, deprecated_method, is_ascii_codec +from passlib.utils.compat import b, bytes, join_bytes, str_to_bascii, u, \ + unicode, BytesIO, iteritems, imap, PY3 +# local +__all__ = [ + 'HtpasswdFile', + 'HtdigestFile', +] + +#============================================================================= +# constants & support +#============================================================================= +_UNSET = object() + +_BCOLON = b(":") + +# byte values that aren't allowed in fields. +_INVALID_FIELD_CHARS = b(":\n\r\t\x00") + +#============================================================================= +# backport of OrderedDict for PY2.5 +#============================================================================= +try: + from collections import OrderedDict +except ImportError: + # Python 2.5 + class OrderedDict(dict): + """hacked OrderedDict replacement. + + NOTE: this doesn't provide a full OrderedDict implementation, + just the minimum needed by the Htpasswd internals. + """ + def __init__(self): + self._keys = [] + + def __iter__(self): + return iter(self._keys) + + def __setitem__(self, key, value): + if key not in self: + self._keys.append(key) + super(OrderedDict, self).__setitem__(key, value) + + def __delitem__(self, key): + super(OrderedDict, self).__delitem__(key) + self._keys.remove(key) + + def iteritems(self): + return ((key, self[key]) for key in self) + + # these aren't used or implemented, so disabling them for safety. + update = pop = popitem = clear = keys = iterkeys = None + +#============================================================================= +# common helpers +#============================================================================= +class _CommonFile(object): + """common framework for HtpasswdFile & HtdigestFile""" + #=================================================================== + # instance attrs + #=================================================================== + + # charset encoding used by file (defaults to utf-8) + encoding = None + + # whether users() and other public methods should return unicode or bytes? + # (defaults to False under PY2, True under PY3) + return_unicode = None + + # if bound to local file, these will be set. + _path = None # local file path + _mtime = None # mtime when last loaded, or 0 + + # if true, automatically save to local file after changes are made. + autosave = False + + # ordered dict mapping key -> value for all records in database. + # (e.g. 
user => hash for Htpasswd) + _records = None + + #=================================================================== + # alt constuctors + #=================================================================== + @classmethod + def from_string(cls, data, **kwds): + """create new object from raw string. + + :type data: unicode or bytes + :arg data: + database to load, as single string. + + :param \*\*kwds: + all other keywords are the same as in the class constructor + """ + if 'path' in kwds: + raise TypeError("'path' not accepted by from_string()") + self = cls(**kwds) + self.load_string(data) + return self + + @classmethod + def from_path(cls, path, **kwds): + """create new object from file, without binding object to file. + + :type path: str + :arg path: + local filepath to load from + + :param \*\*kwds: + all other keywords are the same as in the class constructor + """ + self = cls(**kwds) + self.load(path) + return self + + #=================================================================== + # init + #=================================================================== + def __init__(self, path=None, new=False, autoload=True, autosave=False, + encoding="utf-8", return_unicode=PY3, + ): + # set encoding + if not encoding: + warn("``encoding=None`` is deprecated as of Passlib 1.6, " + "and will cause a ValueError in Passlib 1.8, " + "use ``return_unicode=False`` instead.", + DeprecationWarning, stacklevel=2) + encoding = "utf-8" + return_unicode = False + elif not is_ascii_codec(encoding): + # htpasswd/htdigest files assumes 1-byte chars, and use ":" separator, + # so only ascii-compatible encodings are allowed. + raise ValueError("encoding must be 7-bit ascii compatible") + self.encoding = encoding + + # set other attrs + self.return_unicode = return_unicode + self.autosave = autosave + self._path = path + self._mtime = 0 + + # init db + if not autoload: + warn("``autoload=False`` is deprecated as of Passlib 1.6, " + "and will be removed in Passlib 1.8, use ``new=True`` instead", + DeprecationWarning, stacklevel=2) + new = True + if path and not new: + self.load() + else: + self._records = OrderedDict() + + def __repr__(self): + tail = '' + if self.autosave: + tail += ' autosave=True' + if self._path: + tail += ' path=%r' % self._path + if self.encoding != "utf-8": + tail += ' encoding=%r' % self.encoding + return "<%s 0x%0x%s>" % (self.__class__.__name__, id(self), tail) + + # NOTE: ``path`` is a property so that ``_mtime`` is wiped when it's set. + def _get_path(self): + return self._path + def _set_path(self, value): + if value != self._path: + self._mtime = 0 + self._path = value + path = property(_get_path, _set_path) + + @property + def mtime(self): + "modify time when last loaded (if bound to a local file)" + return self._mtime + + #=================================================================== + # loading + #=================================================================== + def load_if_changed(self): + """Reload from ``self.path`` only if file has changed since last load""" + if not self._path: + raise RuntimeError("%r is not bound to a local file" % self) + if self._mtime and self._mtime == os.path.getmtime(self._path): + return False + self.load() + return True + + def load(self, path=None, force=True): + """Load state from local file. + If no path is specified, attempts to load from ``self.path``. 
+ + :type path: str + :arg path: local file to load from + + :type force: bool + :param force: + if ``force=False``, only load from ``self.path`` if file + has changed since last load. + + .. deprecated:: 1.6 + This keyword will be removed in Passlib 1.8; + Applications should use :meth:`load_if_changed` instead. + """ + if path is not None: + with open(path, "rb") as fh: + self._mtime = 0 + self._load_lines(fh) + elif not force: + warn("%(name)s.load(force=False) is deprecated as of Passlib 1.6," + "and will be removed in Passlib 1.8; " + "use %(name)s.load_if_changed() instead." % + dict(name=self.__class__.__name__), + DeprecationWarning, stacklevel=2) + return self.load_if_changed() + elif self._path: + with open(self._path, "rb") as fh: + self._mtime = os.path.getmtime(self._path) + self._load_lines(fh) + else: + raise RuntimeError("%s().path is not set, an explicit path is required" % + self.__class__.__name__) + return True + + def load_string(self, data): + "Load state from unicode or bytes string, replacing current state" + data = to_bytes(data, self.encoding, "data") + self._mtime = 0 + self._load_lines(BytesIO(data)) + + def _load_lines(self, lines): + "load from sequence of lists" + # XXX: found reference that "#" comment lines may be supported by + # htpasswd, should verify this, and figure out how to handle them. + # if true, this would also affect what can be stored in user field. + # XXX: if multiple entries for a key, should we use the first one + # or the last one? going w/ first entry for now. + # XXX: how should this behave if parsing fails? currently + # it will contain everything that was loaded up to error. + # could clear / restore old state instead. + parse = self._parse_record + records = self._records = OrderedDict() + for idx, line in enumerate(lines): + key, value = parse(line, idx+1) + if key not in records: + records[key] = value + + def _parse_record(cls, record, lineno): # pragma: no cover - abstract method + "parse line of file into (key, value) pair" + raise NotImplementedError("should be implemented in subclass") + + #=================================================================== + # saving + #=================================================================== + def _autosave(self): + "subclass helper to call save() after any changes" + if self.autosave and self._path: + self.save() + + def save(self, path=None): + """Save current state to file. + If no path is specified, attempts to save to ``self.path``. 
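        A minimal round-trip sketch (paths are hypothetical; the same pattern
        applies to the concrete :class:`HtpasswdFile` and :class:`HtdigestFile`
        subclasses)::

            ht = HtpasswdFile("/tmp/test.htpasswd", new=True)
            ht.set_password("alice", "secret")
            ht.save()                          # writes to the bound path
            ht.save("/tmp/copy.htpasswd")      # or to an explicit path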
+ """ + if path is not None: + with open(path, "wb") as fh: + fh.writelines(self._iter_lines()) + elif self._path: + self.save(self._path) + self._mtime = os.path.getmtime(self._path) + else: + raise RuntimeError("%s().path is not set, cannot autosave" % + self.__class__.__name__) + + def to_string(self): + "Export current state as a string of bytes" + return join_bytes(self._iter_lines()) + + def _iter_lines(self): + "iterator yielding lines of database" + return (self._render_record(key,value) for key,value in iteritems(self._records)) + + def _render_record(cls, key, value): # pragma: no cover - abstract method + "given key/value pair, encode as line of file" + raise NotImplementedError("should be implemented in subclass") + + #=================================================================== + # field encoding + #=================================================================== + def _encode_user(self, user): + "user-specific wrapper for _encode_field()" + return self._encode_field(user, "user") + + def _encode_realm(self, realm): # pragma: no cover - abstract method + "realm-specific wrapper for _encode_field()" + return self._encode_field(realm, "realm") + + def _encode_field(self, value, param="field"): + """convert field to internal representation. + + internal representation is always bytes. byte strings are left as-is, + unicode strings encoding using file's default encoding (or ``utf-8`` + if no encoding has been specified). + + :raises UnicodeEncodeError: + if unicode value cannot be encoded using default encoding. + + :raises ValueError: + if resulting byte string contains a forbidden character, + or is too long (>255 bytes). + + :returns: + encoded identifer as bytes + """ + if isinstance(value, unicode): + value = value.encode(self.encoding) + elif not isinstance(value, bytes): + raise ExpectedStringError(value, param) + if len(value) > 255: + raise ValueError("%s must be at most 255 characters: %r" % + (param, value)) + if any(c in _INVALID_FIELD_CHARS for c in value): + raise ValueError("%s contains invalid characters: %r" % + (param, value,)) + return value + + def _decode_field(self, value): + """decode field from internal representation to format + returns by users() method, etc. + + :raises UnicodeDecodeError: + if unicode value cannot be decoded using default encoding. + (usually indicates wrong encoding set for file). + + :returns: + field as unicode or bytes, as appropriate. + """ + assert isinstance(value, bytes), "expected value to be bytes" + if self.return_unicode: + return value.decode(self.encoding) + else: + return value + + # FIXME: htpasswd doc says passwords limited to 255 chars under Windows & MPE, + # and that longer ones are truncated. this may be side-effect of those + # platforms supporting the 'plaintext' scheme. these classes don't currently + # check for this. + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# htpasswd editing +#============================================================================= + +# FIXME: apr_md5_crypt technically the default only for windows, netware and tpf. +# TODO: find out if htpasswd's "crypt" mode is a crypt() *call* or just des_crypt implementation. +# if the former, we can support anything supported by passlib.hosts.host_context, +# allowing more secure hashes than apr_md5_crypt to be used. 
+# could perhaps add this behavior as an option to the constructor. +# c.f. http://httpd.apache.org/docs/2.2/programs/htpasswd.html +htpasswd_context = CryptContext([ + "apr_md5_crypt", # man page notes supported everywhere, default on Windows, Netware, TPF + "des_crypt", # man page notes server does NOT support this on Windows, Netware, TPF + "ldap_sha1", # man page notes only for transitioning <-> ldap + "plaintext" # man page notes server ONLY supports this on Windows, Netware, TPF + ]) + +class HtpasswdFile(_CommonFile): + """class for reading & writing Htpasswd files. + + The class constructor accepts the following arguments: + + :type path: filepath + :param path: + + Specifies path to htpasswd file, use to implicitly load from and save to. + + This class has two modes of operation: + + 1. It can be "bound" to a local file by passing a ``path`` to the class + constructor. In this case it will load the contents of the file when + created, and the :meth:`load` and :meth:`save` methods will automatically + load from and save to that file if they are called without arguments. + + 2. Alternately, it can exist as an independant object, in which case + :meth:`load` and :meth:`save` will require an explicit path to be + provided whenever they are called. As well, ``autosave`` behavior + will not be available. + + This feature is new in Passlib 1.6, and is the default if no + ``path`` value is provided to the constructor. + + This is also exposed as a readonly instance attribute. + + :type new: bool + :param new: + + Normally, if *path* is specified, :class:`HtpasswdFile` will + immediately load the contents of the file. However, when creating + a new htpasswd file, applications can set ``new=True`` so that + the existing file (if any) will not be loaded. + + .. versionadded:: 1.6 + This feature was previously enabled by setting ``autoload=False``. + That alias has been deprecated, and will be removed in Passlib 1.8 + + :type autosave: bool + :param autosave: + + Normally, any changes made to an :class:`HtpasswdFile` instance + will not be saved until :meth:`save` is explicitly called. However, + if ``autosave=True`` is specified, any changes made will be + saved to disk immediately (assuming *path* has been set). + + This is also exposed as a writeable instance attribute. + + :type encoding: str + :param encoding: + + Optionally specify character encoding used to read/write file + and hash passwords. Defaults to ``utf-8``, though ``latin-1`` + is the only other commonly encountered encoding. + + This is also exposed as a readonly instance attribute. + + :type default_scheme: str + :param default_scheme: + Optionally specify default scheme to use when encoding new passwords. + Must be one of ``"apr_md5_crypt"``, ``"des_crypt"``, ``"ldap_sha1"``, + ``"plaintext"``. It defaults to ``"apr_md5_crypt"``. + + .. versionadded:: 1.6 + This keyword was previously named ``default``. That alias + has been deprecated, and will be removed in Passlib 1.8. + + :type context: :class:`~passlib.context.CryptContext` + :param context: + :class:`!CryptContext` instance used to encrypt + and verify the hashes found in the htpasswd file. + The default value is a pre-built context which supports all + of the hashes officially allowed in an htpasswd file. + + This is also exposed as a readonly instance attribute. + + .. warning:: + + This option may be used to add support for non-standard hash + formats to an htpasswd file. 
However, the resulting file + will probably not be usuable by another application, + and particularly not by Apache. + + :param autoload: + Set to ``False`` to prevent the constructor from automatically + loaded the file from disk. + + .. deprecated:: 1.6 + This has been replaced by the *new* keyword. + Instead of setting ``autoload=False``, you should use + ``new=True``. Support for this keyword will be removed + in Passlib 1.8. + + :param default: + Change the default algorithm used to encrypt new passwords. + + .. deprecated:: 1.6 + This has been renamed to *default_scheme* for clarity. + Support for this alias will be removed in Passlib 1.8. + + Loading & Saving + ================ + .. automethod:: load + .. automethod:: load_if_changed + .. automethod:: load_string + .. automethod:: save + .. automethod:: to_string + + Inspection + ================ + .. automethod:: users + .. automethod:: check_password + .. automethod:: get_hash + + Modification + ================ + .. automethod:: set_password + .. automethod:: delete + + Alternate Constructors + ====================== + .. automethod:: from_string + + Attributes + ========== + .. attribute:: path + + Path to local file that will be used as the default + for all :meth:`load` and :meth:`save` operations. + May be written to, initialized by the *path* constructor keyword. + + .. attribute:: autosave + + Writeable flag indicating whether changes will be automatically + written to *path*. + + Errors + ====== + :raises ValueError: + All of the methods in this class will raise a :exc:`ValueError` if + any user name contains a forbidden character (one of ``:\\r\\n\\t\\x00``), + or is longer than 255 characters. + """ + #=================================================================== + # instance attrs + #=================================================================== + + # NOTE: _records map stores for the key, and for the value, + # both in bytes which use self.encoding + + #=================================================================== + # init & serialization + #=================================================================== + def __init__(self, path=None, default_scheme=None, context=htpasswd_context, + **kwds): + if 'default' in kwds: + warn("``default`` is deprecated as of Passlib 1.6, " + "and will be removed in Passlib 1.8, it has been renamed " + "to ``default_scheem``.", + DeprecationWarning, stacklevel=2) + default_scheme = kwds.pop("default") + if default_scheme: + context = context.copy(default=default_scheme) + self.context = context + super(HtpasswdFile, self).__init__(path, **kwds) + + def _parse_record(self, record, lineno): + # NOTE: should return (user, hash) tuple + result = record.rstrip().split(_BCOLON) + if len(result) != 2: + raise ValueError("malformed htpasswd file (error reading line %d)" + % lineno) + return result + + def _render_record(self, user, hash): + return render_bytes("%s:%s\n", user, hash) + + #=================================================================== + # public methods + #=================================================================== + + def users(self): + "Return list of all users in database" + return [self._decode_field(user) for user in self._records] + + ##def has_user(self, user): + ## "check whether entry is present for user" + ## return self._encode_user(user) in self._records + + ##def rename(self, old, new): + ## """rename user account""" + ## old = self._encode_user(old) + ## new = self._encode_user(new) + ## hash = self._records.pop(old) + ## 
self._records[new] = hash + ## self._autosave() + + def set_password(self, user, password): + """Set password for user; adds user if needed. + + :returns: + * ``True`` if existing user was updated. + * ``False`` if user account was added. + + .. versionchanged:: 1.6 + This method was previously called ``update``, it was renamed + to prevent ambiguity with the dictionary method. + The old alias is deprecated, and will be removed in Passlib 1.8. + """ + user = self._encode_user(user) + hash = self.context.encrypt(password) + if PY3: + hash = hash.encode(self.encoding) + existing = (user in self._records) + self._records[user] = hash + self._autosave() + return existing + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="set_password") + def update(self, user, password): + "set password for user" + return self.set_password(user, password) + + def get_hash(self, user): + """Return hash stored for user, or ``None`` if user not found. + + .. versionchanged:: 1.6 + This method was previously named ``find``, it was renamed + for clarity. The old name is deprecated, and will be removed + in Passlib 1.8. + """ + try: + return self._records[self._encode_user(user)] + except KeyError: + return None + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="get_hash") + def find(self, user): + "return hash for user" + return self.get_hash(user) + + # XXX: rename to something more explicit, like delete_user()? + def delete(self, user): + """Delete user's entry. + + :returns: + * ``True`` if user deleted. + * ``False`` if user not found. + """ + try: + del self._records[self._encode_user(user)] + except KeyError: + return False + self._autosave() + return True + + def check_password(self, user, password): + """Verify password for specified user. + + :returns: + * ``None`` if user not found. + * ``False`` if user found, but password does not match. + * ``True`` if user found and password matches. + + .. versionchanged:: 1.6 + This method was previously called ``verify``, it was renamed + to prevent ambiguity with the :class:`!CryptContext` method. + The old alias is deprecated, and will be removed in Passlib 1.8. + """ + user = self._encode_user(user) + hash = self._records.get(user) + if hash is None: + return None + if isinstance(password, unicode): + # NOTE: encoding password to match file, making the assumption + # that server will use same encoding to hash the password. + password = password.encode(self.encoding) + ok, new_hash = self.context.verify_and_update(password, hash) + if ok and new_hash is not None: + # rehash user's password if old hash was deprecated + self._records[user] = new_hash + self._autosave() + return ok + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="check_password") + def verify(self, user, password): + "verify password for user" + return self.check_password(user, password) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# htdigest editing +#============================================================================= +class HtdigestFile(_CommonFile): + """class for reading & writing Htdigest files. + + The class constructor accepts the following arguments: + + :type path: filepath + :param path: + + Specifies path to htdigest file, use to implicitly load from and save to. + + This class has two modes of operation: + + 1. 
It can be "bound" to a local file by passing a ``path`` to the class + constructor. In this case it will load the contents of the file when + created, and the :meth:`load` and :meth:`save` methods will automatically + load from and save to that file if they are called without arguments. + + 2. Alternately, it can exist as an independant object, in which case + :meth:`load` and :meth:`save` will require an explicit path to be + provided whenever they are called. As well, ``autosave`` behavior + will not be available. + + This feature is new in Passlib 1.6, and is the default if no + ``path`` value is provided to the constructor. + + This is also exposed as a readonly instance attribute. + + :type default_realm: str + :param default_realm: + + If ``default_realm`` is set, all the :class:`HtdigestFile` + methods that require a realm will use this value if one is not + provided explicitly. If unset, they will raise an error stating + that an explicit realm is required. + + This is also exposed as a writeable instance attribute. + + .. versionadded:: 1.6 + + :type new: bool + :param new: + + Normally, if *path* is specified, :class:`HtdigestFile` will + immediately load the contents of the file. However, when creating + a new htpasswd file, applications can set ``new=True`` so that + the existing file (if any) will not be loaded. + + .. versionadded:: 1.6 + This feature was previously enabled by setting ``autoload=False``. + That alias has been deprecated, and will be removed in Passlib 1.8 + + :type autosave: bool + :param autosave: + + Normally, any changes made to an :class:`HtdigestFile` instance + will not be saved until :meth:`save` is explicitly called. However, + if ``autosave=True`` is specified, any changes made will be + saved to disk immediately (assuming *path* has been set). + + This is also exposed as a writeable instance attribute. + + :type encoding: str + :param encoding: + + Optionally specify character encoding used to read/write file + and hash passwords. Defaults to ``utf-8``, though ``latin-1`` + is the only other commonly encountered encoding. + + This is also exposed as a readonly instance attribute. + + :param autoload: + Set to ``False`` to prevent the constructor from automatically + loaded the file from disk. + + .. deprecated:: 1.6 + This has been replaced by the *new* keyword. + Instead of setting ``autoload=False``, you should use + ``new=True``. Support for this keyword will be removed + in Passlib 1.8. + + Loading & Saving + ================ + .. automethod:: load + .. automethod:: load_if_changed + .. automethod:: load_string + .. automethod:: save + .. automethod:: to_string + + Inspection + ========== + .. automethod:: realms + .. automethod:: users + .. automethod:: check_password(user[, realm], password) + .. automethod:: get_hash + + Modification + ============ + .. automethod:: set_password(user[, realm], password) + .. automethod:: delete + .. automethod:: delete_realm + + Alternate Constructors + ====================== + .. automethod:: from_string + + Attributes + ========== + .. attribute:: default_realm + + The default realm that will be used if one is not provided + to methods that require it. By default this is ``None``, + in which case an explicit realm must be provided for every + method call. Can be written to. + + .. attribute:: path + + Path to local file that will be used as the default + for all :meth:`load` and :meth:`save` operations. + May be written to, initialized by the *path* constructor keyword. + + .. 
attribute:: autosave + + Writeable flag indicating whether changes will be automatically + written to *path*. + + Errors + ====== + :raises ValueError: + All of the methods in this class will raise a :exc:`ValueError` if + any user name or realm contains a forbidden character (one of ``:\\r\\n\\t\\x00``), + or is longer than 255 characters. + """ + #=================================================================== + # instance attrs + #=================================================================== + + # NOTE: _records map stores (,) for the key, + # and as the value, all as bytes. + + # NOTE: unlike htpasswd, this class doesn't use a CryptContext, + # as only one hash format is supported: htdigest. + + # optionally specify default realm that will be used if none + # is provided to a method call. otherwise realm is always required. + default_realm = None + + #=================================================================== + # init & serialization + #=================================================================== + def __init__(self, path=None, default_realm=None, **kwds): + self.default_realm = default_realm + super(HtdigestFile, self).__init__(path, **kwds) + + def _parse_record(self, record, lineno): + result = record.rstrip().split(_BCOLON) + if len(result) != 3: + raise ValueError("malformed htdigest file (error reading line %d)" + % lineno) + user, realm, hash = result + return (user, realm), hash + + def _render_record(self, key, hash): + user, realm = key + return render_bytes("%s:%s:%s\n", user, realm, hash) + + def _encode_realm(self, realm): + # override default _encode_realm to fill in default realm field + if realm is None: + realm = self.default_realm + if realm is None: + raise TypeError("you must specify a realm explicitly, " + "or set the default_realm attribute") + return self._encode_field(realm, "realm") + + #=================================================================== + # public methods + #=================================================================== + + def realms(self): + """Return list of all realms in database""" + realms = set(key[1] for key in self._records) + return [self._decode_field(realm) for realm in realms] + + def users(self, realm=None): + """Return list of all users in specified realm. + + * uses ``self.default_realm`` if no realm explicitly provided. + * returns empty list if realm not found. + """ + realm = self._encode_realm(realm) + return [self._decode_field(key[0]) for key in self._records + if key[1] == realm] + + ##def has_user(self, user, realm=None): + ## "check if user+realm combination exists" + ## user = self._encode_user(user) + ## realm = self._encode_realm(realm) + ## return (user,realm) in self._records + + ##def rename_realm(self, old, new): + ## """rename all accounts in realm""" + ## old = self._encode_realm(old) + ## new = self._encode_realm(new) + ## keys = [key for key in self._records if key[1] == old] + ## for key in keys: + ## hash = self._records.pop(key) + ## self._records[key[0],new] = hash + ## self._autosave() + ## return len(keys) + + ##def rename(self, old, new, realm=None): + ## """rename user account""" + ## old = self._encode_user(old) + ## new = self._encode_user(new) + ## realm = self._encode_realm(realm) + ## hash = self._records.pop((old,realm)) + ## self._records[new,realm] = hash + ## self._autosave() + + def set_password(self, user, realm=None, password=_UNSET): + """Set password for user; adds user & realm if needed. 
+ + If ``self.default_realm`` has been set, this may be called + with the syntax ``set_password(user, password)``, + otherwise it must be called with all three arguments: + ``set_password(user, realm, password)``. + + :returns: + * ``True`` if existing user was updated + * ``False`` if user account added. + """ + if password is _UNSET: + # called w/ two args - (user, password), use default realm + realm, password = None, realm + user = self._encode_user(user) + realm = self._encode_realm(realm) + key = (user, realm) + existing = (key in self._records) + hash = htdigest.encrypt(password, user, realm, encoding=self.encoding) + if PY3: + hash = hash.encode(self.encoding) + self._records[key] = hash + self._autosave() + return existing + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="set_password") + def update(self, user, realm, password): + "set password for user" + return self.set_password(user, realm, password) + + # XXX: rename to something more explicit, like get_hash()? + def get_hash(self, user, realm=None): + """Return :class:`~passlib.hash.htdigest` hash stored for user. + + * uses ``self.default_realm`` if no realm explicitly provided. + * returns ``None`` if user or realm not found. + + .. versionchanged:: 1.6 + This method was previously named ``find``, it was renamed + for clarity. The old name is deprecated, and will be removed + in Passlib 1.8. + """ + key = (self._encode_user(user), self._encode_realm(realm)) + hash = self._records.get(key) + if hash is None: + return None + if PY3: + hash = hash.decode(self.encoding) + return hash + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="get_hash") + def find(self, user, realm): + "return hash for user" + return self.get_hash(user, realm) + + # XXX: rename to something more explicit, like delete_user()? + def delete(self, user, realm=None): + """Delete user's entry for specified realm. + + if realm is not specified, uses ``self.default_realm``. + + :returns: + * ``True`` if user deleted, + * ``False`` if user not found in realm. + """ + key = (self._encode_user(user), self._encode_realm(realm)) + try: + del self._records[key] + except KeyError: + return False + self._autosave() + return True + + def delete_realm(self, realm): + """Delete all users for specified realm. + + if realm is not specified, uses ``self.default_realm``. + + :returns: number of users deleted (0 if realm not found) + """ + realm = self._encode_realm(realm) + records = self._records + keys = [key for key in records if key[1] == realm] + for key in keys: + del records[key] + self._autosave() + return len(keys) + + def check_password(self, user, realm=None, password=_UNSET): + """Verify password for specified user + realm. + + If ``self.default_realm`` has been set, this may be called + with the syntax ``check_password(user, password)``, + otherwise it must be called with all three arguments: + ``check_password(user, realm, password)``. + + :returns: + * ``None`` if user or realm not found. + * ``False`` if user found, but password does not match. + * ``True`` if user found and password matches. + + .. versionchanged:: 1.6 + This method was previously called ``verify``, it was renamed + to prevent ambiguity with the :class:`!CryptContext` method. + The old alias is deprecated, and will be removed in Passlib 1.8. 
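        A minimal usage sketch (file name, realm and credentials below are
        hypothetical)::

            ht = HtdigestFile("/tmp/test.htdigest", default_realm="example.org", new=True)
            ht.set_password("alice", "secret")       # two-arg form, uses default_realm
            ht.check_password("alice", "secret")     # -> True
            ht.check_password("alice", "wrong")      # -> False
            ht.check_password("bob", "secret")       # -> None (user not found)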
+ """ + if password is _UNSET: + # called w/ two args - (user, password), use default realm + realm, password = None, realm + user = self._encode_user(user) + realm = self._encode_realm(realm) + hash = self._records.get((user,realm)) + if hash is None: + return None + return htdigest.verify(password, hash, user, realm, + encoding=self.encoding) + + @deprecated_method(deprecated="1.6", removed="1.8", + replacement="check_password") + def verify(self, user, realm, password): + "verify password for user" + return self.check_password(user, realm, password) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/apps.py b/passlib/apps.py new file mode 100644 index 00000000..96308a4d --- /dev/null +++ b/passlib/apps.py @@ -0,0 +1,192 @@ +"""passlib.apps""" +#============================================================================= +# imports +#============================================================================= +# core +import logging; log = logging.getLogger(__name__) +from itertools import chain +# site +# pkg +from passlib import hash +from passlib.context import LazyCryptContext +from passlib.utils import sys_bits +# local +__all__ = [ + 'custom_app_context', + 'django_context', + 'ldap_context', 'ldap_nocrypt_context', + 'mysql_context', 'mysql4_context', 'mysql3_context', + 'phpass_context', + 'phpbb3_context', + 'postgres_context', +] + +#============================================================================= +# master containing all identifiable hashes +#============================================================================= +def _load_master_config(): + from passlib.registry import list_crypt_handlers + + # get master list + schemes = list_crypt_handlers() + + # exclude the ones we know have ambiguous or greedy identify() methods. 
+ excluded = [ + # frequently confused for eachother + 'bigcrypt', + 'crypt16', + + # no good identifiers + 'cisco_pix', + 'cisco_type7', + 'htdigest', + 'mysql323', + 'oracle10', + + # all have same size + 'lmhash', + 'msdcc', + 'msdcc2', + 'nthash', + + # plaintext handlers + 'plaintext', + 'ldap_plaintext', + + # disabled handlers + 'django_disabled', + 'unix_disabled', + 'unix_fallback', + ] + for name in excluded: + schemes.remove(name) + + # return config + return dict(schemes=schemes, default="sha256_crypt") +master_context = LazyCryptContext(onload=_load_master_config) + +#============================================================================= +# for quickly bootstrapping new custom applications +#============================================================================= +custom_app_context = LazyCryptContext( + # choose some reasonbly strong schemes + schemes=["sha512_crypt", "sha256_crypt"], + + # set some useful global options + default="sha256_crypt" if sys_bits < 64 else "sha512_crypt", + all__vary_rounds = 0.1, + + # set a good starting point for rounds selection + sha512_crypt__min_rounds = 60000, + sha256_crypt__min_rounds = 80000, + + # if the admin user category is selected, make a much stronger hash, + admin__sha512_crypt__min_rounds = 120000, + admin__sha256_crypt__min_rounds = 160000, + ) + +#============================================================================= +# django +#============================================================================= +_django10_schemes = [ + "django_salted_sha1", "django_salted_md5", "django_des_crypt", + "hex_md5", "django_disabled", +] + +django10_context = LazyCryptContext( + schemes=_django10_schemes, + default="django_salted_sha1", + deprecated=["hex_md5"], +) + +_django14_schemes = ["django_pbkdf2_sha256", "django_pbkdf2_sha1", + "django_bcrypt"] + _django10_schemes +django14_context = LazyCryptContext( + schemes=_django14_schemes, + deprecated=_django10_schemes, +) + +_django16_schemes = _django14_schemes[:] +_django16_schemes.insert(1, "django_bcrypt_sha256") +django16_context = LazyCryptContext( + schemes=_django16_schemes, + deprecated=_django10_schemes, +) + +# this will always point to latest version +django_context = django16_context + +#============================================================================= +# ldap +#============================================================================= +std_ldap_schemes = ["ldap_salted_sha1", "ldap_salted_md5", + "ldap_sha1", "ldap_md5", + "ldap_plaintext" ] + +# create context with all std ldap schemes EXCEPT crypt +ldap_nocrypt_context = LazyCryptContext(std_ldap_schemes) + +# create context with all possible std ldap + ldap crypt schemes +def _iter_ldap_crypt_schemes(): + from passlib.utils import unix_crypt_schemes + return ('ldap_' + name for name in unix_crypt_schemes) + +def _iter_ldap_schemes(): + "helper which iterates over supported std ldap schemes" + return chain(std_ldap_schemes, _iter_ldap_crypt_schemes()) +ldap_context = LazyCryptContext(_iter_ldap_schemes()) + +### create context with all std ldap schemes + crypt schemes for localhost +##def _iter_host_ldap_schemes(): +## "helper which iterates over supported std ldap schemes" +## from passlib.handlers.ldap_digests import get_host_ldap_crypt_schemes +## return chain(std_ldap_schemes, get_host_ldap_crypt_schemes()) +##ldap_host_context = LazyCryptContext(_iter_host_ldap_schemes()) + +#============================================================================= +# mysql 
+#============================================================================= +mysql3_context = LazyCryptContext(["mysql323"]) +mysql4_context = LazyCryptContext(["mysql41", "mysql323"], deprecated="mysql323") +mysql_context = mysql4_context # tracks latest mysql version supported + +#============================================================================= +# postgres +#============================================================================= +postgres_context = LazyCryptContext(["postgres_md5"]) + +#============================================================================= +# phpass & variants +#============================================================================= +def _create_phpass_policy(**kwds): + "helper to choose default alg based on bcrypt availability" + kwds['default'] = 'bcrypt' if hash.bcrypt.has_backend() else 'phpass' + return kwds + +phpass_context = LazyCryptContext( + schemes=["bcrypt", "phpass", "bsdi_crypt"], + onload=_create_phpass_policy, + ) + +phpbb3_context = LazyCryptContext(["phpass"], phpass__ident="H") + +# TODO: support the drupal phpass variants (see phpass homepage) + +#============================================================================= +# roundup +#============================================================================= + +_std_roundup_schemes = [ "ldap_hex_sha1", "ldap_hex_md5", "ldap_des_crypt", "roundup_plaintext" ] +roundup10_context = LazyCryptContext(_std_roundup_schemes) + +# NOTE: 'roundup15' really applies to roundup 1.4.17+ +roundup_context = roundup15_context = LazyCryptContext( + schemes=_std_roundup_schemes + [ "ldap_pbkdf2_sha1" ], + deprecated=_std_roundup_schemes, + default = "ldap_pbkdf2_sha1", + ldap_pbkdf2_sha1__default_rounds = 10000, + ) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/context.py b/passlib/context.py new file mode 100644 index 00000000..4f7ec130 --- /dev/null +++ b/passlib/context.py @@ -0,0 +1,2711 @@ +"""passlib.context - CryptContext implementation""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from functools import update_wrapper +import inspect +import re +import hashlib +from math import log as logb, ceil +import logging; log = logging.getLogger(__name__) +import os +import re +from time import sleep +from warnings import warn +# site +# pkg +from passlib.exc import PasslibConfigWarning, ExpectedStringError, ExpectedTypeError +from passlib.registry import get_crypt_handler, _validate_handler_name +from passlib.utils import rng, tick, to_bytes, deprecated_method, \ + to_unicode, splitcomma +from passlib.utils.compat import bytes, iteritems, num_types, \ + PY2, PY3, PY_MIN_32, unicode, SafeConfigParser, \ + NativeStringIO, BytesIO, base_string_types +# local +__all__ = [ + 'CryptContext', + 'LazyCryptContext', + 'CryptPolicy', +] + +#============================================================================= +# support +#============================================================================= + +# private object to detect unset params +_UNSET = object() + +# TODO: merge the following helpers into _CryptConfig + +def _coerce_vary_rounds(value): + "parse vary_rounds string to percent as [0,1) float, or integer" + if value.endswith("%"): + # XXX: deprecate this in 
favor of raw float? + return float(value.rstrip("%"))*.01 + try: + return int(value) + except ValueError: + return float(value) + +# set of options which aren't allowed to be set via policy +_forbidden_scheme_options = set(["salt"]) + # 'salt' - not allowed since a fixed salt would defeat the purpose. + +# dict containing funcs used to coerce strings to correct type +# for scheme option keys. +_coerce_scheme_options = dict( + min_rounds=int, + max_rounds=int, + default_rounds=int, + vary_rounds=_coerce_vary_rounds, + salt_size=int, +) + +def _is_handler_registered(handler): + """detect if handler is registered or a custom handler""" + return get_crypt_handler(handler.name, None) is handler + +#============================================================================= +# crypt policy +#============================================================================= +_preamble = ("The CryptPolicy class has been deprecated as of " + "Passlib 1.6, and will be removed in Passlib 1.8. ") + +class CryptPolicy(object): + """ + .. deprecated:: 1.6 + This class has been deprecated, and will be removed in Passlib 1.8. + All of it's functionality has been rolled into :class:`CryptContext`. + + This class previously stored the configuration options for the + CryptContext class. In the interest of interface simplification, + all of this class' functionality has been rolled into the CryptContext + class itself. + The documentation for this class is now focused on documenting how to + migrate to the new api. Additionally, where possible, the deprecation + warnings issued by the CryptPolicy methods will list the replacement call + that should be used. + + Constructors + ============ + CryptPolicy objects can be constructed directly using any of + the keywords accepted by :class:`CryptContext`. Direct uses of the + :class:`!CryptPolicy` constructor should either pass the keywords + directly into the CryptContext constructor, or to :meth:`CryptContext.update` + if the policy object was being used to update an existing context object. + + In addition to passing in keywords directly, + CryptPolicy objects can be constructed by the following methods: + + .. automethod:: from_path + .. automethod:: from_string + .. automethod:: from_source + .. automethod:: from_sources + .. automethod:: replace + + Introspection + ============= + All of the informational methods provided by this class have been deprecated + by identical or similar methods in the :class:`CryptContext` class: + + .. automethod:: has_schemes + .. automethod:: schemes + .. automethod:: iter_handlers + .. automethod:: get_handler + .. automethod:: get_options + .. automethod:: handler_is_deprecated + .. automethod:: get_min_verify_time + + Exporting + ========= + .. automethod:: iter_config + .. automethod:: to_dict + .. automethod:: to_file + .. automethod:: to_string + + .. note:: + CryptPolicy are immutable. + Use the :meth:`replace` method to mutate existing instances. + + .. deprecated:: 1.6 + """ + #=================================================================== + # class methods + #=================================================================== + @classmethod + def from_path(cls, path, section="passlib", encoding="utf-8"): + """create a CryptPolicy instance from a local file. + + .. deprecated:: 1.6 + + Creating a new CryptContext from a file, which was previously done via + ``CryptContext(policy=CryptPolicy.from_path(path))``, can now be + done via ``CryptContext.from_path(path)``. + See :meth:`CryptContext.from_path` for details. 
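            A short migration sketch, assuming a config file named ``policy.ini``::

                # Passlib 1.5 style (deprecated):
                context = CryptContext(policy=CryptPolicy.from_path("policy.ini"))

                # Passlib 1.6 replacement:
                context = CryptContext.from_path("policy.ini")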
+ + Updating an existing CryptContext from a file, which was previously done + ``context.policy = CryptPolicy.from_path(path)``, can now be + done via ``context.load_path(path)``. + See :meth:`CryptContext.load_path` for details. + """ + warn(_preamble + + "Instead of ``CryptPolicy.from_path(path)``, " + "use ``CryptContext.from_path(path)`` " + " or ``context.load_path(path)`` for an existing CryptContext.", + DeprecationWarning, stacklevel=2) + return cls(_internal_context=CryptContext.from_path(path, section, + encoding)) + + @classmethod + def from_string(cls, source, section="passlib", encoding="utf-8"): + """create a CryptPolicy instance from a string. + + .. deprecated:: 1.6 + + Creating a new CryptContext from a string, which was previously done via + ``CryptContext(policy=CryptPolicy.from_string(data))``, can now be + done via ``CryptContext.from_string(data)``. + See :meth:`CryptContext.from_string` for details. + + Updating an existing CryptContext from a string, which was previously done + ``context.policy = CryptPolicy.from_string(data)``, can now be + done via ``context.load(data)``. + See :meth:`CryptContext.load` for details. + """ + warn(_preamble + + "Instead of ``CryptPolicy.from_string(source)``, " + "use ``CryptContext.from_string(source)`` or " + "``context.load(source)`` for an existing CryptContext.", + DeprecationWarning, stacklevel=2) + return cls(_internal_context=CryptContext.from_string(source, section, + encoding)) + + @classmethod + def from_source(cls, source, _warn=True): + """create a CryptPolicy instance from some source. + + this method autodetects the source type, and invokes + the appropriate constructor automatically. it attempts + to detect whether the source is a configuration string, a filepath, + a dictionary, or an existing CryptPolicy instance. + + .. deprecated:: 1.6 + + Create a new CryptContext, which could previously be done via + ``CryptContext(policy=CryptPolicy.from_source(source))``, should + now be done using an explicit method: the :class:`CryptContext` + constructor itself, :meth:`CryptContext.from_path`, + or :meth:`CryptContext.from_string`. + + Updating an existing CryptContext, which could previously be done via + ``context.policy = CryptPolicy.from_source(source)``, should + now be done using an explicit method: :meth:`CryptContext.update`, + or :meth:`CryptContext.load`. + """ + if _warn: + warn(_preamble + + "Instead of ``CryptPolicy.from_source()``, " + "use ``CryptContext.from_string(path)`` " + " or ``CryptContext.from_path(source)``, as appropriate.", + DeprecationWarning, stacklevel=2) + if isinstance(source, CryptPolicy): + return source + elif isinstance(source, dict): + return cls(_internal_context=CryptContext(**source)) + elif not isinstance(source, (bytes,unicode)): + raise TypeError("source must be CryptPolicy, dict, config string, " + "or file path: %r" % (type(source),)) + elif any(c in source for c in "\n\r\t") or not source.strip(" \t./\;:"): + return cls(_internal_context=CryptContext.from_string(source)) + else: + return cls(_internal_context=CryptContext.from_path(source)) + + @classmethod + def from_sources(cls, sources, _warn=True): + """create a CryptPolicy instance by merging multiple sources. + + each source is interpreted as by :meth:`from_source`, + and the results are merged together. + + .. 
deprecated:: 1.6 + Instead of using this method to merge multiple policies together, + a :class:`CryptContext` instance should be created, and then + the multiple sources merged together via :meth:`CryptContext.load`. + """ + if _warn: + warn(_preamble + + "Instead of ``CryptPolicy.from_sources()``, " + "use the various CryptContext constructors " + " followed by ``context.update()``.", + DeprecationWarning, stacklevel=2) + if len(sources) == 0: + raise ValueError("no sources specified") + if len(sources) == 1: + return cls.from_source(sources[0], _warn=False) + kwds = {} + for source in sources: + kwds.update(cls.from_source(source, _warn=False)._context.to_dict(resolve=True)) + return cls(_internal_context=CryptContext(**kwds)) + + def replace(self, *args, **kwds): + """create a new CryptPolicy, optionally updating parts of the + existing configuration. + + .. deprecated:: 1.6 + Callers of this method should :meth:`CryptContext.update` or + :meth:`CryptContext.copy` instead. + """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.replace()``, " + "use ``context.update()`` or ``context.copy()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().replace()``, " + "create a CryptContext instance and " + "use ``context.update()`` or ``context.copy()``.", + DeprecationWarning, stacklevel=2) + sources = [ self ] + if args: + sources.extend(args) + if kwds: + sources.append(kwds) + return CryptPolicy.from_sources(sources, _warn=False) + + #=================================================================== + # instance attrs + #=================================================================== + + # internal CryptContext we're wrapping to handle everything + # until this class is removed. + _context = None + + # flag indicating this is wrapper generated by the CryptContext.policy + # attribute, rather than one created independantly by the application. + _stub_policy = False + + #=================================================================== + # init + #=================================================================== + def __init__(self, *args, **kwds): + context = kwds.pop("_internal_context", None) + if context: + assert isinstance(context, CryptContext) + self._context = context + self._stub_policy = kwds.pop("_stub_policy", False) + assert not (args or kwds), "unexpected args: %r %r" % (args,kwds) + else: + if args: + if len(args) != 1: + raise TypeError("only one positional argument accepted") + if kwds: + raise TypeError("cannot specify positional arg and kwds") + kwds = args[0] + warn(_preamble + + "Instead of constructing a CryptPolicy instance, " + "create a CryptContext directly, or use ``context.update()`` " + "and ``context.load()`` to reconfigure existing CryptContext " + "instances.", + DeprecationWarning, stacklevel=2) + self._context = CryptContext(**kwds) + + #=================================================================== + # public interface for examining options + #=================================================================== + def has_schemes(self): + """return True if policy defines *any* schemes for use. + + .. deprecated:: 1.6 + applications should use ``bool(context.schemes())`` instead. + see :meth:`CryptContext.schemes`. 
+ """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.has_schemes()``, " + "use ``bool(context.schemes())``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().has_schemes()``, " + "create a CryptContext instance and " + "use ``bool(context.schemes())``.", + DeprecationWarning, stacklevel=2) + return bool(self._context.schemes()) + + def iter_handlers(self): + """return iterator over handlers defined in policy. + + .. deprecated:: 1.6 + applications should use ``context.schemes(resolve=True))`` instead. + see :meth:`CryptContext.schemes`. + """ + if self._stub_policy: + warn(_preamble + + "Instead of ``context.policy.iter_handlers()``, " + "use ``context.schemes(resolve=True)``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().iter_handlers()``, " + "create a CryptContext instance and " + "use ``context.schemes(resolve=True)``.", + DeprecationWarning, stacklevel=2) + return self._context.schemes(resolve=True) + + def schemes(self, resolve=False): + """return list of schemes defined in policy. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.schemes` instead. + """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.schemes()``, " + "use ``context.schemes()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().schemes()``, " + "create a CryptContext instance and " + "use ``context.schemes()``.", + DeprecationWarning, stacklevel=2) + return list(self._context.schemes(resolve=resolve)) + + def get_handler(self, name=None, category=None, required=False): + """return handler as specified by name, or default handler. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.handler` instead, + though note that the ``required`` keyword has been removed, + and the new method will always act as if ``required=True``. + """ + if self._stub_policy: + warn(_preamble + + "Instead of ``context.policy.get_handler()``, " + "use ``context.handler()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().get_handler()``, " + "create a CryptContext instance and " + "use ``context.handler()``.", + DeprecationWarning, stacklevel=2) + # CryptContext.handler() doesn't support required=False, + # so wrapping it in try/except + try: + return self._context.handler(name, category) + except KeyError: + if required: + raise + else: + return None + + def get_min_verify_time(self, category=None): + """get min_verify_time setting for policy. + + .. deprecated:: 1.6 + min_verify_time will be removed entirely in passlib 1.8 + """ + warn("get_min_verify_time() and min_verify_time option is deprecated, " + "and will be removed in Passlib 1.8", DeprecationWarning, + stacklevel=2) + return self._context._config.get_context_option_with_flag(category, "min_verify_time")[0] or 0 + + def get_options(self, name, category=None): + """return dictionary of options specific to a given handler. + + .. deprecated:: 1.6 + this method has no direct replacement in the 1.6 api, as there + is not a clearly defined use-case. however, examining the output of + :meth:`CryptContext.to_dict` should serve as the closest alternative. + """ + # XXX: might make a public replacement, but need more study of the use cases. 
+ if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "``context.policy.get_options()`` will no longer be available.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "``CryptPolicy().get_options()`` will no longer be available.", + DeprecationWarning, stacklevel=2) + if hasattr(name, "name"): + name = name.name + return self._context._config._get_record_options_with_flag(name, category)[0] + + def handler_is_deprecated(self, name, category=None): + """check if handler has been deprecated by policy. + + .. deprecated:: 1.6 + this method has no direct replacement in the 1.6 api, as there + is not a clearly defined use-case. however, examining the output of + :meth:`CryptContext.to_dict` should serve as the closest alternative. + """ + # XXX: might make a public replacement, but need more study of the use cases. + if self._stub_policy: + warn(_preamble + + "``context.policy.handler_is_deprecated()`` will no longer be available.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "``CryptPolicy().handler_is_deprecated()`` will no longer be available.", + DeprecationWarning, stacklevel=2) + if hasattr(name, "name"): + name = name.name + return self._context._is_deprecated_scheme(name, category) + + #=================================================================== + # serialization + #=================================================================== + + def iter_config(self, ini=False, resolve=False): + """iterate over key/value pairs representing the policy object. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.to_dict` instead. + """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.iter_config()``, " + "use ``context.to_dict().items()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().iter_config()``, " + "create a CryptContext instance and " + "use ``context.to_dict().items()``.", + DeprecationWarning, stacklevel=2) + # hacked code that renders keys & values in manner that approximates + # old behavior. context.to_dict() is much cleaner. + context = self._context + if ini: + def render_key(key): + return context._render_config_key(key).replace("__", ".") + def render_value(value): + if isinstance(value, (list,tuple)): + value = ", ".join(value) + return value + resolve = False + else: + render_key = context._render_config_key + render_value = lambda value: value + return ( + (render_key(key), render_value(value)) + for key, value in context._config.iter_config(resolve) + ) + + def to_dict(self, resolve=False): + """export policy object as dictionary of options. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.to_dict` instead. + """ + if self._stub_policy: + warn(_preamble + + "Instead of ``context.policy.to_dict()``, " + "use ``context.to_dict()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().to_dict()``, " + "create a CryptContext instance and " + "use ``context.to_dict()``.", + DeprecationWarning, stacklevel=2) + return self._context.to_dict(resolve) + + def to_file(self, stream, section="passlib"): # pragma: no cover -- deprecated & unused + """export policy to file. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.to_string` instead, + and then write the output to a file as desired. 
+ """ + if self._stub_policy: + warn(_preamble + + "Instead of ``context.policy.to_file(stream)``, " + "use ``stream.write(context.to_string())``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().to_file(stream)``, " + "create a CryptContext instance and " + "use ``stream.write(context.to_string())``.", + DeprecationWarning, stacklevel=2) + out = self._context.to_string(section=section) + if PY2: + out = out.encode("utf-8") + stream.write(out) + + def to_string(self, section="passlib", encoding=None): + """export policy to file. + + .. deprecated:: 1.6 + applications should use :meth:`CryptContext.to_string` instead. + """ + if self._stub_policy: + warn(_preamble + # pragma: no cover -- deprecated & unused + "Instead of ``context.policy.to_string()``, " + "use ``context.to_string()``.", + DeprecationWarning, stacklevel=2) + else: + warn(_preamble + + "Instead of ``CryptPolicy().to_string()``, " + "create a CryptContext instance and " + "use ``context.to_string()``.", + DeprecationWarning, stacklevel=2) + out = self._context.to_string(section=section) + if encoding: + out = out.encode(encoding) + return out + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# _CryptRecord helper class +#============================================================================= +class _CryptRecord(object): + """wraps a handler and automatically applies various options. + + this is a helper used internally by CryptContext in order to reduce the + amount of work that needs to be done by CryptContext.verify(). + this class takes in all the options for a particular (scheme, category) + combination, and attempts to provide as short a code-path as possible for + the particular configuration. + """ + + #=================================================================== + # instance attrs + #=================================================================== + + # informational attrs + handler = None # handler instance this is wrapping + category = None # user category this applies to + deprecated = False # set if handler itself has been deprecated in config + + # rounds management - filled in by _init_rounds_options() + _has_rounds_options = False # if _has_rounds_bounds OR _generate_rounds is set + _has_rounds_bounds = False # if either min_rounds or max_rounds set + _min_rounds = None # minimum rounds allowed by policy, or None + _max_rounds = None # maximum rounds allowed by policy, or None + _generate_rounds = None # rounds generation function, or None + + # encrypt()/genconfig() attrs + settings = None # options to be passed directly to encrypt() + + # verify() attrs + _min_verify_time = None + + # needs_update() attrs + _needs_update = None # optional callable provided by handler + _has_rounds_introspection = False # if rounds can be extract from hash + + # cloned directly from handler, not affected by config options. 
+ identify = None + genhash = None + + #=================================================================== + # init + #=================================================================== + def __init__(self, handler, category=None, deprecated=False, + min_rounds=None, max_rounds=None, default_rounds=None, + vary_rounds=None, min_verify_time=None, + **settings): + # store basic bits + self.handler = handler + self.category = category + self.deprecated = deprecated + self.settings = settings + + # validate & normalize rounds options + self._init_rounds_options(min_rounds, max_rounds, default_rounds, + vary_rounds) + + # init wrappers for handler methods we modify args to + self._init_encrypt_and_genconfig() + self._init_verify(min_verify_time) + self._init_needs_update() + + # these aren't wrapped by _CryptRecord, copy them directly from handler. + self.identify = handler.identify + self.genhash = handler.genhash + + #=================================================================== + # virtual attrs + #=================================================================== + @property + def scheme(self): + return self.handler.name + + @property + def _errprefix(self): + "string used to identify record in error messages" + handler = self.handler + category = self.category + if category: + return "%s %s config" % (handler.name, category) + else: + return "%s config" % (handler.name,) + + def __repr__(self): # pragma: no cover -- debugging + return "<_CryptRecord 0x%x for %s>" % (id(self), self._errprefix) + + #=================================================================== + # rounds generation & limits - used by encrypt & deprecation code + #=================================================================== + def _init_rounds_options(self, mn, mx, df, vr): + "parse options and compile efficient generate_rounds function" + #---------------------------------------------------- + # extract hard limits from handler itself + #---------------------------------------------------- + handler = self.handler + if 'rounds' not in handler.setting_kwds: + # doesn't even support rounds keyword. 
+ return + hmn = getattr(handler, "min_rounds", None) + hmx = getattr(handler, "max_rounds", None) + + def check_against_handler(value, name): + "issue warning if value outside handler limits" + if hmn is not None and value < hmn: + warn("%s: %s value is below handler minimum %d: %d" % + (self._errprefix, name, hmn, value), PasslibConfigWarning) + if hmx is not None and value > hmx: + warn("%s: %s value is above handler maximum %d: %d" % + (self._errprefix, name, hmx, value), PasslibConfigWarning) + + #---------------------------------------------------- + # set policy limits + #---------------------------------------------------- + if mn is not None: + if mn < 0: + raise ValueError("%s: min_rounds must be >= 0" % self._errprefix) + check_against_handler(mn, "min_rounds") + self._min_rounds = mn + self._has_rounds_bounds = True + + if mx is not None: + if mn is not None and mx < mn: + raise ValueError("%s: max_rounds must be " + ">= min_rounds" % self._errprefix) + elif mx < 0: + raise ValueError("%s: max_rounds must be >= 0" % self._errprefix) + check_against_handler(mx, "max_rounds") + self._max_rounds = mx + self._has_rounds_bounds = True + + #---------------------------------------------------- + # validate default_rounds + #---------------------------------------------------- + if df is not None: + if mn is not None and df < mn: + raise ValueError("%s: default_rounds must be " + ">= min_rounds" % self._errprefix) + if mx is not None and df > mx: + raise ValueError("%s: default_rounds must be " + "<= max_rounds" % self._errprefix) + check_against_handler(df, "default_rounds") + elif vr or mx or mn: + # need an explicit default to work with + df = getattr(handler, "default_rounds", None) or mx or mn + assert df is not None, "couldn't find fallback default_rounds" + else: + # no need for rounds generation + self._has_rounds_options = self._has_rounds_bounds + return + + # clip default to handler & policy limits *before* vary rounds + # is calculated, so that proportion vr values are scaled against + # the effective default. + def clip(value): + "clip value to intersection of policy + handler limits" + if mn is not None and value < mn: + value = mn + if hmn is not None and value < hmn: + value = hmn + if mx is not None and value > mx: + value = mx + if hmx is not None and value > hmx: + value = hmx + return value + df = clip(df) + + #---------------------------------------------------- + # validate vary_rounds, + # coerce df/vr to linear scale, + # and setup scale_value() to undo coercion + #---------------------------------------------------- + # NOTE: vr=0 same as if vr not set + if vr: + if vr < 0: + raise ValueError("%s: vary_rounds must be >= 0" % + self._errprefix) + def scale_value(value, upper): + return value + if isinstance(vr, float): + # vr is value from 0..1 expressing fraction of default rounds. + if vr > 1: + # XXX: deprecate 1.0 ? + raise ValueError("%s: vary_rounds must be < 1.0" % + self._errprefix) + # calculate absolute vr value based on df & rounds_cost + cost_scale = getattr(handler, "rounds_cost", "linear") + assert cost_scale in ["log2", "linear"] + if cost_scale == "log2": + # convert df & vr to linear scale for limit calc, + # and redefine scale_value() to convert back to log2. 
+ df = 1<= %d, increasing value from %d" % + (self._errprefix, mn, rounds), PasslibConfigWarning, 4) + rounds = mn + mx = self._max_rounds + if mx and rounds > mx: + warn("%s requires rounds <= %d, decreasing value from %d" % + (self._errprefix, mx, rounds), PasslibConfigWarning, 4) + rounds = mx + kwds['rounds'] = rounds + + #=================================================================== + # verify() + #=================================================================== + # TODO: once min_verify_time is removed, this will just be a clone + # of handler.verify() + + def _init_verify(self, mvt): + "initialize verify() wrapper - implements min_verify_time" + if mvt: + assert isinstance(mvt, (int,float)) and mvt > 0, "CryptPolicy should catch this" + self._min_verify_time = mvt + else: + # no mvt wrapper needed, so just use handler.verify directly + self.verify = self.handler.verify + + def verify(self, secret, hash, **context): + "verify helper - adds min_verify_time delay" + mvt = self._min_verify_time + assert mvt > 0, "wrapper should have been replaced for mvt=0" + start = tick() + if self.handler.verify(secret, hash, **context): + return True + end = tick() + delta = mvt + start - end + if delta > 0: + sleep(delta) + elif delta < 0: + # warn app they exceeded bounds (this might reveal + # relative costs of different hashes if under migration) + warn("CryptContext: verify exceeded min_verify_time: " + "scheme=%r min_verify_time=%r elapsed=%r" % + (self.scheme, mvt, end-start), PasslibConfigWarning) + return False + + #=================================================================== + # needs_update() + #=================================================================== + def _init_needs_update(self): + """initialize state for needs_update()""" + # if handler has been deprecated, replace wrapper and skip other checks + if self.deprecated: + self.needs_update = lambda hash, secret: True + return + + # let handler detect hashes with configurations that don't match + # current settings. currently do this by calling + # ``handler._bind_needs_update(**settings)``, which if defined + # should return None or a callable ``needs_update(hash,secret)->bool``. + # + # NOTE: this interface is still private, because it was hacked in + # for the sake of bcrypt & scram, and is subject to change. + handler = self.handler + const = getattr(handler, "_bind_needs_update", None) + if const: + self._needs_update = const(**self.settings) + + # XXX: what about a "min_salt_size" deprecator? + + # set flag if we can extract rounds from hash, allowing + # needs_update() to check for rounds that are outside of + # the configured range. + if self._has_rounds_bounds and hasattr(handler, "from_string"): + self._has_rounds_introspection = True + + def needs_update(self, hash, secret): + # init replaces this method entirely for this case. + ### check if handler has been deprecated + ##if self.deprecated: + ## return True + + # check handler's detector if it provided one. + check = self._needs_update + if check and check(hash, secret): + return True + + # XXX: should we use from_string() call below to check + # for config strings, and flag them as needing update? + # or throw an error? + # or leave that as an explicitly undefined border case, + # to keep the codepath simpler & faster? + + # if we can parse rounds parameter, check if it's w/in bounds. 
+ if self._has_rounds_introspection: + # XXX: this might be a good place to use parsehash() + hash_obj = self.handler.from_string(hash) + try: + rounds = hash_obj.rounds + except AttributeError: # pragma: no cover -- sanity check + # XXX: all builtin hashes should have rounds attr, + # so should a warning be issues here? + pass + else: + mn = self._min_rounds + if mn is not None and rounds < mn: + return True + mx = self._max_rounds + if mx and rounds > mx: + return True + + return False + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# _CryptConfig helper class +#============================================================================= +class _CryptConfig(object): + """parses, validates, and stores CryptContext config + + this is a helper used internally by CryptContext to handle + parsing, validation, and serialization of it's config options. + split out from the main class, but not made public since + that just complicates interface too much (c.f. CryptPolicy) + + :arg source: config as dict mapping ``(cat,scheme,option) -> value`` + """ + #=================================================================== + # instance attrs + #=================================================================== + + # triple-nested dict which maps scheme -> category -> key -> value, + # storing all hash-specific options + _scheme_options = None + + # double-nested dict which maps key -> category -> value + # storing all CryptContext options + _context_options = None + + # tuple of handler objects + handlers = None + + # tuple of scheme objects in same order as handlers + schemes = None + + # tuple of categories in alphabetical order (not including None) + categories = None + + # dict mapping category -> default scheme + _default_schemes = None + + # dict mapping (scheme, category) -> _CryptRecord + _records = None + + # dict mapping category -> list of _CryptRecord instances for that category, + # in order of schemes(). 
populated on demand by _get_record_list() + _record_lists = None + + #=================================================================== + # constructor + #=================================================================== + def __init__(self, source): + self._init_scheme_list(source.get((None,None,"schemes"))) + self._init_options(source) + self._init_default_schemes() + self._init_records() + + def _init_scheme_list(self, data): + """initialize .handlers and .schemes attributes""" + handlers = [] + schemes = [] + if isinstance(data, str): + data = splitcomma(data) + for elem in data or (): + # resolve elem -> handler & scheme + if hasattr(elem, "name"): + handler = elem + scheme = handler.name + _validate_handler_name(scheme) + elif isinstance(elem, str): + handler = get_crypt_handler(elem) + scheme = handler.name + else: + raise TypeError("scheme must be name or CryptHandler, " + "not %r" % type(elem)) + + # check scheme name isn't already in use + if scheme in schemes: + raise KeyError("multiple handlers with same name: %r" % + (scheme,)) + + # add to handler list + handlers.append(handler) + schemes.append(scheme) + + self.handlers = tuple(handlers) + self.schemes = tuple(schemes) + + #=================================================================== + # lowlevel options + #=================================================================== + + #--------------------------------------------------------------- + # init lowlevel option storage + #--------------------------------------------------------------- + def _init_options(self, source): + """load config dict into internal representation, + and init .categories attr + """ + # prepare dicts & locals + norm_scheme_option = self._norm_scheme_option + norm_context_option = self._norm_context_option + self._scheme_options = scheme_options = {} + self._context_options = context_options = {} + categories = set() + + # load source config into internal storage + for (cat, scheme, key), value in iteritems(source): + categories.add(cat) + if scheme: + # normalize scheme option + key, value = norm_scheme_option(key, value) + + # store in scheme_options + # map structure: scheme_options[scheme][category][key] = value + try: + category_map = scheme_options[scheme] + except KeyError: + scheme_options[scheme] = {cat: {key: value}} + else: + try: + option_map = category_map[cat] + except KeyError: + category_map[cat] = {key: value} + else: + option_map[key] = value + else: + # normalize context option + if cat and key == "schemes": + raise KeyError("'schemes' context option is not allowed " + "per category") + key, value = norm_context_option(key, value) + + # store in context_options + # map structure: context_options[key][category] = value + try: + category_map = context_options[key] + except KeyError: + context_options[key] = {cat: value} + else: + category_map[cat] = value + + # store list of configured categories + categories.discard(None) + self.categories = tuple(sorted(categories)) + + def _norm_scheme_option(self, key, value): + # check for invalid options + if key == "rounds": + # for now, translating this to 'default_rounds' to be helpful. + # need to pick one of the two names as official, + # and deprecate the other one. + key = "default_rounds" + elif key in _forbidden_scheme_options: + raise KeyError("%r option not allowed in CryptContext " + "configuration" % (key,)) + # coerce strings for certain fields (e.g. 
min_rounds uses ints) + if isinstance(value, str): + func = _coerce_scheme_options.get(key) + if func: + value = func(value) + return key, value + + def _norm_context_option(self, key, value): + schemes = self.schemes + if key == "default": + if hasattr(value, "name"): + value = value.name + elif not isinstance(value, str): + raise ExpectedTypeError(value, "str", "default") + if schemes and value not in schemes: + raise KeyError("default scheme not found in policy") + elif key == "deprecated": + if isinstance(value, str): + value = splitcomma(value) + elif not isinstance(value, (list,tuple)): + raise ExpectedTypeError(value, "str or seq", "deprecated") + if 'auto' in value: + if len(value) > 1: + raise ValueError("cannot list other schemes if " + "``deprecated=['auto']`` is used") + elif schemes: + # make sure list of deprecated schemes is subset of configured schemes + for scheme in value: + if not isinstance(scheme, str): + raise ExpectedTypeError(value, "str", "deprecated element") + if scheme not in schemes: + raise KeyError("deprecated scheme not found " + "in policy: %r" % (scheme,)) + elif key == "min_verify_time": + warn("'min_verify_time' is deprecated as of Passlib 1.6, will be " + "ignored in 1.7, and removed in 1.8.", DeprecationWarning) + value = float(value) + if value < 0: + raise ValueError("'min_verify_time' must be >= 0") + elif key != "schemes": + raise KeyError("unknown CryptContext keyword: %r" % (key,)) + return key, value + + #--------------------------------------------------------------- + # reading context options + #--------------------------------------------------------------- + def get_context_optionmap(self, key, _default={}): + """return dict mapping category->value for specific context option. + (treat retval as readonly). + """ + return self._context_options.get(key, _default) + + def get_context_option_with_flag(self, category, key): + """return value of specific option, handling category inheritance. + also returns flag indicating whether value is category-specific. + """ + try: + category_map = self._context_options[key] + except KeyError: + return None, False + value = category_map.get(None) + if category: + try: + alt = category_map[category] + except KeyError: + pass + else: + if value is None or alt != value: + return alt, True + return value, False + + #--------------------------------------------------------------- + # reading scheme options + #--------------------------------------------------------------- + def _get_scheme_optionmap(self, scheme, category, default={}): + """return all options for (scheme,category) combination + (treat return as readonly) + """ + try: + return self._scheme_options[scheme][category] + except KeyError: + return default + + def get_scheme_options_with_flag(self, scheme, category): + """return composite dict of all options set for scheme. + includes options inherited from 'all' and from default category. + result can be modified. 
+ returns (kwds, has_cat_specific_options) + """ + # start out with copy of global options + get_optionmap = self._get_scheme_optionmap + kwds = get_optionmap("all", None).copy() + has_cat_options = False + + # add in category-specific global options + if category: + defkwds = kwds.copy() # <-- used to detect category-specific options + kwds.update(get_optionmap("all", category)) + + # add in default options for scheme + other = get_optionmap(scheme, None) + kwds.update(other) + + # load category-specific options for scheme + if category: + defkwds.update(other) + kwds.update(get_optionmap(scheme, category)) + + # compare default category options to see if there's anything + # category-specific + if kwds != defkwds: + has_cat_options = True + + return kwds, has_cat_options + + #=================================================================== + # deprecated & default schemes + #=================================================================== + def _init_default_schemes(self): + """initialize maps containing default scheme for each category. + + have to do this after _init_options(), since the default scheme + is affected by the list of deprecated schemes. + """ + # init maps & locals + get_optionmap = self.get_context_optionmap + default_map = self._default_schemes = get_optionmap("default").copy() + dep_map = get_optionmap("deprecated") + schemes = self.schemes + if not schemes: + return + + # figure out default scheme + deps = dep_map.get(None) or () + default = default_map.get(None) + if not default: + for scheme in schemes: + if scheme not in deps: + default_map[None] = scheme + break + else: + raise ValueError("must have at least one non-deprecated scheme") + elif default in deps: + raise ValueError("default scheme cannot be deprecated") + + # figure out per-category default schemes, + for cat in self.categories: + cdeps = dep_map.get(cat, deps) + cdefault = default_map.get(cat, default) + if not cdefault: + for scheme in schemes: + if scheme not in cdeps: + default_map[cat] = scheme + break + else: + raise ValueError("must have at least one non-deprecated " + "scheme for %r category" % cat) + elif cdefault in cdeps: + raise ValueError("default scheme for %r category " + "cannot be deprecated" % cat) + + def default_scheme(self, category): + "return default scheme for specific category" + defaults = self._default_schemes + try: + return defaults[category] + except KeyError: + pass + if not self.schemes: + raise KeyError("no hash schemes configured for this " + "CryptContext instance") + return defaults[None] + + def is_deprecated_with_flag(self, scheme, category): + "is scheme deprecated under particular category?" + depmap = self.get_context_optionmap("deprecated") + def test(cat): + source = depmap.get(cat, depmap.get(None)) + if source is None: + return None + elif 'auto' in source: + return scheme != self.default_scheme(cat) + else: + return scheme in source + value = test(None) or False + if category: + alt = test(category) + if alt is not None and value != alt: + return alt, True + return value, False + + #=================================================================== + # CryptRecord objects + #=================================================================== + def _init_records(self): + # NOTE: this step handles final validation of settings, + # checking for violatiions against handler's internal invariants. + # this is why we create all the records now, + # so CryptContext throws error immediately rather than later. 
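+ # For illustration (hypothetical config): schemes=["sha256_crypt"] plus an + # "admin" category that overrides sha256_crypt__default_rounds ends up with + # records keyed roughly as ("sha256_crypt", None) and ("sha256_crypt", "admin").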
+ self._record_lists = {} + records = self._records = {} + get_options = self._get_record_options_with_flag + categories = self.categories + for handler in self.handlers: + scheme = handler.name + kwds, _ = get_options(scheme, None) + records[scheme, None] = _CryptRecord(handler, **kwds) + for cat in categories: + kwds, has_cat_options = get_options(scheme, cat) + if has_cat_options: + records[scheme, cat] = _CryptRecord(handler, cat, **kwds) + # NOTE: if handler has no category-specific opts, get_record() + # will automatically use the default category's record. + # NOTE: default records for specific category stored under the + # key (None,category); these are populated on-demand by get_record(). + + def _get_record_options_with_flag(self, scheme, category): + """return composite dict of options for given scheme + category. + + this is currently a private method, though some variant + of it's output may eventually be made public. + + given a scheme & category, it returns two things: + a set of all the keyword options to pass to the _CryptRecord constructor, + and a bool flag indicating whether any of these options + were specific to the named category. if this flag is false, + the options are identical to the options for the default category. + + the options dict includes all the scheme-specific settings, + as well as optional *deprecated* and *min_verify_time* keywords. + """ + # get scheme options + kwds, has_cat_options = self.get_scheme_options_with_flag(scheme, category) + + # throw in deprecated flag + value, not_inherited = self.is_deprecated_with_flag(scheme, category) + if value: + kwds['deprecated'] = True + if not_inherited: + has_cat_options = True + + # add in min_verify_time setting from context + value, not_inherited = self.get_context_option_with_flag(category, "min_verify_time") + if value: + kwds['min_verify_time'] = value + if not_inherited: + has_cat_options = True + + return kwds, has_cat_options + + def get_record(self, scheme, category): + "return record for specific scheme & category (cached)" + # NOTE: this is part of the critical path shared by + # all of CryptContext's PasswordHash methods, + # hence all the caching and error checking. + + # quick lookup in cache + try: + return self._records[scheme, category] + except KeyError: + pass + + # type check + if category is not None and not isinstance(category, str): + if PY2 and isinstance(category, unicode): + # for compatibility with unicode-centric py2 apps + return self.get_record(scheme, category.encode("utf-8")) + raise ExpectedTypeError(category, "str or None", "category") + if scheme is not None and not isinstance(scheme, str): + raise ExpectedTypeError(scheme, "str or None", "scheme") + + # if scheme=None, + # use record for category's default scheme, and cache result. + if not scheme: + default = self.default_scheme(category) + assert default + record = self._records[None, category] = self.get_record(default, + category) + return record + + # if no record for (scheme, category), + # use record for (scheme, None), and cache result. 
+ if category: + try: + cache = self._records + record = cache[scheme, category] = cache[scheme, None] + return record + except KeyError: + pass + + # scheme not found in configuration for default category + raise KeyError("crypt algorithm not found in policy: %r" % (scheme,)) + + def _get_record_list(self, category=None): + """return list of records for category (cached) + + this is an internal helper used only by identify_record() + """ + # type check of category - handled by _get_record() + # quick lookup in cache + try: + return self._record_lists[category] + except KeyError: + pass + # cache miss - build list from scratch + value = self._record_lists[category] = [ + self.get_record(scheme, category) + for scheme in self.schemes + ] + return value + + def identify_record(self, hash, category, required=True): + """internal helper to identify appropriate _CryptRecord for hash""" + # NOTE: this is part of the critical path shared by + # all of CryptContext's PasswordHash methods, + # hence all the caching and error checking. + # FIXME: if multiple hashes could match (e.g. lmhash vs nthash) + # this will only return first match. might want to do something + # about this in future, but for now only hashes with + # unique identifiers will work properly in a CryptContext. + # XXX: if all handlers have a unique prefix (e.g. all are MCF / LDAP), + # could use dict-lookup to speed up this search. + if not isinstance(hash, base_string_types): + raise ExpectedStringError(hash, "hash") + # type check of category - handled by _get_record_list() + for record in self._get_record_list(category): + if record.identify(hash): + return record + if not required: + return None + elif not self.schemes: + raise KeyError("no crypt algorithms supported") + else: + raise ValueError("hash could not be identified") + + #=================================================================== + # serialization + #=================================================================== + def iter_config(self, resolve=False): + """regenerate original config. + + this is an iterator which yields ``(cat,scheme,option),value`` items, + in the order they generally appear inside an INI file. + if interpreted as a dictionary, it should match the original + keywords passed to the CryptContext (aside from any canonization). + + it's mainly used as the internal backend for most of the public + serialization methods. + """ + # grab various bits of data + scheme_options = self._scheme_options + context_options = self._context_options + scheme_keys = sorted(scheme_options) + context_keys = sorted(context_options) + + # write loaded schemes (may differ from 'schemes' local var) + if 'schemes' in context_keys: + context_keys.remove("schemes") + value = self.handlers if resolve else self.schemes + if value: + yield (None, None, "schemes"), list(value) + + # then run through config for each user category + for cat in (None,) + self.categories: + + # write context options + for key in context_keys: + try: + value = context_options[key][cat] + except KeyError: + pass + else: + if isinstance(value, list): + value = list(value) + yield (cat, None, key), value + + # write per-scheme options for all schemes. 
+ for scheme in scheme_keys: + try: + kwds = scheme_options[scheme][cat] + except KeyError: + pass + else: + for key in sorted(kwds): + yield (cat, scheme, key), kwds[key] + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# main CryptContext class +#============================================================================= +class CryptContext(object): + """Helper for encrypting passwords using different algorithms. + + Instances of this class allow applications to choose a specific + set of hash algorithms which they wish to support, set limits and defaults + for the rounds and salt sizes those algorithms should use, flag + which algorithms should be deprecated, and automatically handle + migrating users to stronger hashes when they log in. + + Basic usage:: + + >>> ctx = CryptContext(schemes=[...]) + + See the Passlib online documentation for details and full documentation. + """ + # FIXME: altering the configuration of this object isn't threadsafe, + # but is generally only done during application init, so not a major + # issue (just yet). + + # XXX: would like some way to restrict the categories that are allowed, + # to restrict what the app OR the config can use. + + #=================================================================== + # instance attrs + #=================================================================== + + # _CryptConfig instance holding current parsed config + _config = None + + # copy of _config methods, stored in CryptContext instance for speed. + _get_record = None + _identify_record = None + + #=================================================================== + # secondary constructors + #=================================================================== + @classmethod + def _norm_source(cls, source): + "internal helper - accepts string, dict, or context" + if isinstance(source, dict): + return cls(**source) + elif isinstance(source, cls): + return source + else: + self = cls() + self.load(source) + return self + + @classmethod + def from_string(cls, source, section="passlib", encoding="utf-8"): + """create new CryptContext instance from an INI-formatted string. + + :type source: unicode or bytes + :arg source: + string containing INI-formatted content. + + :type section: str + :param section: + option name of section to read from, defaults to ``"passlib"``. + + :type encoding: str + :arg encoding: + optional encoding used when source is bytes, defaults to ``"utf-8"``. + + :returns: + new :class:`CryptContext` instance, configured based on the + parameters in the *source* string. + + Usage example:: + + >>> from passlib.context import CryptContext + >>> context = CryptContext.from_string(''' + ... [passlib] + ... schemes = sha256_crypt, des_crypt + ... sha256_crypt__default_rounds = 30000 + ... ''') + + .. versionadded:: 1.6 + + .. seealso:: :meth:`to_string`, the inverse of this constructor. + """ + if not isinstance(source, base_string_types): + raise ExpectedTypeError(source, "unicode or bytes", "source") + self = cls(_autoload=False) + self.load(source, section=section, encoding=encoding) + return self + + @classmethod + def from_path(cls, path, section="passlib", encoding="utf-8"): + """create new CryptContext instance from an INI-formatted file. + + this functions exactly the same as :meth:`from_string`, + except that it loads from a local file. 
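+ + For example (the path shown is hypothetical):: + + >>> from passlib.context import CryptContext + >>> context = CryptContext.from_path("/etc/myapp/passlib.ini")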
+ + :type path: str + :arg path: + path to local file containing INI-formatted config. + + :type section: str + :param section: + option name of section to read from, defaults to ``"passlib"``. + + :type encoding: str + :arg encoding: + encoding used to load file, defaults to ``"utf-8"``. + + :returns: + new CryptContext instance, configured based on the parameters + stored in the file *path*. + + .. versionadded:: 1.6 + + .. seealso:: :meth:`from_string` for an equivalent usage example. + """ + self = cls(_autoload=False) + self.load_path(path, section=section, encoding=encoding) + return self + + def copy(self, **kwds): + """Return copy of existing CryptContext instance. + + This function returns a new CryptContext instance whose configuration + is exactly the same as the original, with the exception that any keywords + passed in will take precedence over the original settings. + As an example:: + + >>> from passlib.context import CryptContext + + >>> # given an existing context... + >>> ctx1 = CryptContext(["sha256_crypt", "md5_crypt"]) + + >>> # copy can be used to make a clone, and update + >>> # some of the settings at the same time... + >>> ctx2 = ctx1.copy(default="md5_crypt") + + >>> # and the original will be unaffected by the change + >>> ctx1.default_scheme() + "sha256_crypt" + >>> ctx2.default_scheme() + "md5_crypt" + + .. versionadded:: 1.6 + This method was previously named :meth:`!replace`. That alias + has been deprecated, and will be removed in Passlib 1.8. + + .. seealso:: :meth:`update` + """ + # XXX: it would be faster to store ref to self._config, + # but don't want to share config objects til sure + # can rely on them being immutable. + other = CryptContext(_autoload=False) + other.load(self) + if kwds: + other.load(kwds, update=True) + return other + + def replace(self, **kwds): + "deprecated alias of :meth:`copy`" + warn("CryptContext().replace() has been deprecated in Passlib 1.6, " + "and will be removed in Passlib 1.8, " + "it has been renamed to CryptContext().copy()", + DeprecationWarning, stacklevel=2) + return self.copy(**kwds) + + #=================================================================== + # init + #=================================================================== + def __init__(self, schemes=None, + # keyword only... + policy=_UNSET, # <-- deprecated + _autoload=True, **kwds): + # XXX: add ability to flag certain contexts as immutable, + # e.g. the builtin passlib ones? + # XXX: add a name or import path for the contexts, to help out repr? + if schemes is not None: + kwds['schemes'] = schemes + if policy is not _UNSET: + warn("The CryptContext ``policy`` keyword has been deprecated as of Passlib 1.6, " + "and will be removed in Passlib 1.8; please use " + "``CryptContext.from_string()`` or " + "``CryptContext.from_path()`` instead.", + DeprecationWarning) + if policy is None: + self.load(kwds) + elif isinstance(policy, CryptPolicy): + self.load(policy._context) + self.update(kwds) + else: + raise TypeError("policy must be a CryptPolicy instance") + elif _autoload: + self.load(kwds) + else: + assert not kwds, "_autoload=False and kwds are mutually exclusive" + + # XXX: would this be useful?
+ ##def __str__(self): + ## if PY3: + ## return self.to_string() + ## else: + ## return self.to_string().encode("utf-8") + + def __repr__(self): + return "<CryptContext at 0x%0x>" % id(self) + + #=================================================================== + # deprecated policy object + #=================================================================== + def _get_policy(self): + # The CryptPolicy class has been deprecated, so to support any + # legacy accesses, we create a stub policy object so .policy attr + # will continue to work. + # + # the code waits until app accesses a specific policy object attribute + # before issuing deprecation warning, so developer gets method-specific + # suggestion for how to upgrade. + + # NOTE: making a copy of the context so the policy acts like a snapshot, + # to retain the pre-1.6 behavior. + return CryptPolicy(_internal_context=self.copy(), _stub_policy=True) + + def _set_policy(self, policy): + warn("The CryptPolicy class and the ``context.policy`` attribute have " + "been deprecated as of Passlib 1.6, and will be removed in " + "Passlib 1.8; please use the ``context.load()`` and " + "``context.update()`` methods instead.", + DeprecationWarning, stacklevel=2) + if isinstance(policy, CryptPolicy): + self.load(policy._context) + else: + raise TypeError("expected CryptPolicy instance") + + policy = property(_get_policy, _set_policy, + doc="[deprecated] returns CryptPolicy instance " + "tied to this CryptContext") + + #=================================================================== + # loading / updating configuration + #=================================================================== + @staticmethod + def _parse_ini_stream(stream, section, filename): + "helper to read INI from stream, extract passlib section as dict" + # NOTE: this expects a unicode stream under py3, + # and a utf-8 bytes stream under py2, + # allowing the resulting dict to always use native strings. + p = SafeConfigParser() + if PY_MIN_32: + # python 3.2 deprecated readfp in favor of read_file + p.read_file(stream, filename) + else: + p.readfp(stream, filename) + return dict(p.items(section)) + + def load_path(self, path, update=False, section="passlib", encoding="utf-8"): + """Load new configuration into CryptContext from a local file. + + This function is a wrapper for :meth:`load` which + loads a configuration string from the local file *path*, + instead of an in-memory source. Its behavior and options + are otherwise identical to :meth:`!load` when provided with + an INI-formatted string. + + .. versionadded:: 1.6 + """ + def helper(stream): + kwds = self._parse_ini_stream(stream, section, path) + return self.load(kwds, update=update) + if PY3: + # decode to unicode, which load() expects under py3 + with open(path, "rt", encoding=encoding) as stream: + return helper(stream) + elif encoding in ["utf-8", "ascii"]: + # keep as utf-8 bytes, which load() expects under py2 + with open(path, "rb") as stream: + return helper(stream) + else: + # transcode to utf-8 bytes + with open(path, "rb") as fh: + tmp = fh.read().decode(encoding).encode("utf-8") + return helper(BytesIO(tmp)) + + def load(self, source, update=False, section="passlib", encoding="utf-8"): + """Load new configuration into CryptContext, replacing existing config. + + :arg source: + source of new configuration to load. + this value can be a number of different types: + + * a :class:`!dict` object, or compatible Mapping + + the key/value pairs will be interpreted the same as + keywords for the :class:`CryptContext` class constructor.
+ + * a :class:`!unicode` or :class:`!bytes` string + + this will be interpreted as an INI-formatted file, + and appropriate key/value pairs will be loaded from + the specified *section*. + + * another :class:`!CryptContext` object. + + this will export a snapshot of its configuration + using :meth:`to_dict`. + + :type update: bool + :param update: + By default, :meth:`load` will replace the existing configuration + entirely. If ``update=True``, it will preserve any existing + configuration options that are not overridden by the new source, + much like the :meth:`update` method. + + :type section: str + :param section: + When parsing an INI-formatted string, :meth:`load` will look for + a section named ``"passlib"``. This option allows an alternate + section name to be used. Ignored when loading from a dictionary. + + :type encoding: str + :param encoding: + Encoding to use when decoding a bytes *source*. + Defaults to ``"utf-8"``. Ignored when loading from a dictionary. + + :raises TypeError: + * If the source cannot be identified. + * If an unknown / malformed keyword is encountered. + + :raises ValueError: + If an invalid keyword value is encountered. + + .. note:: + + If an error occurs during a :meth:`!load` call, the :class:`!CryptContext` + instance will be restored to the configuration it was in before + the :meth:`!load` call was made; this is to ensure it is + *never* left in an inconsistent state due to a load error. + + .. versionadded:: 1.6 + """ + #----------------------------------------------------------- + # autodetect source type, convert to dict + #----------------------------------------------------------- + parse_keys = True + if isinstance(source, base_string_types): + if PY3: + source = to_unicode(source, encoding, param="source") + else: + source = to_bytes(source, "utf-8", source_encoding=encoding, + param="source") + source = self._parse_ini_stream(NativeStringIO(source), section, + "<string passed to CryptContext.load()>") + elif isinstance(source, CryptContext): + # extract dict directly from config, so it can be merged later + source = dict(source._config.iter_config(resolve=True)) + parse_keys = False + elif not hasattr(source, "items"): + # mappings are left alone, otherwise throw an error. + raise ExpectedTypeError(source, "string or dict", "source") + + # XXX: add support for other iterable types, e.g. sequence of pairs?
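+ + # Illustrative calls only (assuming ``ctx`` is an existing CryptContext): + # ctx.load({"schemes": ["sha256_crypt"], "sha256_crypt__default_rounds": 40000}) + # ctx.load("[passlib]\nschemes = sha256_crypt\n") + # ctx.load(other_context) # adopt another context's configuration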
+ + #----------------------------------------------------------- + # parse dict keys into (category, scheme, option) format, + # merge with existing configuration if needed + #----------------------------------------------------------- + if parse_keys: + parse = self._parse_config_key + source = dict((parse(key), value) + for key, value in iteritems(source)) + if update and self._config is not None: + # if updating, do nothing if source is empty, + if not source: + return + # otherwise overlay source on top of existing config + tmp = source + source = dict(self._config.iter_config(resolve=True)) + source.update(tmp) + + #----------------------------------------------------------- + # compile into _CryptConfig instance, and update state + #----------------------------------------------------------- + config = _CryptConfig(source) + self._config = config + self._get_record = config.get_record + self._identify_record = config.identify_record + + @staticmethod + def _parse_config_key(ckey): + """helper used to parse ``cat__scheme__option`` keys into a tuple""" + # split string into 1-3 parts + assert isinstance(ckey, str) + parts = ckey.replace(".","__").split("__") + count = len(parts) + if count == 1: + cat, scheme, key = None, None, parts[0] + elif count == 2: + cat = None + scheme, key = parts + elif count == 3: + cat, scheme, key = parts + else: + raise TypeError("keys must have less than 3 separators: %r" % + (ckey,)) + # validate & normalize the parts + if cat == "default": + cat = None + elif not cat and cat is not None: + raise TypeError("empty category: %r" % ckey) + if scheme == "context": + scheme = None + elif not scheme and scheme is not None: + raise TypeError("empty scheme: %r" % ckey) + if not key: + raise TypeError("empty option: %r" % ckey) + return cat, scheme, key + + def update(self, *args, **kwds): + """Helper for quickly changing configuration. + + This acts much like the :meth:`!dict.update` method: + it updates the context's configuration, + replacing the original value(s) for the specified keys, + and preserving the rest. + It accepts any :ref:`keyword ` + accepted by the :class:`!CryptContext` constructor. + + .. versionadded:: 1.6 + + .. seealso:: :meth:`copy` + """ + if args: + if len(args) > 1: + raise TypeError("expected at most one positional argument") + if kwds: + raise TypeError("positional arg and keywords mutually exclusive") + self.load(args[0], update=True) + elif kwds: + self.load(kwds, update=True) + + # XXX: make this public? even just as flag to load? + # FIXME: this function suffered some bitrot in 1.6.1, + # will need to be updated before works again. + ##def _simplify(self): + ## "helper to remove redundant/unused options" + ## # don't do anything if no schemes are defined + ## if not self._schemes: + ## return + ## + ## def strip_items(target, filter): + ## keys = [key for key,value in iteritems(target) + ## if filter(key,value)] + ## for key in keys: + ## del target[key] + ## + ## # remove redundant default. + ## defaults = self._default_schemes + ## if defaults.get(None) == self._schemes[0]: + ## del defaults[None] + ## + ## # remove options for unused schemes. + ## scheme_options = self._scheme_options + ## schemes = self._schemes + ("all",) + ## strip_items(scheme_options, lambda k,v: k not in schemes) + ## + ## # remove rendundant cat defaults. + ## cur = self.default_scheme() + ## strip_items(defaults, lambda k,v: k and v==cur) + ## + ## # remove redundant category deprecations. 
+ ## # TODO: this should work w/ 'auto', but needs closer inspection + ## deprecated = self._deprecated_schemes + ## cur = self._deprecated_schemes.get(None) + ## strip_items(deprecated, lambda k,v: k and v==cur) + ## + ## # remove redundant category options. + ## for scheme, config in iteritems(scheme_options): + ## if None in config: + ## cur = config[None] + ## strip_items(config, lambda k,v: k and v==cur) + ## + ## # XXX: anything else? + + #=================================================================== + # reading configuration + #=================================================================== + def schemes(self, resolve=False): + """return schemes loaded into this CryptContext instance. + + :type resolve: bool + :arg resolve: + if ``True``, will return a tuple of :class:`~passlib.ifc.PasswordHash` + objects instead of their names. + + :returns: + returns tuple of the schemes configured for this context + via the *schemes* option. + + .. versionadded:: 1.6 + This was previously available as ``CryptContext().policy.schemes()`` + + .. seealso:: the :ref:`schemes ` option for usage example. + """ + return self._config.handlers if resolve else self._config.schemes + + # XXX: need to decide if exposing this would be useful to applications + # in any way that isn't already served by to_dict(); + # and then decide whether to expose ability as deprecated_schemes(), + # is_deprecated(), or a just add a schemes(deprecated=True) flag. + def _is_deprecated_scheme(self, scheme, category=None): + "helper used by unittests to check if scheme is deprecated" + return self._get_record(scheme, category).deprecated + + def default_scheme(self, category=None, resolve=False): + """return name of scheme that :meth:`encrypt` will use by default. + + :type resolve: bool + :arg resolve: + if ``True``, will return a :class:`~passlib.ifc.PasswordHash` + object instead of the name. + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will return the catgory-specific default scheme instead. + + :returns: + name of the default scheme. + + .. seealso:: the :ref:`default ` option for usage example. + + .. versionadded:: 1.6 + """ + # type check of category - handled by _get_record() + record = self._get_record(None, category) + return record.handler if resolve else record.scheme + + # XXX: need to decide if exposing this would be useful in any way + ##def categories(self): + ## """return user-categories with algorithm-specific options in this CryptContext. + ## + ## this will always return a tuple. + ## if no categories besides the default category have been configured, + ## the tuple will be empty. + ## """ + ## return self._config.categories + + def handler(self, scheme=None, category=None): + """helper to resolve name of scheme -> :class:`~passlib.ifc.PasswordHash` object used by scheme. + + :arg scheme: + This should identify the scheme to lookup. + If omitted or set to ``None``, this will return the handler + for the default scheme. + + :arg category: + If a user category is specified, and no scheme is provided, + it will use the default for that category. + Otherwise this parameter is ignored. + + :raises KeyError: + If the scheme does not exist OR is not being used within this context. + + :returns: + :class:`~passlib.ifc.PasswordHash` object used to implement + the named scheme within this context (this will usually + be one of the objects from :mod:`passlib.hash`) + + .. 
versionadded:: 1.6 + This was previously available as ``CryptContext().policy.get_handler()`` + """ + try: + return self._get_record(scheme, category).handler + except KeyError: + pass + if self._config.handlers: + raise KeyError("crypt algorithm not found in this " + "CryptContext instance: %r" % (scheme,)) + else: + raise KeyError("no crypt algorithms loaded in this " + "CryptContext instance") + + def _get_unregistered_handlers(self): + "check if any handlers in this context aren't in the global registry" + return tuple(handler for handler in self._config.handlers + if not _is_handler_registered(handler)) + + #=================================================================== + # exporting config + #=================================================================== + @staticmethod + def _render_config_key(key): + "convert 3-part config key to single string" + cat, scheme, option = key + if cat: + return "%s__%s__%s" % (cat, scheme or "context", option) + elif scheme: + return "%s__%s" % (scheme, option) + else: + return option + + @staticmethod + def _render_ini_value(key, value): + "render value to string suitable for INI file" + # convert lists to comma separated lists + # (mainly 'schemes' & 'deprecated') + if isinstance(value, (list,tuple)): + value = ", ".join(value) + + # convert numbers to strings + elif isinstance(value, num_types): + if isinstance(value, float) and key[2] == "vary_rounds": + value = ("%.2f" % value).rstrip("0") if value else "0" + else: + value = str(value) + + assert isinstance(value, str), \ + "expected string for key: %r %r" % (key, value) + + # escape any percent signs. + return value.replace("%", "%%") + + def to_dict(self, resolve=False): + """Return current configuration as a dictionary. + + :type resolve: bool + :arg resolve: + if ``True``, the ``schemes`` key will contain a list of + a :class:`~passlib.ifc.PasswordHash` objects instead of just + their names. + + This method dumps the current configuration of the CryptContext + instance. The key/value pairs should be in the format accepted + by the :class:`!CryptContext` class constructor, in fact + ``CryptContext(**myctx.to_dict())`` will create an exact copy of ``myctx``. + As an example:: + + >>> # you can dump the configuration of any crypt context... + >>> from passlib.apps import ldap_nocrypt_context + >>> ldap_nocrypt_context.to_dict() + {'schemes': ['ldap_salted_sha1', + 'ldap_salted_md5', + 'ldap_sha1', + 'ldap_md5', + 'ldap_plaintext']} + + .. versionadded:: 1.6 + This was previously available as ``CryptContext().policy.to_dict()`` + + .. seealso:: the :ref:`context-serialization-example` example in the tutorial. + """ + # XXX: should resolve default to conditional behavior + # based on presence of unregistered handlers? + render_key = self._render_config_key + return dict((render_key(key), value) + for key, value in self._config.iter_config(resolve)) + + def _write_to_parser(self, parser, section): + "helper to write to ConfigParser instance" + render_key = self._render_config_key + render_value = self._render_ini_value + parser.add_section(section) + for k,v in self._config.iter_config(): + v = render_value(k, v) + k = render_key(k) + parser.set(section, k, v) + + def to_string(self, section="passlib"): + """serialize to INI format and return as unicode string. + + :param section: + name of INI section to output, defaults to ``"passlib"``. + + :returns: + CryptContext configuration, serialized to a INI unicode string. 
+ + This function acts exactly like :meth:`to_dict`, except that it + serializes all the contents into a single human-readable string, + which can be hand edited, and/or stored in a file. The + output of this method is accepted by :meth:`from_string`, + :meth:`from_path`, and :meth:`load`. As an example:: + + >>> # you can dump the configuration of any crypt context... + >>> from passlib.apps import ldap_nocrypt_context + >>> print ldap_nocrypt_context.to_string() + [passlib] + schemes = ldap_salted_sha1, ldap_salted_md5, ldap_sha1, ldap_md5, ldap_plaintext + + .. versionadded:: 1.6 + This was previously available as ``CryptContext().policy.to_string()`` + + .. seealso:: the :ref:`context-serialization-example` example in the tutorial. + """ + parser = SafeConfigParser() + self._write_to_parser(parser, section) + buf = NativeStringIO() + parser.write(buf) + unregistered = self._get_unregistered_handlers() + if unregistered: + buf.write(( + "# NOTE: the %s handler(s) are not registered with Passlib,\n" + "# this string may not correctly reproduce the current configuration.\n\n" + ) % ", ".join(repr(handler.name) for handler in unregistered)) + out = buf.getvalue() + if not PY3: + out = out.decode("utf-8") + return out + + # XXX: is this useful enough to enable? + ##def write_to_path(self, path, section="passlib", update=False): + ## "write to INI file" + ## parser = ConfigParser() + ## if update and os.path.exists(path): + ## if not parser.read([path]): + ## raise EnvironmentError("failed to read existing file") + ## parser.remove_section(section) + ## self._write_to_parser(parser, section) + ## fh = file(path, "w") + ## parser.write(fh) + ## fh.close() + + #=================================================================== + # password hash api + #=================================================================== + + # NOTE: all the following methods do is look up the appropriate + # _CryptRecord for a given (scheme,category) combination, + # and hand off the real work to the record's methods, + # which are optimized for the specific (scheme,category) configuration. + # + # The record objects are cached inside the _CryptConfig + # instance stored in self._config, and are retreived + # via get_record() and identify_record(). + # + # _get_record() and _identify_record() are references + # to _config methods of the same name, + # stored in CryptContext for speed. + + def _get_or_identify_record(self, hash, scheme=None, category=None): + "return record based on scheme, or failing that, by identifying hash" + if scheme: + if not isinstance(hash, base_string_types): + raise ExpectedStringError(hash, "hash") + return self._get_record(scheme, category) + else: + # hash typecheck handled by identify_record() + return self._identify_record(hash, category) + + def needs_update(self, hash, scheme=None, category=None, secret=None): + """Check if hash needs to be replaced for some reason, + in which case the secret should be re-hashed. + + This function is the core of CryptContext's support for hash migration: + This function takes in a hash string, and checks the scheme, + number of rounds, and other properties against the current policy. + It returns ``True`` if the hash is using a deprecated scheme, + or is otherwise outside of the bounds specified by the policy + (e.g. the number of rounds is lower than :ref:`min_rounds ` + configuration for that algorithm). + If so, the password should be re-encrypted using :meth:`encrypt` + Otherwise, it will return ``False``. 
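A short illustration of the deprecation check described above, assuming an arbitrary md5_crypt-to-pbkdf2_sha256 policy that is not part of this patch:

    from passlib.context import CryptContext
    from passlib.hash import md5_crypt

    ctx = CryptContext(schemes=["pbkdf2_sha256", "md5_crypt"],
                       deprecated=["md5_crypt"])

    legacy_hash = md5_crypt.encrypt("hunter2")   # stands in for a hash from an old database row
    fresh_hash = ctx.encrypt("hunter2")          # new hashes use the default (pbkdf2_sha256) scheme

    print(ctx.needs_update(legacy_hash))   # True  -- md5_crypt is listed as deprecated
    print(ctx.needs_update(fresh_hash))    # False -- already conforms to the policy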
+ + :type hash: unicode or bytes + :arg hash: + The hash string to examine. + + :type scheme: str or None + :param scheme: + + Optional scheme to use. Scheme must be one of the ones + configured for this context (see the + :ref:`schemes ` option). + If no scheme is specified, it will be identified + based on the value of *hash*. + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will cause any category-specific defaults to + be used when determining if the hash needs to be updated + (e.g. is below the minimum rounds). + + :type secret: unicode, bytes, or None + :param secret: + Optional secret associated with the provided ``hash``. + This is not required, or even currently used for anything... + it's for forward-compatibility with any future + update checks that might need this information. + If provided, Passlib assumes the secret has already been + verified successfully against the hash. + + .. versionadded:: 1.6 + + :returns: ``True`` if hash should be replaced, otherwise ``False``. + + :raises ValueError: + If the hash did not match any of the configured :meth:`schemes`. + + .. versionadded:: 1.6 + This method was previously named :meth:`hash_needs_update`. + + .. seealso:: the :ref:`context-migration-example` example in the tutorial. + """ + record = self._get_or_identify_record(hash, scheme, category) + return record.needs_update(hash, secret) + + @deprecated_method(deprecated="1.6", removed="2.0", replacement="CryptContext.needs_update()") + def hash_needs_update(self, hash, scheme=None, category=None): + """Legacy alias for :meth:`needs_update`. + + .. deprecated:: 1.6 + This method was renamed to :meth:`!needs_update` in version 1.6. + This alias will be removed in version 2.0, and should only + be used for compatibility with Passlib 1.3 - 1.5. + """ + return self.needs_update(hash, scheme, category) + + def genconfig(self, scheme=None, category=None, **settings): + """Generate a config string for specified scheme. + + This wraps the :meth:`~passlib.ifc.PasswordHash.genconfig` + method of the appropriate algorithm, using the default if + one is not specified. + The main difference between this and calling a hash's + :meth:`!genconfig` method directly is that this way, the CryptContext + will add in any hash-specific options, such as the default rounds. + + :type scheme: str or None + :param scheme: + + Optional scheme to use. Scheme must be one of the ones + configured for this context (see the + :ref:`schemes ` option). + If no scheme is specified, the configured default + will be used. + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will cause any category-specific defaults to + be used when hashing the password (e.g. different default scheme, + different default rounds values, etc). + + :param \*\*settings: + All additional keywords are passed to the appropriate handler, + and should match it's :attr:`~passlib.ifc.PasswordHash.setting_kwds`. + + :returns: + A configuration string suitable for passing to :meth:`~CryptContext.genhash`, + encoding all the provided settings and defaults; or ``None`` + if the selected algorithm doesn't support configuration strings. + The return value will always be a :class:`!str`. + """ + return self._get_record(scheme, category).genconfig(**settings) + + def genhash(self, secret, config, scheme=None, category=None, **kwds): + """Generate hash for the specified secret using another hash. 
+ + This wraps the :meth:`~passlib.ifc.PasswordHash.genhash` + method of the appropriate algorithm, identifying it based + on the provided hash / configuration if a scheme is not specified + explicitly. + + :type secret: unicode or bytes + :arg secret: + the password to hash. + + :type config: unicode or bytes + :arg hash: + The hash or configuration string to extract the settings and salt + from when hashing the password. + + :type scheme: str or None + :param scheme: + + Optional scheme to use. Scheme must be one of the ones + configured for this context (see the + :ref:`schemes ` option). + If no scheme is specified, it will be identified + based on the value of *config*. + + :type category: str or None + :param category: + Optional :ref:`user category `. + Ignored by this function, this parameter + is provided for symmetry with the other methods. + + :param \*\*kwds: + All additional keywords are passed to the appropriate handler, + and should match it's :attr:`~passlib.ifc.PasswordHash.context_kwds`. + + :returns: + The secret as encoded by the specified algorithm and options. + The return value will always be a :class:`!str`. + + :raises TypeError, ValueError: + * if any of the arguments have an invalid type or value. + * if the selected algorithm's underlying :meth:`~passlib.ifc.PasswordHash.genhash` + method throws an error based on *secret* or the provided *kwds*. + """ + # XXX: could insert normalization to preferred unicode encoding here + return self._get_record(scheme, category).genhash(secret, config, **kwds) + + def identify(self, hash, category=None, resolve=False, required=False): + """Attempt to identify which algorithm the hash belongs to. + + Note that this will only consider the algorithms + currently configured for this context + (see the :ref:`schemes ` option). + All registered algorithms will be checked, from first to last, + and whichever one positively identifies the hash first will be returned. + + :type hash: unicode or bytes + :arg hash: + The hash string to test. + + :type category: str or None + :param category: + Optional :ref:`user category `. + Ignored by this function, this parameter + is provided for symmetry with the other methods. + + :type resolve: bool + :param resolve: + If ``True``, returns the hash handler itself, + instead of the name of the hash. + + :type required: bool + :param required: + If ``True``, this will raise a ValueError if the hash + cannot be identified, instead of returning ``None``. + + :returns: + The handler which first identifies the hash, + or ``None`` if none of the algorithms identify the hash. + """ + record = self._identify_record(hash, category, required) + if record is None: + return None + elif resolve: + return record.handler + else: + return record.scheme + + def encrypt(self, secret, scheme=None, category=None, **kwds): + """run secret through selected algorithm, returning resulting hash. + + :type secret: unicode or bytes + :arg secret: + the password to hash. + + :type scheme: str or None + :param scheme: + + Optional scheme to use. Scheme must be one of the ones + configured for this context (see the + :ref:`schemes ` option). + If no scheme is specified, the configured default + will be used. + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will cause any category-specific defaults to + be used when hashing the password (e.g. different default scheme, + different default rounds values, etc). 
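To make the category machinery concrete, here is a hypothetical configuration in which an invented "admin" category gets a larger rounds default; the names and numbers are illustrative only:

    from passlib.context import CryptContext

    ctx = CryptContext(
        schemes=["pbkdf2_sha256"],
        pbkdf2_sha256__default_rounds=10000,            # default for ordinary users
        admin__pbkdf2_sha256__default_rounds=20000,     # stronger default for the "admin" category
    )

    user_hash = ctx.encrypt("secret")                     # picks up the ordinary default
    admin_hash = ctx.encrypt("secret", category="admin")  # picks up the stronger admin default

    # verification never needs the category -- the rounds are encoded in the hash itself
    print(ctx.verify("secret", admin_hash))   # True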
+ + :param \*\*kwds: + All other keyword options are passed to the selected algorithm's + :meth:`PasswordHash.encrypt() ` method. + + :returns: + The secret as encoded by the specified algorithm and options. + The return value will always be a :class:`!str`. + + :raises TypeError, ValueError: + * If any of the arguments have an invalid type or value. + This includes any keywords passed to the underlying hash's + :meth:`PasswordHash.encrypt() ` method. + + .. seealso:: the :ref:`context-basic-example` example in the tutorial + """ + # XXX: could insert normalization to preferred unicode encoding here + return self._get_record(scheme, category).encrypt(secret, **kwds) + + def verify(self, secret, hash, scheme=None, category=None, **kwds): + """verify secret against an existing hash. + + If no scheme is specified, this will attempt to identify + the scheme based on the contents of the provided hash + (limited to the schemes configured for this context). + It will then check whether the password verifies against the hash. + + :type secret: unicode or bytes + :arg secret: + the secret to verify + + :type secret: unicode or bytes + :arg hash: + hash string to compare to + + :type scheme: str + :param scheme: + Optionally force context to use specific scheme. + This is usually not needed, as most hashes can be unambiguously + identified. Scheme must be one of the ones configured + for this context + (see the :ref:`schemes ` option). + + :type category: str or None + :param category: + Optional :ref:`user category ` string. + This is mainly used when generating new hashes, it has little + effect when verifying; this keyword is mainly provided for symmetry. + + :param \*\*kwds: + All additional keywords are passed to the appropriate handler, + and should match it's :attr:`~passlib.ifc.PasswordHash.context_kwds`. + + :returns: + ``True`` if the password matched the hash, else ``False``. + + :raises ValueError: + * if the hash did not match any of the configured :meth:`schemes`. + + * if any of the arguments have an invalid value (this includes + any keywords passed to the underlying hash's + :meth:`PasswordHash.verify() ` method). + + :raises TypeError: + * if any of the arguments have an invalid type (this includes + any keywords passed to the underlying hash's + :meth:`PasswordHash.verify() ` method). + + .. seealso:: the :ref:`context-basic-example` example in the tutorial + """ + # XXX: have record strip context kwds if scheme doesn't use them? + # XXX: could insert normalization to preferred unicode encoding here + # XXX: what about supporting a setter() callback ala django 1.4 ? + record = self._get_or_identify_record(hash, scheme, category) + return record.verify(secret, hash, **kwds) + + def verify_and_update(self, secret, hash, scheme=None, category=None, **kwds): + """verify password and re-hash the password if needed, all in a single call. + + This is a convenience method which takes care of all the following: + first it verifies the password (:meth:`~CryptContext.verify`), if this is successfull + it checks if the hash needs updating (:meth:`~CryptContext.needs_update`), and if so, + re-hashes the password (:meth:`~CryptContext.encrypt`), returning the replacement hash. + This series of steps is a very common task for applications + which wish to update deprecated hashes, and this call takes + care of all 3 steps efficiently. 
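A sketch of the typical login flow built on this method; the in-memory dict stands in for whatever storage layer an application actually uses, and the scheme choices are arbitrary:

    from passlib.context import CryptContext
    from passlib.hash import md5_crypt

    ctx = CryptContext(schemes=["pbkdf2_sha256", "md5_crypt"],
                       deprecated=["md5_crypt"])

    # toy in-memory "database" holding a legacy hash
    db = {"alice": md5_crypt.encrypt("correct horse")}

    def check_login(username, password):
        "verify a password and transparently upgrade deprecated hashes"
        ok, new_hash = ctx.verify_and_update(password, db[username])
        if ok and new_hash is not None:
            # correct password, but the stored hash is deprecated / below policy:
            # persist the freshly generated replacement
            db[username] = new_hash
        return ok

    print(check_login("alice", "correct horse"))        # True
    print(db["alice"].startswith("$pbkdf2-sha256$"))    # True -- hash silently upgraded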
+ + :type secret: unicode or bytes + :arg secret: + the secret to verify + + :type secret: unicode or bytes + :arg hash: + hash string to compare to + + :type scheme: str + :param scheme: + Optionally force context to use specific scheme. + This is usually not needed, as most hashes can be unambiguously + identified. Scheme must be one of the ones configured + for this context + (see the :ref:`schemes ` option). + + :type category: str or None + :param category: + Optional :ref:`user category `. + If specified, this will cause any category-specific defaults to + be used if the password has to be re-hashed. + + :param \*\*kwds: + all additional keywords are passed to the appropriate handler, + and should match that hash's + :attr:`PasswordHash.context_kwds `. + + :returns: + This function returns a tuple containing two elements: + ``(verified, replacement_hash)``. The first is a boolean + flag indicating whether the password verified, + and the second an optional replacement hash. + The tuple will always match one of the following 3 cases: + + * ``(False, None)`` indicates the secret failed to verify. + * ``(True, None)`` indicates the secret verified correctly, + and the hash does not need updating. + * ``(True, str)`` indicates the secret verified correctly, + but the current hash needs to be updated. The :class:`!str` + will be the freshly generated hash, to replace the old one. + + :raises TypeError, ValueError: + For the same reasons as :meth:`verify`. + + .. seealso:: the :ref:`context-migration-example` example in the tutorial. + """ + # XXX: have record strip context kwds if scheme doesn't use them? + # XXX: could insert normalization to preferred unicode encoding here. + record = self._get_or_identify_record(hash, scheme, category) + if not record.verify(secret, hash, **kwds): + return False, None + elif record.needs_update(hash, secret): + # NOTE: we re-encrypt with default scheme, not current one. + return True, self.encrypt(secret, None, category, **kwds) + else: + return True, None + + #=================================================================== + # eoc + #=================================================================== + +class LazyCryptContext(CryptContext): + """CryptContext subclass which doesn't load handlers until needed. + + This is a subclass of CryptContext which takes in a set of arguments + exactly like CryptContext, but won't load any handlers + (or even parse it's arguments) until + the first time one of it's methods is accessed. + + :arg schemes: + The first positional argument can be a list of schemes, or omitted, + just like CryptContext. + + :param onload: + + If a callable is passed in via this keyword, + it will be invoked at lazy-load time + with the following signature: + ``onload(**kwds) -> kwds``; + where ``kwds`` is all the additional kwds passed to LazyCryptContext. + It should perform any additional deferred initialization, + and return the final dict of options to be passed to CryptContext. + + .. versionadded:: 1.6 + + :param create_policy: + + .. deprecated:: 1.6 + This option will be removed in Passlib 1.8, + applications should use ``onload`` instead. + + :param kwds: + + All additional keywords are passed to CryptContext; + or to the *onload* function (if provided). + + This is mainly used internally by modules such as :mod:`passlib.apps`, + which define a large number of contexts, but only a few of them will be needed + at any one time. 
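A minimal sketch of the deferred-initialization pattern this class provides; the option names and rounds value are invented for the example:

    from passlib.context import LazyCryptContext

    def _load_options(**kwds):
        # invoked once, the first time the context is actually used;
        # a real application might pull rounds values from its config file here
        kwds.setdefault("pbkdf2_sha256__default_rounds", 12000)
        return kwds

    password_ctx = LazyCryptContext(schemes=["pbkdf2_sha256", "md5_crypt"],
                                    deprecated=["md5_crypt"],
                                    onload=_load_options)

    # the handlers are only loaded and the options only validated at this point:
    print(password_ctx.encrypt("secret")[:15])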
Use of this class saves the memory needed to import + the specified handlers until the context instance is actually accessed. + As well, it allows constructing a context at *module-init* time, + but using :func:`!onload()` to provide dynamic configuration + at *application-run* time. + + .. versionadded:: 1.4 + """ + _lazy_kwds = None + + # NOTE: the way this class works changed in 1.6. + # previously it just called _lazy_init() when ``.policy`` was + # first accessed. now that is done whenever any of the public + # attributes are accessed, and the class itself is changed + # to a regular CryptContext, to remove the overhead once it's unneeded. + + def __init__(self, schemes=None, **kwds): + if schemes is not None: + kwds['schemes'] = schemes + self._lazy_kwds = kwds + + def _lazy_init(self): + kwds = self._lazy_kwds + if 'create_policy' in kwds: + warn("The CryptPolicy class, and LazyCryptContext's " + "``create_policy`` keyword have been deprecated as of " + "Passlib 1.6, and will be removed in Passlib 1.8; " + "please use the ``onload`` keyword instead.", + DeprecationWarning) + create_policy = kwds.pop("create_policy") + result = create_policy(**kwds) + policy = CryptPolicy.from_source(result, _warn=False) + kwds = policy._context.to_dict() + elif 'onload' in kwds: + onload = kwds.pop("onload") + kwds = onload(**kwds) + del self._lazy_kwds + super(LazyCryptContext, self).__init__(**kwds) + self.__class__ = CryptContext + + def __getattribute__(self, attr): + if (not attr.startswith("_") or attr.startswith("__")) and \ + self._lazy_kwds is not None: + self._lazy_init() + return object.__getattribute__(self, attr) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/exc.py b/passlib/exc.py new file mode 100644 index 00000000..8d872a71 --- /dev/null +++ b/passlib/exc.py @@ -0,0 +1,184 @@ +"""passlib.exc -- exceptions & warnings raised by passlib""" +#============================================================================= +# exceptions +#============================================================================= +class MissingBackendError(RuntimeError): + """Error raised if multi-backend handler has no available backends; + or if specifically requested backend is not available. + + :exc:`!MissingBackendError` derives + from :exc:`RuntimeError`, since it usually indicates + lack of an external library or OS feature. + This is primarily raised by handlers which depend on + external libraries (which is currently just + :class:`~passlib.hash.bcrypt`). + """ + +class PasswordSizeError(ValueError): + """Error raised if a password exceeds the maximum size allowed + by Passlib (4096 characters). + + Many password hash algorithms take proportionately larger amounts of time and/or + memory depending on the size of the password provided. This could present + a potential denial of service (DOS) situation if a maliciously large + password is provided to an application. Because of this, Passlib enforces + a maximum size limit, but one which should be *much* larger + than any legitimate password. :exc:`!PasswordSizeError` derives + from :exc:`!ValueError`. + + .. note:: + Applications wishing to use a different limit should set the + ``PASSLIB_MAX_PASSWORD_SIZE`` environmental variable before + Passlib is loaded. The value can be any large positive integer. + + .. 
versionadded:: 1.6 + """ + def __init__(self): + ValueError.__init__(self, "password exceeds maximum allowed size") + + # this also prevents a glibc crypt segfault issue, detailed here ... + # http://www.openwall.com/lists/oss-security/2011/11/15/1 + +#============================================================================= +# warnings +#============================================================================= +class PasslibWarning(UserWarning): + """base class for Passlib's user warnings, + derives from the builtin :exc:`UserWarning`. + + .. versionadded:: 1.6 + """ + +class PasslibConfigWarning(PasslibWarning): + """Warning issued when non-fatal issue is found related to the configuration + of a :class:`~passlib.context.CryptContext` instance. + + This occurs primarily in one of two cases: + + * The CryptContext contains rounds limits which exceed the hard limits + imposed by the underlying algorithm. + * An explicit rounds value was provided which exceeds the limits + imposed by the CryptContext. + + In both of these cases, the code will perform correctly & securely; + but the warning is issued as a sign the configuration may need updating. + + .. versionadded:: 1.6 + """ + +class PasslibHashWarning(PasslibWarning): + """Warning issued when non-fatal issue is found with parameters + or hash string passed to a passlib hash class. + + This occurs primarily in one of two cases: + + * A rounds value or other setting was explicitly provided which + exceeded the handler's limits (and has been clamped + by the :ref:`relaxed` flag). + + * A malformed hash string was encountered which (while parsable) + should be re-encoded. + + .. versionadded:: 1.6 + """ + +class PasslibRuntimeWarning(PasslibWarning): + """Warning issued when something unexpected happens during runtime. + + The fact that it's a warning instead of an error means Passlib + was able to correct for the issue, but that it's anonmalous enough + that the developers would love to hear under what conditions it occurred. + + .. versionadded:: 1.6 + """ + +class PasslibSecurityWarning(PasslibWarning): + """Special warning issued when Passlib encounters something + that might affect security. + + .. versionadded:: 1.6 + """ + +#============================================================================= +# error constructors +# +# note: these functions are used by the hashes in Passlib to raise common +# error messages. They are currently just functions which return ValueError, +# rather than subclasses of ValueError, since the specificity isn't needed +# yet; and who wants to import a bunch of error classes when catching +# ValueError will do? +#============================================================================= + +def _get_name(handler): + return handler.name if handler else "" + +#------------------------------------------------------------------------ +# generic helpers +#------------------------------------------------------------------------ +def type_name(value): + "return pretty-printed string containing name of value's type" + cls = value.__class__ + if cls.__module__ and cls.__module__ not in ["__builtin__", "builtins"]: + return "%s.%s" % (cls.__module__, cls.__name__) + elif value is None: + return 'None' + else: + return cls.__name__ + +def ExpectedTypeError(value, expected, param): + "error message when param was supposed to be one type, but found another" + # NOTE: value is never displayed, since it may sometimes be a password. 
+ name = type_name(value) + return TypeError("%s must be %s, not %s" % (param, expected, name)) + +def ExpectedStringError(value, param): + "error message when param was supposed to be unicode or bytes" + return ExpectedTypeError(value, "unicode or bytes", param) + +#------------------------------------------------------------------------ +# encrypt/verify parameter errors +#------------------------------------------------------------------------ +def MissingDigestError(handler=None): + "raised when verify() method gets passed config string instead of hash" + name = _get_name(handler) + return ValueError("expected %s hash, got %s config string instead" % + (name, name)) + +def NullPasswordError(handler=None): + "raised by OS crypt() supporting hashes, which forbid NULLs in password" + name = _get_name(handler) + return ValueError("%s does not allow NULL bytes in password" % name) + +#------------------------------------------------------------------------ +# errors when parsing hashes +#------------------------------------------------------------------------ +def InvalidHashError(handler=None): + "error raised if unrecognized hash provided to handler" + return ValueError("not a valid %s hash" % _get_name(handler)) + +def MalformedHashError(handler=None, reason=None): + "error raised if recognized-but-malformed hash provided to handler" + text = "malformed %s hash" % _get_name(handler) + if reason: + text = "%s (%s)" % (text, reason) + return ValueError(text) + +def ZeroPaddedRoundsError(handler=None): + "error raised if hash was recognized but contained zero-padded rounds field" + return MalformedHashError(handler, "zero-padded rounds") + +#------------------------------------------------------------------------ +# settings / hash component errors +#------------------------------------------------------------------------ +def ChecksumSizeError(handler, raw=False): + "error raised if hash was recognized, but checksum was wrong size" + # TODO: if handler.use_defaults is set, this came from app-provided value, + # not from parsing a hash string, might want different error msg. + checksum_size = handler.checksum_size + unit = "bytes" if raw else "chars" + reason = "checksum must be exactly %d %s" % (checksum_size, unit) + return MalformedHashError(handler, reason) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/ext/__init__.py b/passlib/ext/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/passlib/ext/__init__.py @@ -0,0 +1 @@ + diff --git a/passlib/ext/django/__init__.py b/passlib/ext/django/__init__.py new file mode 100644 index 00000000..2dc9b282 --- /dev/null +++ b/passlib/ext/django/__init__.py @@ -0,0 +1,6 @@ +"""passlib.ext.django.models -- monkeypatch django hashing framework + +this plugin monkeypatches django's hashing framework +so that it uses a passlib context object, allowing handling of arbitrary +hashes in Django databases. 
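For orientation, a project enables this plugin entirely from its Django settings module; the values below are an illustrative guess at a typical setup, using the PASSLIB_CONFIG and PASSLIB_GET_CATEGORY settings read by models.py further down in this patch:

    # excerpt from a project's settings.py -- illustrative values only
    INSTALLED_APPS = [
        # ... the project's other apps ...
        "passlib.ext.django",   # gets passlib.ext.django.models imported at startup,
                                # which applies the monkeypatch described below
    ]

    # either pick one of the preset configurations defined in passlib.ext.django.utils ...
    PASSLIB_CONFIG = "passlib-default"

    # ... or supply a raw INI / dict configuration instead, e.g.:
    # PASSLIB_CONFIG = "[passlib]\nschemes = django_pbkdf2_sha256, django_salted_sha1\n" \
    #                  "deprecated = django_salted_sha1\n"

    # optional hook mapping User objects to passlib categories
    def PASSLIB_GET_CATEGORY(user):
        return "superuser" if user.is_superuser else None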
+""" diff --git a/passlib/ext/django/models.py b/passlib/ext/django/models.py new file mode 100644 index 00000000..6c4d245a --- /dev/null +++ b/passlib/ext/django/models.py @@ -0,0 +1,323 @@ +"""passlib.ext.django.models -- monkeypatch django hashing framework""" +#============================================================================= +# imports +#============================================================================= +# core +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +from django import VERSION +from django.conf import settings +# pkg +from passlib.context import CryptContext +from passlib.exc import ExpectedTypeError +from passlib.ext.django.utils import _PatchManager, hasher_to_passlib_name, \ + get_passlib_hasher, get_preset_config +from passlib.utils.compat import callable, unicode, bytes +# local +__all__ = ["password_context"] + +#============================================================================= +# global attrs +#============================================================================= + +# the context object which this patches contrib.auth to use for password hashing. +# configuration controlled by ``settings.PASSLIB_CONFIG``. +password_context = CryptContext() + +# function mapping User objects -> passlib user category. +# may be overridden via ``settings.PASSLIB_GET_CATEGORY``. +def _get_category(user): + """default get_category() implementation""" + if user.is_superuser: + return "superuser" + elif user.is_staff: + return "staff" + else: + return None + +# object used to track state of patches applied to django. +_manager = _PatchManager(log=logging.getLogger(__name__ + "._manager")) + +# patch status +_patched = False + +#============================================================================= +# applying & removing the patches +#============================================================================= +def _apply_patch(): + """monkeypatch django's password handling to use ``passlib_context``, + assumes the caller will configure the object. + """ + # + # setup constants + # + log.debug("preparing to monkeypatch 'django.contrib.auth' ...") + global _patched + assert not _patched, "monkeypatching already applied" + HASHERS_PATH = "django.contrib.auth.hashers" + MODELS_PATH = "django.contrib.auth.models" + USER_PATH = MODELS_PATH + ":User" + FORMS_PATH = "django.contrib.auth.forms" + + # + # import UNUSUABLE_PASSWORD and is_password_usuable() helpers + # (providing stubs for older django versions) + # + if VERSION < (1,4): + has_hashers = False + if VERSION < (1,0): + UNUSABLE_PASSWORD = "!" + else: + from django.contrib.auth.models import UNUSABLE_PASSWORD + + def is_password_usable(encoded): + return encoded is not None and encoded != UNUSABLE_PASSWORD + + def is_valid_secret(secret): + return secret is not None + + elif VERSION < (1,6): + has_hashers = True + from django.contrib.auth.hashers import UNUSABLE_PASSWORD, \ + is_password_usable + + # NOTE: 1.4 - 1.5 - empty passwords no longer valid. 
+ def is_valid_secret(secret): + return bool(secret) + + else: + has_hashers = True + from django.contrib.auth.hashers import is_password_usable + + # 1.6 - empty passwords valid again + def is_valid_secret(secret): + return secret is not None + + if VERSION < (1,6): + def make_unusable_password(): + return UNUSABLE_PASSWORD + else: + from django.contrib.auth.hashers import make_password as _make_password + def make_unusable_password(): + return _make_password(None) + + # django 1.4.6+ uses a separate hasher for "sha1$$digest" hashes + has_unsalted_sha1 = (VERSION >= (1,4,6)) + + # + # backport ``User.set_unusable_password()`` for Django 0.9 + # (simplifies rest of the code) + # + if not hasattr(_manager.getorig(USER_PATH), "set_unusable_password"): + assert VERSION < (1,0) + + @_manager.monkeypatch(USER_PATH) + def set_unusable_password(user): + user.password = make_unusable_password() + + @_manager.monkeypatch(USER_PATH) + def has_usable_password(user): + return is_password_usable(user.password) + + # + # patch ``User.set_password() & ``User.check_password()`` to use + # context & get_category (would just leave these as wrappers for hashers + # module under django 1.4, but then we couldn't pass User object into + # get_category very easily) + # + @_manager.monkeypatch(USER_PATH) + def set_password(user, password): + "passlib replacement for User.set_password()" + if is_valid_secret(password): + # NOTE: pulls _get_category from module globals + cat = _get_category(user) + user.password = password_context.encrypt(password, category=cat) + else: + user.set_unusable_password() + + @_manager.monkeypatch(USER_PATH) + def check_password(user, password): + "passlib replacement for User.check_password()" + hash = user.password + if not is_valid_secret(password) or not is_password_usable(hash): + return False + if not hash and VERSION < (1,4): + return False + # NOTE: pulls _get_category from module globals + cat = _get_category(user) + ok, new_hash = password_context.verify_and_update(password, hash, + category=cat) + if ok and new_hash is not None: + # migrate to new hash if needed. + user.password = new_hash + user.save() + return ok + + # + # override check_password() with our own implementation + # + @_manager.monkeypatch(HASHERS_PATH, enable=has_hashers) + @_manager.monkeypatch(MODELS_PATH) + def check_password(password, encoded, setter=None, preferred="default"): + "passlib replacement for check_password()" + # XXX: this currently ignores "preferred" keyword, since it's purpose + # was for hash migration, and that's handled by the context. + if not is_valid_secret(password) or not is_password_usable(encoded): + return False + ok = password_context.verify(password, encoded) + if ok and setter and password_context.needs_update(encoded): + setter(password) + return ok + + # + # patch the other functions defined in the ``hashers`` module, as well + # as any other known locations where they're imported within ``contrib.auth`` + # + if has_hashers: + @_manager.monkeypatch(HASHERS_PATH) + @_manager.monkeypatch(MODELS_PATH) + def make_password(password, salt=None, hasher="default"): + "passlib replacement for make_password()" + if not is_valid_secret(password): + return make_unusable_password() + if hasher == "default": + scheme = None + else: + scheme = hasher_to_passlib_name(hasher) + kwds = dict(scheme=scheme) + handler = password_context.handler(scheme) + # NOTE: django make specify an empty string for the salt, + # even if scheme doesn't accept a salt. we omit keyword + # in that case. 
+ if salt is not None and (salt or 'salt' in handler.setting_kwds): + kwds['salt'] = salt + return password_context.encrypt(password, **kwds) + + @_manager.monkeypatch(HASHERS_PATH) + @_manager.monkeypatch(FORMS_PATH) + def get_hasher(algorithm="default"): + "passlib replacement for get_hasher()" + if algorithm == "default": + scheme = None + else: + scheme = hasher_to_passlib_name(algorithm) + # NOTE: resolving scheme -> handler instead of + # passing scheme into get_passlib_hasher(), + # in case context contains custom handler + # shadowing name of a builtin handler. + handler = password_context.handler(scheme) + return get_passlib_hasher(handler, algorithm=algorithm) + + # identify_hasher() was added in django 1.5, + # patching it anyways for 1.4, so passlib's version is always available. + @_manager.monkeypatch(HASHERS_PATH) + @_manager.monkeypatch(FORMS_PATH) + def identify_hasher(encoded): + "passlib helper to identify hasher from encoded password" + handler = password_context.identify(encoded, resolve=True, + required=True) + algorithm = None + if (has_unsalted_sha1 and handler.name == "django_salted_sha1" and + encoded.startswith("sha1$$")): + # django 1.4.6+ uses a separate hasher for "sha1$$digest" hashes, + # but passlib just reuses the "sha1$salt$digest" handler. + # we want to resolve to correct django hasher. + algorithm = "unsalted_sha1" + return get_passlib_hasher(handler, algorithm=algorithm) + + _patched = True + log.debug("... finished monkeypatching django") + +def _remove_patch(): + """undo the django monkeypatching done by this module. + offered as a last resort if it's ever needed. + + .. warning:: + This may cause problems if any other Django modules have imported + their own copies of the patched functions, though the patched + code has been designed to throw an error as soon as possible in + this case. + """ + global _patched + if _patched: + log.debug("removing django monkeypatching...") + _manager.unpatch_all(unpatch_conflicts=True) + password_context.load({}) + _patched = False + log.debug("...finished removing django monkeypatching") + return True + if _manager: # pragma: no cover -- sanity check + log.warning("reverting partial monkeypatching of django...") + _manager.unpatch_all() + password_context.load({}) + log.debug("...finished removing django monkeypatching") + return True + log.debug("django not monkeypatched") + return False + +#============================================================================= +# main code +#============================================================================= +def _load(): + global _get_category + + # TODO: would like to add support for inheriting config from a preset + # (or from existing hasher state) and letting PASSLIB_CONFIG + # be an update, not a replacement. + + # TODO: wrap and import any custom hashers as passlib handlers, + # so they could be used in the passlib config. 
+ + # load config from settings + _UNSET = object() + config = getattr(settings, "PASSLIB_CONFIG", _UNSET) + if config is _UNSET: + # XXX: should probably deprecate this alias + config = getattr(settings, "PASSLIB_CONTEXT", _UNSET) + if config is _UNSET: + config = "passlib-default" + if config is None: + warn("setting PASSLIB_CONFIG=None is deprecated, " + "and support will be removed in Passlib 1.8, " + "use PASSLIB_CONFIG='disabled' instead.", + DeprecationWarning) + config = "disabled" + elif not isinstance(config, (unicode, bytes, dict)): + raise ExpectedTypeError(config, "str or dict", "PASSLIB_CONFIG") + + # load custom category func (if any) + get_category = getattr(settings, "PASSLIB_GET_CATEGORY", None) + if get_category and not callable(get_category): + raise ExpectedTypeError(get_category, "callable", "PASSLIB_GET_CATEGORY") + + # check if we've been disabled + if config == "disabled": + if _patched: # pragma: no cover -- sanity check + log.error("didn't expect monkeypatching would be applied!") + _remove_patch() + return + + # resolve any preset aliases + if isinstance(config, str) and '\n' not in config: + config = get_preset_config(config) + + # setup context + _apply_patch() + password_context.load(config) + if get_category: + # NOTE: _get_category is module global which is read by + # monkeypatched functions constructed by _apply_patch() + _get_category = get_category + log.debug("passlib.ext.django loaded") + +# wrap load function so we can undo any patching if something goes wrong +try: + _load() +except: + _remove_patch() + raise + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/ext/django/utils.py b/passlib/ext/django/utils.py new file mode 100644 index 00000000..161212b4 --- /dev/null +++ b/passlib/ext/django/utils.py @@ -0,0 +1,505 @@ +"""passlib.ext.django.utils - helper functions used by this plugin""" +#============================================================================= +# imports +#============================================================================= +# core +import logging; log = logging.getLogger(__name__) +from weakref import WeakKeyDictionary +from warnings import warn +# site +try: + from django import VERSION as DJANGO_VERSION + log.debug("found django %r installation", DJANGO_VERSION) +except ImportError: + log.debug("django installation not found") + DJANGO_VERSION = () +# pkg +from passlib.context import CryptContext +from passlib.exc import PasslibRuntimeWarning +from passlib.registry import get_crypt_handler, list_crypt_handlers +from passlib.utils import classproperty +from passlib.utils.compat import bytes, get_method_function, iteritems +# local +__all__ = [ + "get_preset_config", + "get_passlib_hasher", +] + +#============================================================================= +# default policies +#============================================================================= + +# map preset names -> passlib.app attrs +_preset_map = { + "django-1.0": "django10_context", + "django-1.4": "django14_context", + "django-1.6": "django16_context", + "django-latest": "django_context", +} + +def get_preset_config(name): + """Returns configuration string for one of the preset strings + supported by the ``PASSLIB_CONFIG`` setting. + Currently supported presets: + + * ``"passlib-default"`` - default config used by this release of passlib. 
+ * ``"django-default"`` - config matching currently installed django version. + * ``"django-latest"`` - config matching newest django version (currently same as ``"django-1.6"``). + * ``"django-1.0"`` - config used by stock Django 1.0 - 1.3 installs + * ``"django-1.4"`` - config used by stock Django 1.4 installs + * ``"django-1.6"`` - config used by stock Django 1.6 installs + """ + # TODO: add preset which includes HASHERS + PREFERRED_HASHERS, + # after having imported any custom hashers. e.g. "django-current" + if name == "django-default": + if not DJANGO_VERSION: + raise ValueError("can't resolve django-default preset, " + "django not installed") + if DJANGO_VERSION < (1,4): + name = "django-1.0" + elif DJANGO_VERSION < (1,6): + name = "django-1.4" + else: + name = "django-1.6" + if name == "passlib-default": + return PASSLIB_DEFAULT + try: + attr = _preset_map[name] + except KeyError: + raise ValueError("unknown preset config name: %r" % name) + import passlib.apps + return getattr(passlib.apps, attr).to_string() + +# default context used by passlib 1.6 +PASSLIB_DEFAULT = """ +[passlib] + +; list of schemes supported by configuration +; currently all django 1.6, 1.4, and 1.0 hashes, +; and three common modular crypt format hashes. +schemes = + django_pbkdf2_sha256, django_pbkdf2_sha1, django_bcrypt, django_bcrypt_sha256, + django_salted_sha1, django_salted_md5, django_des_crypt, hex_md5, + sha512_crypt, bcrypt, phpass + +; default scheme to use for new hashes +default = django_pbkdf2_sha256 + +; hashes using these schemes will automatically be re-hashed +; when the user logs in (currently all django 1.0 hashes) +deprecated = + django_pbkdf2_sha1, django_salted_sha1, django_salted_md5, + django_des_crypt, hex_md5 + +; sets some common options, including minimum rounds for two primary hashes. +; if a hash has less than this number of rounds, it will be re-hashed. +all__vary_rounds = 0.05 +sha512_crypt__min_rounds = 80000 +django_pbkdf2_sha256__min_rounds = 10000 + +; set somewhat stronger iteration counts for ``User.is_staff`` +staff__sha512_crypt__default_rounds = 100000 +staff__django_pbkdf2_sha256__default_rounds = 12500 + +; and even stronger ones for ``User.is_superuser`` +superuser__sha512_crypt__default_rounds = 120000 +superuser__django_pbkdf2_sha256__default_rounds = 15000 +""" + +#============================================================================= +# translating passlib names <-> hasher names +#============================================================================= + +# prefix used to shoehorn passlib's handler names into django hasher namespace; +# allows get_hasher() to be meaningfully called even if passlib handler +# is the one being used. +PASSLIB_HASHER_PREFIX = "passlib_" + +# prefix all the django-specific hash formats are stored under w/in passlib; +# all of these hashes should expose their hasher name via ``.django_name``. +DJANGO_PASSLIB_PREFIX = "django_" + +# non-django-specific hashes which also expose ``.django_name``. 
+_other_django_hashes = ["hex_md5"] + +def passlib_to_hasher_name(passlib_name): + "convert passlib handler name -> hasher name" + handler = get_crypt_handler(passlib_name) + if hasattr(handler, "django_name"): + return handler.django_name + return PASSLIB_HASHER_PREFIX + passlib_name + +def hasher_to_passlib_name(hasher_name): + "convert hasher name -> passlib handler name" + if hasher_name.startswith(PASSLIB_HASHER_PREFIX): + return hasher_name[len(PASSLIB_HASHER_PREFIX):] + if hasher_name == "unsalted_sha1": + # django 1.4.6+ uses a separate hasher for "sha1$$digest" hashes, + # but passlib just reuses the "sha1$salt$digest" handler. + hasher_name = "sha1" + for name in list_crypt_handlers(): + if name.startswith(DJANGO_PASSLIB_PREFIX) or name in _other_django_hashes: + handler = get_crypt_handler(name) + if getattr(handler, "django_name", None) == hasher_name: + return name + # XXX: this should only happen for custom hashers that have been registered. + # _HasherHandler (below) is work in progress that would fix this. + raise ValueError("can't translate hasher name to passlib name: %r" % + hasher_name) + +#============================================================================= +# wrapping passlib handlers as django hashers +#============================================================================= +_GEN_SALT_SIGNAL = "--!!!generate-new-salt!!!--" + +class _HasherWrapper(object): + """helper for wrapping passlib handlers in Hasher-compatible class.""" + + # filled in by subclass, drives the other methods. + passlib_handler = None + iterations = None + + @classproperty + def algorithm(cls): + assert not hasattr(cls.passlib_handler, "django_name") + return PASSLIB_HASHER_PREFIX + cls.passlib_handler.name + + def salt(self): + # NOTE: passlib's handler.encrypt() should generate new salt each time, + # so this just returns a special constant which tells + # encode() (below) not to pass a salt keyword along. + return _GEN_SALT_SIGNAL + + def verify(self, password, encoded): + return self.passlib_handler.verify(password, encoded) + + def encode(self, password, salt=None, iterations=None): + kwds = {} + if salt is not None and salt != _GEN_SALT_SIGNAL: + kwds['salt'] = salt + if iterations is not None: + kwds['rounds'] = iterations + elif self.iterations is not None: + kwds['rounds'] = self.iterations + return self.passlib_handler.encrypt(password, **kwds) + + _translate_kwds = dict(checksum="hash", rounds="iterations") + + def safe_summary(self, encoded): + from django.contrib.auth.hashers import mask_hash, _, SortedDict + handler = self.passlib_handler + items = [ + # since this is user-facing, we're reporting passlib's name, + # without the distracting PASSLIB_HASHER_PREFIX prepended. + (_('algorithm'), handler.name), + ] + if hasattr(handler, "parsehash"): + kwds = handler.parsehash(encoded, sanitize=mask_hash) + for key, value in iteritems(kwds): + key = self._translate_kwds.get(key, key) + items.append((_(key), value)) + return SortedDict(items) + + # added in django 1.6 + def must_update(self, encoded): + # TODO: would like to do something useful here, + # but would require access to password context, + # which would mean a serious recoding of this ext. + return False + +# cache of hasher wrappers generated by get_passlib_hasher() +_hasher_cache = WeakKeyDictionary() + +def get_passlib_hasher(handler, algorithm=None): + """create *Hasher*-compatible wrapper for specified passlib hash. 
+ + This takes in the name of a passlib hash (or the handler object itself), + and returns a wrapper instance which should be compatible with + Django 1.4's Hashers framework. + + If the named hash corresponds to one of Django's builtin hashers, + an instance of the real hasher class will be returned. + + Note that the format of the handler won't be altered, + so will probably not be compatible with Django's algorithm format, + so the monkeypatch provided by this plugin must have been applied. + + .. note:: + This function requires Django 1.4 or later. + """ + if DJANGO_VERSION < (1,4): + raise RuntimeError("get_passlib_hasher() requires Django >= 1.4") + if isinstance(handler, str): + handler = get_crypt_handler(handler) + if hasattr(handler, "django_name"): + # return native hasher instance + # XXX: should add this to _hasher_cache[] + name = handler.django_name + if name == "sha1" and algorithm == "unsalted_sha1": + # django 1.4.6+ uses a separate hasher for "sha1$$digest" hashes, + # but passlib just reuses the "sha1$salt$digest" handler. + # we want to resolve to correct django hasher. + name = algorithm + return _get_hasher(name) + if handler.name == "django_disabled": + raise ValueError("can't wrap unusable-password handler") + try: + return _hasher_cache[handler] + except KeyError: + name = "Passlib_%s_PasswordHasher" % handler.name.title() + cls = type(name, (_HasherWrapper,), dict(passlib_handler=handler)) + hasher = _hasher_cache[handler] = cls() + return hasher + +def _get_hasher(algorithm): + "wrapper to call django.contrib.auth.hashers:get_hasher()" + import sys + module = sys.modules.get("passlib.ext.django.models") + if module is None: + # we haven't patched django, so just import directly + from django.contrib.auth.hashers import get_hasher + else: + # we've patched django, so have to use patch manager to retreive + # original get_hasher() function... + get_hasher = module._manager.getorig("django.contrib.auth.hashers:get_hasher") + return get_hasher(algorithm) + +#============================================================================= +# adapting django hashers -> passlib handlers +#============================================================================= +# TODO: this code probably halfway works, mainly just needs +# a routine to read HASHERS and PREFERRED_HASHER. + +##from passlib.registry import register_crypt_handler +##from passlib.utils import classproperty, to_native_str, to_unicode +##from passlib.utils.compat import unicode +## +## +##class _HasherHandler(object): +## "helper for wrapping Hasher instances as passlib handlers" +## # FIXME: this generic wrapper doesn't handle custom settings +## # FIXME: genconfig / genhash not supported. +## +## def __init__(self, hasher): +## self.django_hasher = hasher +## if hasattr(hasher, "iterations"): +## # assume encode() accepts an "iterations" parameter. +## # fake min/max rounds +## self.min_rounds = 1 +## self.max_rounds = 0xFFFFffff +## self.default_rounds = self.django_hasher.iterations +## self.setting_kwds += ("rounds",) +## +## # hasher instance - filled in by constructor +## django_hasher = None +## +## setting_kwds = ("salt",) +## context_kwds = () +## +## @property +## def name(self): +## # XXX: need to make sure this wont' collide w/ builtin django hashes. +## # maybe by renaming this to django compatible aliases? 
+## return DJANGO_PASSLIB_PREFIX + self.django_name +## +## @property +## def django_name(self): +## # expose this so hasher_to_passlib_name() extracts original name +## return self.django_hasher.algorithm +## +## @property +## def ident(self): +## # this should always be correct, as django relies on ident prefix. +## return unicode(self.django_name + "$") +## +## @property +## def identify(self, hash): +## # this should always work, as django relies on ident prefix. +## return to_unicode(hash, "latin-1", "hash").startswith(self.ident) +## +## @property +## def genconfig(self): +## # XXX: not sure how to support this. +## return None +## +## @property +## def genhash(self, secret, config): +## if config is not None: +## # XXX: not sure how to support this. +## raise NotImplementedError("genhash() for hashers not implemented") +## return self.encrypt(secret) +## +## @property +## def encrypt(self, secret, salt=None, **kwds): +## # NOTE: from how make_password() is coded, all hashers +## # should have salt param. but only some will have +## # 'iterations' parameter. +## opts = {} +## if 'rounds' in self.setting_kwds and 'rounds' in kwds: +## opts['iterations'] = kwds.pop("rounds") +## if kwds: +## raise TypeError("unexpected keyword arguments: %r" % list(kwds)) +## if isinstance(secret, unicode): +## secret = secret.encode("utf-8") +## if salt is None: +## salt = self.django_hasher.salt() +## return to_native_str(self.django_hasher(secret, salt, **opts)) +## +## @property +## def verify(self, secret, hash): +## hash = to_native_str(hash, "utf-8", "hash") +## if isinstance(secret, unicode): +## secret = secret.encode("utf-8") +## return self.django_hasher.verify(secret, hash) +## +##def register_hasher(hasher): +## handler = _HasherHandler(hasher) +## register_crypt_handler(handler) +## return handler + +#============================================================================= +# monkeypatch helpers +#============================================================================= +# private singleton indicating lack-of-value +_UNSET = object() + +class _PatchManager(object): + "helper to manage monkeypatches and run sanity checks" + + # NOTE: this could easily use a dict interface, + # but keeping it distinct to make clear that it's not a dict, + # since it has important side-effects. + + #=================================================================== + # init and support + #=================================================================== + def __init__(self, log=None): + # map of key -> (original value, patched value) + # original value may be _UNSET + self.log = log or logging.getLogger(__name__ + "._PatchManager") + self._state = {} + + # bool value tests if any patches are currently applied. + __bool__ = __nonzero__ = lambda self: bool(self._state) + + def _import_path(self, path): + "retrieve obj and final attribute name from resource path" + name, attr = path.split(":") + obj = __import__(name, fromlist=[attr], level=0) + while '.' 
in attr: + head, attr = attr.split(".", 1) + obj = getattr(obj, head) + return obj, attr + + @staticmethod + def _is_same_value(left, right): + "check if two values are the same (stripping method wrappers, etc)" + return get_method_function(left) == get_method_function(right) + + #=================================================================== + # reading + #=================================================================== + def _get_path(self, key, default=_UNSET): + obj, attr = self._import_path(key) + return getattr(obj, attr, default) + + def get(self, path, default=None): + "return current value for path" + return self._get_path(path, default) + + def getorig(self, path, default=None): + "return original (unpatched) value for path" + try: + value, _= self._state[path] + except KeyError: + value = self._get_path(path) + return default if value is _UNSET else value + + def check_all(self, strict=False): + """run sanity check on all keys, issue warning if out of sync""" + same = self._is_same_value + for path, (orig, expected) in iteritems(self._state): + if same(self._get_path(path), expected): + continue + msg = "another library has patched resource: %r" % path + if strict: + raise RuntimeError(msg) + else: + warn(msg, PasslibRuntimeWarning) + + #=================================================================== + # patching + #=================================================================== + def _set_path(self, path, value): + obj, attr = self._import_path(path) + if value is _UNSET: + if hasattr(obj, attr): + delattr(obj, attr) + else: + setattr(obj, attr, value) + + def patch(self, path, value): + "monkeypatch object+attr at to have , stores original" + assert value != _UNSET + current = self._get_path(path) + try: + orig, expected = self._state[path] + except KeyError: + self.log.debug("patching resource: %r", path) + orig = current + else: + self.log.debug("modifying resource: %r", path) + if not self._is_same_value(current, expected): + warn("overridding resource another library has patched: %r" + % path, PasslibRuntimeWarning) + self._set_path(path, value) + self._state[path] = (orig, value) + + ##def patch_many(self, **kwds): + ## "override specified resources with new values" + ## for path, value in iteritems(kwds): + ## self.patch(path, value) + + def monkeypatch(self, parent, name=None, enable=True): + "function decorator which patches function of same name in " + def builder(func): + if enable: + sep = "." 
if ":" in parent else ":" + path = parent + sep + (name or func.__name__) + self.patch(path, func) + return func + return builder + + #=================================================================== + # unpatching + #=================================================================== + def unpatch(self, path, unpatch_conflicts=True): + try: + orig, expected = self._state[path] + except KeyError: + return + current = self._get_path(path) + self.log.debug("unpatching resource: %r", path) + if not self._is_same_value(current, expected): + if unpatch_conflicts: + warn("reverting resource another library has patched: %r" + % path, PasslibRuntimeWarning) + else: + warn("not reverting resource another library has patched: %r" + % path, PasslibRuntimeWarning) + del self._state[path] + return + self._set_path(path, orig) + del self._state[path] + + def unpatch_all(self, **kwds): + for key in list(self._state): + self.unpatch(key, **kwds) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/__init__.py b/passlib/handlers/__init__.py new file mode 100644 index 00000000..0a0338c8 --- /dev/null +++ b/passlib/handlers/__init__.py @@ -0,0 +1 @@ +"""passlib.handlers -- holds implementations of all passlib's builtin hash formats""" diff --git a/passlib/handlers/bcrypt.py b/passlib/handlers/bcrypt.py new file mode 100644 index 00000000..42f0eca1 --- /dev/null +++ b/passlib/handlers/bcrypt.py @@ -0,0 +1,457 @@ +"""passlib.bcrypt -- implementation of OpenBSD's BCrypt algorithm. + +TODO: + +* support 2x and altered-2a hashes? 
+ http://www.openwall.com/lists/oss-security/2011/06/27/9 + +* deal with lack of PY3-compatibile c-ext implementation +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement, absolute_import +# core +from base64 import b64encode +from hashlib import sha256 +import os +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +try: + import bcrypt as _bcrypt +except ImportError: # pragma: no cover + _bcrypt = None +try: + from bcryptor.engine import Engine as bcryptor_engine +except ImportError: # pragma: no cover + bcryptor_engine = None +# pkg +from passlib.exc import PasslibHashWarning +from passlib.utils import bcrypt64, safe_crypt, repeat_string, to_bytes, \ + classproperty, rng, getrandstr, test_crypt, to_unicode +from passlib.utils.compat import bytes, b, u, uascii_to_str, unicode, str_to_uascii +import passlib.utils.handlers as uh + +# local +__all__ = [ + "bcrypt", +] + +#============================================================================= +# support funcs & constants +#============================================================================= +_builtin_bcrypt = None + +def _load_builtin(): + global _builtin_bcrypt + if _builtin_bcrypt is None: + from passlib.utils._blowfish import raw_bcrypt as _builtin_bcrypt + +IDENT_2 = u("$2$") +IDENT_2A = u("$2a$") +IDENT_2X = u("$2x$") +IDENT_2Y = u("$2y$") +_BNULL = b('\x00') + +#============================================================================= +# handler +#============================================================================= +class bcrypt(uh.HasManyIdents, uh.HasRounds, uh.HasSalt, uh.HasManyBackends, uh.GenericHandler): + """This class implements the BCrypt password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 22 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 12, must be between 4 and 31, inclusive. + This value is logarithmic, the actual number of iterations used will be :samp:`2**{rounds}` + -- increasing the rounds by +1 will double the amount of time taken. + + :type ident: str + :param ident: + Specifies which version of the BCrypt algorithm will be used when creating a new hash. + Typically this option is not needed, as the default (``"2a"``) is usually the correct choice. + If specified, it must be one of the following: + + * ``"2"`` - the first revision of BCrypt, which suffers from a minor security flaw and is generally not used anymore. + * ``"2a"`` - latest revision of the official BCrypt algorithm, and the current default. + * ``"2y"`` - format specific to the *crypt_blowfish* BCrypt implementation, + identical to ``"2a"`` in all but name. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. 
Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. versionchanged:: 1.6 + This class now supports ``"2y"`` hashes, and recognizes + (but does not support) the broken ``"2x"`` hashes. + (see the :ref:`crypt_blowfish bug ` + for details). + + .. versionchanged:: 1.6 + Added a pure-python backend. + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "bcrypt" + setting_kwds = ("salt", "rounds", "ident") + checksum_size = 31 + checksum_chars = bcrypt64.charmap + + #--HasManyIdents-- + default_ident = IDENT_2A + ident_values = (IDENT_2, IDENT_2A, IDENT_2X, IDENT_2Y) + ident_aliases = {u("2"): IDENT_2, u("2a"): IDENT_2A, u("2y"): IDENT_2Y} + + #--HasSalt-- + min_salt_size = max_salt_size = 22 + salt_chars = bcrypt64.charmap + # NOTE: 22nd salt char must be in bcrypt64._padinfo2[1], not full charmap + + #--HasRounds-- + default_rounds = 12 # current passlib default + min_rounds = 4 # minimum from bcrypt specification + max_rounds = 31 # 32-bit integer limit (since real_rounds=1<[a-z0-9]+) + ,(?P\d{1,2}) + [$](?P[^$]{22}) + ([$](?P.{31}))? + $ + """, re.X) + + @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + if not hash: + return False + return hash.startswith(cls.prefix) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + if not hash.startswith(cls.prefix): + raise uh.exc.InvalidHashError(cls) + m = cls._hash_re.match(hash) + if not m: + raise uh.exc.MalformedHashError(cls) + rounds = m.group("rounds") + if rounds.startswith(uh._UZERO) and rounds != uh._UZERO: + raise uh.exc.ZeroPaddedRoundsError(cls) + return cls(ident=m.group("variant"), + rounds=int(rounds), + salt=m.group("salt"), + checksum=m.group("digest"), + ) + + def to_string(self): + hash = u("%s%s,%d$%s") % (self.prefix, self.ident.strip(_UDOLLAR), + self.rounds, self.salt) + if self.checksum: + hash = u("%s$%s") % (hash, self.checksum) + return uascii_to_str(hash) + + def _calc_checksum(self, secret): + # NOTE: this bypasses bcrypt's _calc_checksum, + # so has to take care of all it's issues, such as secret encoding. + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + # NOTE: can't use digest directly, since bcrypt stops at first NULL. + # NOTE: bcrypt doesn't fully mix entropy for bytes 55-72 of password + # (XXX: citation needed), so we don't want key to be > 55 bytes. + # thus, have to use base64 (44 bytes) rather than hex (64 bytes). + key = b64encode(sha256(secret).digest()) + return self._calc_checksum_backend(key) + + # patch set_backend so it modifies bcrypt class, not this one... + # else it would clobber our _calc_checksum() wrapper above. 
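For readers skimming the diff, the digest-then-bcrypt wrapping described in the comments above can be sketched on its own. This is an illustration only: the helper name is made up, and the real class hands the result to its bcrypt backend rather than returning it.

    from base64 import b64encode
    from hashlib import sha256

    def prehash_for_bcrypt(secret):
        # hypothetical helper: collapse an arbitrary-length secret into a
        # 44-character base64 key, so bcrypt's NUL-byte and length limits
        # never come into play
        if isinstance(secret, str):
            secret = secret.encode("utf-8")
        return b64encode(sha256(secret).digest())

The 44 bytes of base64 output stay below bcrypt's 55-72 byte mixing limit and can never contain a NUL byte, which is exactly why the wrapper base64-encodes instead of using the raw or hex digest.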
+ @classmethod + def set_backend(cls, *args, **kwds): + return bcrypt.set_backend(*args, **kwds) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/cisco.py b/passlib/handlers/cisco.py new file mode 100644 index 00000000..b1d25b51 --- /dev/null +++ b/passlib/handlers/cisco.py @@ -0,0 +1,219 @@ +"""passlib.handlers.cisco - Cisco password hashes""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify, unhexlify +from hashlib import md5 +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import h64, right_pad_string, to_unicode +from passlib.utils.compat import b, bascii_to_str, bytes, unicode, u, join_byte_values, \ + join_byte_elems, byte_elem_value, iter_byte_values, uascii_to_str, str_to_uascii +import passlib.utils.handlers as uh +# local +__all__ = [ + "cisco_pix", + "cisco_type7", +] + +#============================================================================= +# cisco pix firewall hash +#============================================================================= +class cisco_pix(uh.HasUserContext, uh.StaticHandler): + """This class implements the password hash used by Cisco PIX firewalls, + and follows the :ref:`password-hash-api`. + It does a single round of hashing, and relies on the username + as the salt. + + The :meth:`~passlib.ifc.PasswordHash.encrypt`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods + have the following extra keyword: + + :type user: str + :param user: + String containing name of user account this password is associated with. + + This is *required* in order to correctly hash passwords associated + with a user account on the Cisco device, as it is used to salt + the hash. + + Conversely, this *must* be omitted or set to ``""`` in order to correctly + hash passwords which don't have an associated user account + (such as the "enable" password). + """ + #=================================================================== + # class attrs + #=================================================================== + name = "cisco_pix" + checksum_size = 16 + checksum_chars = uh.HASH64_CHARS + + #=================================================================== + # methods + #=================================================================== + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + # XXX: no idea what unicode policy is, but all examples are + # 7-bit ascii compatible, so using UTF-8 + secret = secret.encode("utf-8") + + user = self.user + if user: + # not positive about this, but it looks like per-user + # accounts use the first 4 chars of the username as the salt, + # whereas global "enable" passwords don't have any salt at all. 
+ if isinstance(user, unicode): + user = user.encode("utf-8") + secret += user[:4] + + # null-pad or truncate to 16 bytes + secret = right_pad_string(secret, 16) + + # md5 digest + hash = md5(secret).digest() + + # drop every 4th byte + hash = join_byte_elems(c for i,c in enumerate(hash) if i & 3 < 3) + + # encode using Hash64 + return h64.encode_bytes(hash).decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# type 7 +#============================================================================= +class cisco_type7(uh.GenericHandler): + """This class implements the Type 7 password encoding used by Cisco IOS, + and follows the :ref:`password-hash-api`. + It has a simple 4-5 bit salt, but is nonetheless a reversible encoding + instead of a real hash. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genhash` methods + have the following optional keywords: + + :type salt: int + :param salt: + This may be an optional salt integer drawn from ``range(0,16)``. + If omitted, one will be chosen at random. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` values that are out of range. + + Note that while this class outputs digests in upper-case hexidecimal, + it will accept lower-case as well. + + This class also provides the following additional method: + + .. automethod:: decode + """ + #=================================================================== + # class attrs + #=================================================================== + name = "cisco_type7" + setting_kwds = ("salt",) + checksum_chars = uh.UPPER_HEX_CHARS + + # NOTE: encoding could handle max_salt_value=99, but since key is only 52 + # chars in size, not sure what appropriate behavior is for that edge case. + min_salt_value = 0 + max_salt_value = 52 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def genconfig(cls): + return None + + @classmethod + def genhash(cls, secret, config): + # special case to handle ``config=None`` in same style as StaticHandler + if config is None: + return cls.encrypt(secret) + else: + return super(cisco_type7, cls).genhash(secret, config) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + if len(hash) < 2: + raise uh.exc.InvalidHashError(cls) + salt = int(hash[:2]) # may throw ValueError + return cls(salt=salt, checksum=hash[2:].upper()) + + def __init__(self, salt=None, **kwds): + super(cisco_type7, self).__init__(**kwds) + self.salt = self._norm_salt(salt) + + def _norm_salt(self, salt): + "the salt for this algorithm is an integer 0-52, not a string" + # XXX: not entirely sure that values >15 are valid, so for + # compatibility we don't output those values, but we do accept them. 
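Put together, the cisco_pix recipe above (username as salt, null-padding to 16 bytes, MD5, dropping every fourth byte, hash64 encoding) fits in a few lines. A rough standalone sketch, reusing the same h64 helper the handler imports; the function name is hypothetical:

    from hashlib import md5
    from passlib.utils import h64

    def pix_hash(password, user=""):
        # hypothetical standalone version of cisco_pix._calc_checksum above
        data = password.encode("utf-8") + user.encode("utf-8")[:4]    # username acts as the salt
        data = data[:16].ljust(16, b"\x00")                           # null-pad / truncate to 16 bytes
        digest = md5(data).digest()
        digest = bytes(c for i, c in enumerate(digest) if i & 3 < 3)  # drop every 4th byte
        return h64.encode_bytes(digest).decode("ascii")

Global passwords such as the "enable" password are hashed with user left empty, matching the docstring above.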
+ if salt is None: + if self.use_defaults: + salt = self._generate_salt() + else: + raise TypeError("no salt specified") + if not isinstance(salt, int): + raise uh.exc.ExpectedTypeError(salt, "integer", "salt") + if salt < 0 or salt > self.max_salt_value: + msg = "salt/offset must be in 0..52 range" + if self.relaxed: + warn(msg, uh.PasslibHashWarning) + salt = 0 if salt < 0 else self.max_salt_value + else: + raise ValueError(msg) + return salt + + def _generate_salt(self): + return uh.rng.randint(0, 15) + + def to_string(self): + return "%02d%s" % (self.salt, uascii_to_str(self.checksum)) + + def _calc_checksum(self, secret): + # XXX: no idea what unicode policy is, but all examples are + # 7-bit ascii compatible, so using UTF-8 + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return hexlify(self._cipher(secret, self.salt)).decode("ascii").upper() + + @classmethod + def decode(cls, hash, encoding="utf-8"): + """decode hash, returning original password. + + :arg hash: encoded password + :param encoding: optional encoding to use (defaults to ``UTF-8``). + :returns: password as unicode + """ + self = cls.from_string(hash) + tmp = unhexlify(self.checksum.encode("ascii")) + raw = self._cipher(tmp, self.salt) + return raw.decode(encoding) if encoding else raw + + # type7 uses a xor-based vingere variant, using the following secret key: + _key = u("dsfd;kfoA,.iyewrkldJKDHSUBsgvca69834ncxv9873254k;fg87") + + @classmethod + def _cipher(cls, data, salt): + "xor static key against data - encrypts & decrypts" + key = cls._key + key_size = len(key) + return join_byte_values( + value ^ ord(key[(salt + idx) % key_size]) + for idx, value in enumerate(iter_byte_values(data)) + ) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/des_crypt.py b/passlib/handlers/des_crypt.py new file mode 100644 index 00000000..1699e1d7 --- /dev/null +++ b/passlib/handlers/des_crypt.py @@ -0,0 +1,517 @@ +"""passlib.handlers.des_crypt - traditional unix (DES) crypt and variants""" +#============================================================================= +# imports +#============================================================================= +# core +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import classproperty, h64, h64big, safe_crypt, test_crypt, to_unicode +from passlib.utils.compat import b, bytes, byte_elem_value, u, uascii_to_str, unicode +from passlib.utils.des import des_encrypt_int_block +import passlib.utils.handlers as uh +# local +__all__ = [ + "des_crypt", + "bsdi_crypt", + "bigcrypt", + "crypt16", +] + +#============================================================================= +# pure-python backend for des_crypt family +#============================================================================= +_BNULL = b('\x00') + +def _crypt_secret_to_key(secret): + """convert secret to 64-bit DES key. + + this only uses the first 8 bytes of the secret, + and discards the high 8th bit of each byte at that. + a null parity bit is inserted after every 7th bit of the output. + """ + # NOTE: this would set the parity bits correctly, + # but des_encrypt_int_block() would just ignore them... 
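Because type 7 is a reversible XOR encoding rather than a real hash, a round trip is easy to demonstrate. A minimal sketch mirroring the _key/_cipher logic of cisco_type7 above (function names are made up; as noted there, salts above 15 are accepted but not generated):

    _KEY = "dsfd;kfoA,.iyewrkldJKDHSUBsgvca69834ncxv9873254k;fg87"

    def type7_encode(password, salt=0):
        # illustrative only; mirrors cisco_type7._cipher above
        data = password.encode("utf-8")
        body = "".join("%02X" % (b ^ ord(_KEY[(salt + i) % len(_KEY)]))
                       for i, b in enumerate(data))
        return "%02d%s" % (salt, body)

    def type7_decode(hash):
        salt, body = int(hash[:2]), bytes.fromhex(hash[2:])
        return bytes(b ^ ord(_KEY[(salt + i) % len(_KEY)])
                     for i, b in enumerate(body)).decode("utf-8")

type7_decode(type7_encode("secret", 7)) returns "secret", which is precisely why the class exposes decode() and why these strings should not be treated as password hashes.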
+ ##return sum(expand_7bit(byte_elem_value(c) & 0x7f) << (56-i*8) + ## for i, c in enumerate(secret[:8])) + return sum((byte_elem_value(c) & 0x7f) << (57-i*8) + for i, c in enumerate(secret[:8])) + +def _raw_des_crypt(secret, salt): + "pure-python backed for des_crypt" + assert len(salt) == 2 + + # NOTE: some OSes will accept non-HASH64 characters in the salt, + # but what value they assign these characters varies wildy, + # so just rejecting them outright. + # NOTE: the same goes for single-character salts... + # some OSes duplicate the char, some insert a '.' char, + # and openbsd does something which creates an invalid hash. + try: + salt_value = h64.decode_int12(salt) + except ValueError: # pragma: no cover - always caught by class + raise ValueError("invalid chars in salt") + + # gotta do something - no official policy since this predates unicode + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + assert isinstance(secret, bytes) + + # forbidding NULL char because underlying crypt() rejects them too. + if _BNULL in secret: + raise uh.exc.NullPasswordError(des_crypt) + + # convert first 8 bytes of secret string into an integer + key_value = _crypt_secret_to_key(secret) + + # run data through des using input of 0 + result = des_encrypt_int_block(key_value, 0, salt_value, 25) + + # run h64 encode on result + return h64big.encode_int64(result) + +def _bsdi_secret_to_key(secret): + "covert secret to DES key used by bsdi_crypt" + key_value = _crypt_secret_to_key(secret) + idx = 8 + end = len(secret) + while idx < end: + next = idx+8 + tmp_value = _crypt_secret_to_key(secret[idx:next]) + key_value = des_encrypt_int_block(key_value, key_value) ^ tmp_value + idx = next + return key_value + +def _raw_bsdi_crypt(secret, rounds, salt): + "pure-python backend for bsdi_crypt" + + # decode salt + try: + salt_value = h64.decode_int24(salt) + except ValueError: # pragma: no cover - always caught by class + raise ValueError("invalid salt") + + # gotta do something - no official policy since this predates unicode + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + assert isinstance(secret, bytes) + + # forbidding NULL char because underlying crypt() rejects them too. + if _BNULL in secret: + raise uh.exc.NullPasswordError(bsdi_crypt) + + # convert secret string into an integer + key_value = _bsdi_secret_to_key(secret) + + # run data through des using input of 0 + result = des_encrypt_int_block(key_value, 0, salt_value, rounds) + + # run h64 encode on result + return h64big.encode_int64(result) + +#============================================================================= +# handlers +#============================================================================= +class des_crypt(uh.HasManyBackends, uh.HasSalt, uh.GenericHandler): + """This class implements the des-crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. 
If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "des_crypt" + setting_kwds = ("salt",) + checksum_chars = uh.HASH64_CHARS + checksum_size = 11 + + #--HasSalt-- + min_salt_size = max_salt_size = 2 + salt_chars = uh.HASH64_CHARS + + #=================================================================== + # formatting + #=================================================================== + # FORMAT: 2 chars of H64-encoded salt + 11 chars of H64-encoded checksum + + _hash_regex = re.compile(u(r""" + ^ + (?P[./a-z0-9]{2}) + (?P[./a-z0-9]{11})? + $"""), re.X|re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + salt, chk = hash[:2], hash[2:] + return cls(salt=salt, checksum=chk or None) + + def to_string(self): + hash = u("%s%s") % (self.salt, self.checksum or u('')) + return uascii_to_str(hash) + + #=================================================================== + # backend + #=================================================================== + backends = ("os_crypt", "builtin") + + _has_backend_builtin = True + + @classproperty + def _has_backend_os_crypt(cls): + return test_crypt("test", 'abgOeLfPimXQo') + + def _calc_checksum_builtin(self, secret): + return _raw_des_crypt(secret, self.salt.encode("ascii")).decode("ascii") + + def _calc_checksum_os_crypt(self, secret): + # NOTE: safe_crypt encodes unicode secret -> utf8 + # no official policy since des-crypt predates unicode + hash = safe_crypt(secret, self.salt) + if hash: + assert hash.startswith(self.salt) and len(hash) == 13 + return hash[2:] + else: + return self._calc_checksum_builtin(secret) + + #=================================================================== + # eoc + #=================================================================== + +class bsdi_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the BSDi-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 4 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 5001, must be between 1 and 16777215, inclusive. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + .. versionchanged:: 1.6 + :meth:`encrypt` will now issue a warning if an even number of rounds is used + (see :ref:`bsdi-crypt-security-issues` regarding weak DES keys). 
+ """ + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "bsdi_crypt" + setting_kwds = ("salt", "rounds") + checksum_size = 11 + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + min_salt_size = max_salt_size = 4 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + default_rounds = 5001 + min_rounds = 1 + max_rounds = 16777215 # (1<<24)-1 + rounds_cost = "linear" + + # NOTE: OpenBSD login.conf reports 7250 as minimum allowed rounds, + # but that seems to be an OS policy, not a algorithm limitation. + + #=================================================================== + # parsing + #=================================================================== + _hash_regex = re.compile(u(r""" + ^ + _ + (?P[./a-z0-9]{4}) + (?P[./a-z0-9]{4}) + (?P[./a-z0-9]{11})? + $"""), re.X|re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + rounds, salt, chk = m.group("rounds", "salt", "chk") + return cls( + rounds=h64.decode_int24(rounds.encode("ascii")), + salt=salt, + checksum=chk, + ) + + def to_string(self): + hash = u("_%s%s%s") % (h64.encode_int24(self.rounds).decode("ascii"), + self.salt, self.checksum or u('')) + return uascii_to_str(hash) + + #=================================================================== + # validation + #=================================================================== + + # flag so CryptContext won't generate even rounds. + _avoid_even_rounds = True + + def _norm_rounds(self, rounds): + rounds = super(bsdi_crypt, self)._norm_rounds(rounds) + # issue warning if app provided an even rounds value + if self.use_defaults and not rounds & 1: + warn("bsdi_crypt rounds should be odd, " + "as even rounds may reveal weak DES keys", + uh.exc.PasslibSecurityWarning) + return rounds + + @classmethod + def _bind_needs_update(cls, **settings): + return cls._needs_update + + @classmethod + def _needs_update(cls, hash, secret): + # mark bsdi_crypt hashes as deprecated if they have even rounds. + assert cls.identify(hash) + if isinstance(hash, unicode): + hash = hash.encode("ascii") + rounds = h64.decode_int24(hash[1:5]) + return not rounds & 1 + + #=================================================================== + # backends + #=================================================================== + backends = ("os_crypt", "builtin") + + _has_backend_builtin = True + + @classproperty + def _has_backend_os_crypt(cls): + return test_crypt("test", '_/...lLDAxARksGCHin.') + + def _calc_checksum_builtin(self, secret): + return _raw_bsdi_crypt(secret, self.rounds, self.salt.encode("ascii")).decode("ascii") + + def _calc_checksum_os_crypt(self, secret): + config = self.to_string() + hash = safe_crypt(secret, config) + if hash: + assert hash.startswith(config[:9]) and len(hash) == 20 + return hash[-11:] + else: + return self._calc_checksum_builtin(secret) + + #=================================================================== + # eoc + #=================================================================== + +class bigcrypt(uh.HasSalt, uh.GenericHandler): + """This class implements the BigCrypt password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. 
+ + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 22 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "bigcrypt" + setting_kwds = ("salt",) + checksum_chars = uh.HASH64_CHARS + # NOTE: checksum chars must be multiple of 11 + + #--HasSalt-- + min_salt_size = max_salt_size = 2 + salt_chars = uh.HASH64_CHARS + + #=================================================================== + # internal helpers + #=================================================================== + _hash_regex = re.compile(u(r""" + ^ + (?P[./a-z0-9]{2}) + (?P([./a-z0-9]{11})+)? + $"""), re.X|re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + salt, chk = m.group("salt", "chk") + return cls(salt=salt, checksum=chk) + + def to_string(self): + hash = u("%s%s") % (self.salt, self.checksum or u('')) + return uascii_to_str(hash) + + def _norm_checksum(self, value): + value = super(bigcrypt, self)._norm_checksum(value) + if value and len(value) % 11: + raise uh.exc.InvalidHashError(self) + return value + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + chk = _raw_des_crypt(secret, self.salt.encode("ascii")) + idx = 8 + end = len(secret) + while idx < end: + next = idx + 8 + chk += _raw_des_crypt(secret[idx:next], chk[-11:-9]) + idx = next + return chk.decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +class crypt16(uh.HasSalt, uh.GenericHandler): + """This class implements the crypt16 password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "crypt16" + setting_kwds = ("salt",) + checksum_size = 22 + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + min_salt_size = max_salt_size = 2 + salt_chars = uh.HASH64_CHARS + + #=================================================================== + # internal helpers + #=================================================================== + _hash_regex = re.compile(u(r""" + ^ + (?P[./a-z0-9]{2}) + (?P[./a-z0-9]{22})? + $"""), re.X|re.I) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + salt, chk = m.group("salt", "chk") + return cls(salt=salt, checksum=chk) + + def to_string(self): + hash = u("%s%s") % (self.salt, self.checksum or u('')) + return uascii_to_str(hash) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + + # parse salt value + try: + salt_value = h64.decode_int12(self.salt.encode("ascii")) + except ValueError: # pragma: no cover - caught by class + raise ValueError("invalid chars in salt") + + # convert first 8 byts of secret string into an integer, + key1 = _crypt_secret_to_key(secret) + + # run data through des using input of 0 + result1 = des_encrypt_int_block(key1, 0, salt_value, 20) + + # convert next 8 bytes of secret string into integer (key=0 if secret < 8 chars) + key2 = _crypt_secret_to_key(secret[8:16]) + + # run data through des using input of 0 + result2 = des_encrypt_int_block(key2, 0, salt_value, 5) + + # done + chk = h64big.encode_int64(result1) + h64big.encode_int64(result2) + return chk.decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/digests.py b/passlib/handlers/digests.py new file mode 100644 index 00000000..f1a21bde --- /dev/null +++ b/passlib/handlers/digests.py @@ -0,0 +1,144 @@ +"""passlib.handlers.digests - plain hash digests +""" +#============================================================================= +# imports +#============================================================================= +# core +import hashlib +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_native_str, to_bytes, render_bytes, consteq +from passlib.utils.compat import bascii_to_str, bytes, unicode, str_to_uascii +import passlib.utils.handlers as uh +from passlib.utils.md4 import md4 +# local +__all__ = [ + "create_hex_hash", + "hex_md4", + "hex_md5", + "hex_sha1", + "hex_sha256", + "hex_sha512", +] + +#============================================================================= +# helpers for hexidecimal hashes +#============================================================================= +class HexDigestHash(uh.StaticHandler): + "this provides a template for supporting passwords stored as plain hexidecimal hashes" + 
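As a concrete picture of the plain hex-digest scheme this template covers, a bare hexadecimal hash plus a constant-time comparison can be sketched with the standard library and the consteq helper imported above; the function name is an assumption:

    import hashlib
    from passlib.utils import consteq

    def verify_hex_sha256(password, stored):
        # illustrative: the kind of bare hexadecimal digest these handlers wrap,
        # compared in constant time to avoid timing leaks
        digest = hashlib.sha256(password.encode("utf-8")).hexdigest()
        return consteq(digest, stored.lower())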
#=================================================================== + # class attrs + #=================================================================== + _hash_func = None # hash function to use - filled in by create_hex_hash() + checksum_size = None # filled in by create_hex_hash() + checksum_chars = uh.HEX_CHARS + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(self._hash_func(secret).hexdigest()) + + #=================================================================== + # eoc + #=================================================================== + +def create_hex_hash(hash, digest_name, module=__name__): + # NOTE: could set digest_name=hash.name for cpython, but not for some other platforms. + h = hash() + name = "hex_" + digest_name + return type(name, (HexDigestHash,), dict( + name=name, + __module__=module, # so ABCMeta won't clobber it + _hash_func=staticmethod(hash), # sometimes it's a function, sometimes not. so wrap it. + checksum_size=h.digest_size*2, + __doc__="""This class implements a plain hexidecimal %s hash, and follows the :ref:`password-hash-api`. + +It supports no optional or contextual keywords. +""" % (digest_name,) + )) + +#============================================================================= +# predefined handlers +#============================================================================= +hex_md4 = create_hex_hash(md4, "md4") +hex_md5 = create_hex_hash(hashlib.md5, "md5") +hex_md5.django_name = "unsalted_md5" +hex_sha1 = create_hex_hash(hashlib.sha1, "sha1") +hex_sha256 = create_hex_hash(hashlib.sha256, "sha256") +hex_sha512 = create_hex_hash(hashlib.sha512, "sha512") + +#============================================================================= +# htdigest +#============================================================================= +class htdigest(uh.PasswordHash): + """htdigest hash function. + + .. todo:: + document this hash + """ + name = "htdigest" + setting_kwds = () + context_kwds = ("user", "realm", "encoding") + default_encoding = "utf-8" + + @classmethod + def encrypt(cls, secret, user, realm, encoding=None): + # NOTE: this was deliberately written so that raw bytes are passed through + # unchanged, the encoding kwd is only used to handle unicode values. 
+ if not encoding: + encoding = cls.default_encoding + uh.validate_secret(secret) + if isinstance(secret, unicode): + secret = secret.encode(encoding) + user = to_bytes(user, encoding, "user") + realm = to_bytes(realm, encoding, "realm") + data = render_bytes("%s:%s:%s", user, realm, secret) + return hashlib.md5(data).hexdigest() + + @classmethod + def _norm_hash(cls, hash): + "normalize hash to native string, and validate it" + hash = to_native_str(hash, param="hash") + if len(hash) != 32: + raise uh.exc.MalformedHashError(cls, "wrong size") + for char in hash: + if char not in uh.LC_HEX_CHARS: + raise uh.exc.MalformedHashError(cls, "invalid chars in hash") + return hash + + @classmethod + def verify(cls, secret, hash, user, realm, encoding="utf-8"): + hash = cls._norm_hash(hash) + other = cls.encrypt(secret, user, realm, encoding) + return consteq(hash, other) + + @classmethod + def identify(cls, hash): + try: + cls._norm_hash(hash) + except ValueError: + return False + return True + + @classmethod + def genconfig(cls): + return None + + @classmethod + def genhash(cls, secret, config, user, realm, encoding="utf-8"): + if config is not None: + cls._norm_hash(config) + return cls.encrypt(secret, user, realm, encoding) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/django.py b/passlib/handlers/django.py new file mode 100644 index 00000000..cdb853b8 --- /dev/null +++ b/passlib/handlers/django.py @@ -0,0 +1,472 @@ +"""passlib.handlers.django- Django password hash support""" +#============================================================================= +# imports +#============================================================================= +# core +from base64 import b64encode +from binascii import hexlify +from hashlib import md5, sha1, sha256 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.hash import bcrypt, pbkdf2_sha1, pbkdf2_sha256 +from passlib.utils import to_unicode, classproperty +from passlib.utils.compat import b, bytes, str_to_uascii, uascii_to_str, unicode, u +from passlib.utils.pbkdf2 import pbkdf2 +import passlib.utils.handlers as uh +# local +__all__ = [ + "django_salted_sha1", + "django_salted_md5", + "django_bcrypt", + "django_pbkdf2_sha1", + "django_pbkdf2_sha256", + "django_des_crypt", + "django_disabled", +] + +#============================================================================= +# lazy imports & constants +#============================================================================= + +# imported by django_des_crypt._calc_checksum() +des_crypt = None + +def _import_des_crypt(): + global des_crypt + if des_crypt is None: + from passlib.hash import des_crypt + return des_crypt + +# django 1.4's salt charset +SALT_CHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789' + +#============================================================================= +# salted hashes +#============================================================================= +class DjangoSaltedHash(uh.HasSalt, uh.GenericHandler): + """base class providing common code for django hashes""" + # name, ident, checksum_size must be set by subclass. + # ident must include "$" suffix. + setting_kwds = ("salt", "salt_size") + + min_salt_size = 0 + # NOTE: django 1.0-1.3 would accept empty salt strings. 
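The htdigest scheme above is simply MD5 over "user:realm:password" (the RFC 2617 "HA1" value). A sketch of producing a complete htdigest file line; the helper is hypothetical, and the handler itself returns only the final hex digest:

    from hashlib import md5

    def htdigest_line(user, realm, password):
        # illustrative: one line of an Apache htdigest file
        data = ("%s:%s:%s" % (user, realm, password)).encode("utf-8")
        return "%s:%s:%s" % (user, realm, md5(data).hexdigest())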
+ # django 1.4 won't, but this appears to be regression + # (https://code.djangoproject.com/ticket/18144) + # so presumably it will be fixed in a later release. + default_salt_size = 12 + max_salt_size = None + salt_chars = SALT_CHARS + + checksum_chars = uh.LOWER_HEX_CHARS + + @classproperty + def _stub_checksum(cls): + return cls.checksum_chars[0] * cls.checksum_size + + @classmethod + def from_string(cls, hash): + salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls) + return cls(salt=salt, checksum=chk) + + def to_string(self): + return uh.render_mc2(self.ident, self.salt, + self.checksum or self._stub_checksum) + +class DjangoVariableHash(uh.HasRounds, DjangoSaltedHash): + """base class providing common code for django hashes w/ variable rounds""" + setting_kwds = DjangoSaltedHash.setting_kwds + ("rounds",) + + min_rounds = 1 + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self): + return uh.render_mc3(self.ident, self.rounds, self.salt, + self.checksum or self._stub_checksum) + +class django_salted_sha1(DjangoSaltedHash): + """This class implements Django's Salted SHA1 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and uses a single round of SHA1. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a 12 character one will be autogenerated (this is recommended). + If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 12, but can be any positive value. + + This should be compatible with Django 1.4's :class:`!SHA1PasswordHasher` class. + + .. versionchanged: 1.6 + This class now generates 12-character salts instead of 5, + and generated salts uses the character range ``[0-9a-zA-Z]`` instead of + the ``[0-9a-f]``. This is to be compatible with how Django >= 1.4 + generates these hashes; but hashes generated in this manner will still be + correctly interpreted by earlier versions of Django. + """ + name = "django_salted_sha1" + django_name = "sha1" + ident = u("sha1$") + checksum_size = 40 + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(sha1(self.salt.encode("ascii") + secret).hexdigest()) + +class django_salted_md5(DjangoSaltedHash): + """This class implements Django's Salted MD5 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and uses a single round of MD5. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a 12 character one will be autogenerated (this is recommended). + If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 12, but can be any positive value. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!MD5PasswordHasher` class. + + .. 
versionchanged: 1.6 + This class now generates 12-character salts instead of 5, + and generated salts uses the character range ``[0-9a-zA-Z]`` instead of + the ``[0-9a-f]``. This is to be compatible with how Django >= 1.4 + generates these hashes; but hashes generated in this manner will still be + correctly interpreted by earlier versions of Django. + """ + name = "django_salted_md5" + django_name = "md5" + ident = u("md5$") + checksum_size = 32 + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(md5(self.salt.encode("ascii") + secret).hexdigest()) + +django_bcrypt = uh.PrefixWrapper("django_bcrypt", bcrypt, + prefix=u('bcrypt$'), ident=u("bcrypt$"), + # NOTE: this docstring is duplicated in the docs, since sphinx + # seems to be having trouble reading it via autodata:: + doc="""This class implements Django 1.4's BCrypt wrapper, and follows the :ref:`password-hash-api`. + + This is identical to :class:`!bcrypt` itself, but with + the Django-specific prefix ``"bcrypt$"`` prepended. + + See :doc:`/lib/passlib.hash.bcrypt` for more details, + the usage and behavior is identical. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!BCryptPasswordHasher` class. + + .. versionadded:: 1.6 + """) +django_bcrypt.django_name = "bcrypt" + +class django_bcrypt_sha256(bcrypt): + """This class implements Django 1.6's Bcrypt+SHA256 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + While the algorithm and format is somewhat different, + the api and options for this hash are identical to :class:`!bcrypt` itself, + see :doc:`/lib/passlib.hash.bcrypt` for more details. + + .. versionadded:: 1.6.2 + """ + name = "django_bcrypt_sha256" + django_name = "bcrypt_sha256" + _digest = sha256 + + # NOTE: django bcrypt ident locked at "$2a$", so omitting 'ident' support. + setting_kwds = ("salt", "rounds") + + # sample hash: + # bcrypt_sha256$$2a$06$/3OeRpbOf8/l6nPPRdZPp.nRiyYqPobEZGdNRBWihQhiFDh1ws1tu + + # XXX: we can't use .ident attr due to bcrypt code using it. + # working around that via django_prefix + django_prefix = u('bcrypt_sha256$') + + @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + if not hash: + return False + return hash.startswith(cls.django_prefix) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + if not hash.startswith(cls.django_prefix): + raise uh.exc.InvalidHashError(cls) + bhash = hash[len(cls.django_prefix):] + if not bhash.startswith("$2"): + raise uh.exc.MalformedHashError(cls) + return super(django_bcrypt_sha256, cls).from_string(bhash) + + def __init__(self, **kwds): + if 'ident' in kwds and kwds.get("use_defaults"): + raise TypeError("%s does not support the ident keyword" % + self.__class__.__name__) + return super(django_bcrypt_sha256, self).__init__(**kwds) + + def to_string(self): + bhash = super(django_bcrypt_sha256, self).to_string() + return uascii_to_str(self.django_prefix) + bhash + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + secret = hexlify(self._digest(secret).digest()) + return super(django_bcrypt_sha256, self)._calc_checksum(secret) + + # patch set_backend so it modifies bcrypt class, not this one... + # else it would clobber our _calc_checksum() wrapper above. 
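For reference, the legacy Django salted-SHA1 layout handled above is "sha1$<salt>$<hexdigest>", with the digest taken over salt + password. A standalone sketch, illustrative only:

    from hashlib import sha1

    def django_salted_sha1_hash(password, salt):
        # illustrative: Django's legacy "sha1$<salt>$<hexdigest>" format
        digest = sha1(salt.encode("ascii") + password.encode("utf-8")).hexdigest()
        return "sha1$%s$%s" % (salt, digest)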
+ @classmethod + def set_backend(cls, *args, **kwds): + return bcrypt.set_backend(*args, **kwds) + +class django_pbkdf2_sha256(DjangoVariableHash): + """This class implements Django's PBKDF2-HMAC-SHA256 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a 12 character one will be autogenerated (this is recommended). + If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 12, but can be any positive value. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 20000, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!PBKDF2PasswordHasher` class. + + .. versionadded:: 1.6 + """ + name = "django_pbkdf2_sha256" + django_name = "pbkdf2_sha256" + ident = u('pbkdf2_sha256$') + min_salt_size = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + checksum_chars = uh.PADDED_BASE64_CHARS + checksum_size = 44 # 32 bytes -> base64 + default_rounds = pbkdf2_sha256.default_rounds # NOTE: django 1.6 uses 12000 + _prf = "hmac-sha256" + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + hash = pbkdf2(secret, self.salt.encode("ascii"), self.rounds, + keylen=None, prf=self._prf) + return b64encode(hash).rstrip().decode("ascii") + +class django_pbkdf2_sha1(django_pbkdf2_sha256): + """This class implements Django's PBKDF2-HMAC-SHA1 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a 12 character one will be autogenerated (this is recommended). + If specified, may be any series of characters drawn from the regexp range ``[0-9a-zA-Z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 12, but can be any positive value. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 60000, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!PBKDF2SHA1PasswordHasher` class. 
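The PBKDF2 hashers above follow the same modular pattern, storing "pbkdf2_sha256$<rounds>$<salt>$<base64 digest>". A rough standard-library equivalent of the _calc_checksum shown above; the helper name and the use of hashlib.pbkdf2_hmac in place of passlib.utils.pbkdf2 are illustrative:

    import base64
    import hashlib

    def django_pbkdf2_sha256_hash(password, salt, rounds=20000):
        # illustrative: Django's "pbkdf2_sha256$<rounds>$<salt>$<b64 digest>" layout
        digest = hashlib.pbkdf2_hmac("sha256", password.encode("utf-8"),
                                     salt.encode("ascii"), rounds)
        return "pbkdf2_sha256$%d$%s$%s" % (
            rounds, salt, base64.b64encode(digest).decode("ascii"))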
+ + .. versionadded:: 1.6 + """ + name = "django_pbkdf2_sha1" + django_name = "pbkdf2_sha1" + ident = u('pbkdf2_sha1$') + checksum_size = 28 # 20 bytes -> base64 + default_rounds = pbkdf2_sha1.default_rounds # NOTE: django 1.6 uses 12000 + _prf = "hmac-sha1" + +#============================================================================= +# other +#============================================================================= +class django_des_crypt(uh.HasSalt, uh.GenericHandler): + """This class implements Django's :class:`des_crypt` wrapper, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 2 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + This should be compatible with the hashes generated by + Django 1.4's :class:`!CryptPasswordHasher` class. + Note that Django only supports this hash on Unix systems + (though :class:`!django_des_crypt` is available cross-platform + under Passlib). + + .. versionchanged:: 1.6 + This class will now accept hashes with empty salt strings, + since Django 1.4 generates them this way. + """ + name = "django_des_crypt" + django_name = "crypt" + setting_kwds = ("salt", "salt_size") + ident = u("crypt$") + checksum_chars = salt_chars = uh.HASH64_CHARS + checksum_size = 11 + min_salt_size = default_salt_size = 2 + _stub_checksum = u('.')*11 + + # NOTE: regarding duplicate salt field: + # + # django 1.0 had a "crypt$$" hash format, + # used [a-z0-9] to generate a 5 char salt, stored it in salt1, + # duplicated the first two chars of salt1 as salt2. + # it would throw an error if salt1 was empty. + # + # django 1.4 started generating 2 char salt using the full alphabet, + # left salt1 empty, and only paid attention to salt2. + # + # in order to be compatible with django 1.0, the hashes generated + # by this function will always include salt1, unless the following + # class-level field is disabled (mainly used for testing) + use_duplicate_salt = True + + @classmethod + def from_string(cls, hash): + salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls) + if chk: + # chk should be full des_crypt hash + if not salt: + # django 1.4 always uses empty salt field, + # so extract salt from des_crypt hash + salt = chk[:2] + elif salt[:2] != chk[:2]: + # django 1.0 stored 5 chars in salt field, and duplicated + # the first two chars in . we keep the full salt, + # but make sure the first two chars match as sanity check. 
+ raise uh.exc.MalformedHashError(cls, + "first two digits of salt and checksum must match") + # in all cases, strip salt chars from + chk = chk[2:] + return cls(salt=salt, checksum=chk) + + def to_string(self): + salt = self.salt + chk = salt[:2] + (self.checksum or self._stub_checksum) + if self.use_duplicate_salt: + # filling in salt field, so that we're compatible with django 1.0 + return uh.render_mc2(self.ident, salt, chk) + else: + # django 1.4+ style hash + return uh.render_mc2(self.ident, "", chk) + + def _calc_checksum(self, secret): + # NOTE: we lazily import des_crypt, + # since most django deploys won't use django_des_crypt + global des_crypt + if des_crypt is None: + _import_des_crypt() + return des_crypt(salt=self.salt[:2])._calc_checksum(secret) + +class django_disabled(uh.StaticHandler): + """This class provides disabled password behavior for Django, and follows the :ref:`password-hash-api`. + + This class does not implement a hash, but instead + claims the special hash string ``"!"`` which Django uses + to indicate an account's password has been disabled. + + * newly encrypted passwords will hash to ``"!"``. + * it rejects all passwords. + + .. note:: + + Django 1.6 prepends a randomly generate 40-char alphanumeric string + to each unusuable password. This class recognizes such strings, + but for backwards compatibility, still returns ``"!"``. + + .. versionchanged:: 1.6.2 added Django 1.6 support + """ + name = "django_disabled" + + @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + return hash.startswith(u("!")) + + def _calc_checksum(self, secret): + return u("!") + + @classmethod + def verify(cls, secret, hash): + uh.validate_secret(secret) + if not cls.identify(hash): + raise uh.exc.InvalidHashError(cls) + return False + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/fshp.py b/passlib/handlers/fshp.py new file mode 100644 index 00000000..6efc782e --- /dev/null +++ b/passlib/handlers/fshp.py @@ -0,0 +1,206 @@ +"""passlib.handlers.fshp +""" + +#============================================================================= +# imports +#============================================================================= +# core +from base64 import b64encode, b64decode +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_unicode +import passlib.utils.handlers as uh +from passlib.utils.compat import b, bytes, bascii_to_str, iteritems, u,\ + unicode +from passlib.utils.pbkdf2 import pbkdf1 +# local +__all__ = [ + 'fshp', +] +#============================================================================= +# sha1-crypt +#============================================================================= +class fshp(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the FSHP password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :param salt: + Optional raw salt string. + If not specified, one will be autogenerated (this is recommended). + + :param salt_size: + Optional number of bytes to use when autogenerating new salts. 
+ Defaults to 16 bytes, but can be any non-negative value. + + :param rounds: + Optional number of rounds to use. + Defaults to 100000, must be between 1 and 4294967295, inclusive. + + :param variant: + Optionally specifies variant of FSHP to use. + + * ``0`` - uses SHA-1 digest (deprecated). + * ``1`` - uses SHA-2/256 digest (default). + * ``2`` - uses SHA-2/384 digest. + * ``3`` - uses SHA-2/512 digest. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "fshp" + setting_kwds = ("salt", "salt_size", "rounds", "variant") + checksum_chars = uh.PADDED_BASE64_CHARS + ident = u("{FSHP") + # checksum_size is property() that depends on variant + + #--HasRawSalt-- + default_salt_size = 16 # current passlib default, FSHP uses 8 + min_salt_size = 0 + max_salt_size = None + + #--HasRounds-- + # FIXME: should probably use different default rounds + # based on the variant. setting for default variant (sha256) for now. + default_rounds = 100000 # current passlib default, FSHP uses 4096 + min_rounds = 1 # set by FSHP + max_rounds = 4294967295 # 32-bit integer limit - not set by FSHP + rounds_cost = "linear" + + #--variants-- + default_variant = 1 + _variant_info = { + # variant: (hash name, digest size) + 0: ("sha1", 20), + 1: ("sha256", 32), + 2: ("sha384", 48), + 3: ("sha512", 64), + } + _variant_aliases = dict( + [(unicode(k),k) for k in _variant_info] + + [(v[0],k) for k,v in iteritems(_variant_info)] + ) + + #=================================================================== + # instance attrs + #=================================================================== + variant = None + + #=================================================================== + # init + #=================================================================== + def __init__(self, variant=None, **kwds): + # NOTE: variant must be set first, since it controls checksum size, etc. 
+ self.use_defaults = kwds.get("use_defaults") # load this early + self.variant = self._norm_variant(variant) + super(fshp, self).__init__(**kwds) + + def _norm_variant(self, variant): + if variant is None: + if not self.use_defaults: + raise TypeError("no variant specified") + variant = self.default_variant + if isinstance(variant, bytes): + variant = variant.decode("ascii") + if isinstance(variant, unicode): + try: + variant = self._variant_aliases[variant] + except KeyError: + raise ValueError("invalid fshp variant") + if not isinstance(variant, int): + raise TypeError("fshp variant must be int or known alias") + if variant not in self._variant_info: + raise ValueError("invalid fshp variant") + return variant + + @property + def checksum_alg(self): + return self._variant_info[self.variant][0] + + @property + def checksum_size(self): + return self._variant_info[self.variant][1] + + #=================================================================== + # formatting + #=================================================================== + + _hash_regex = re.compile(u(r""" + ^ + \{FSHP + (\d+)\| # variant + (\d+)\| # salt size + (\d+)\} # rounds + ([a-zA-Z0-9+/]+={0,3}) # digest + $"""), re.X) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + m = cls._hash_regex.match(hash) + if not m: + raise uh.exc.InvalidHashError(cls) + variant, salt_size, rounds, data = m.group(1,2,3,4) + variant = int(variant) + salt_size = int(salt_size) + rounds = int(rounds) + try: + data = b64decode(data.encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + salt = data[:salt_size] + chk = data[salt_size:] + return cls(salt=salt, checksum=chk, rounds=rounds, variant=variant) + + @property + def _stub_checksum(self): + return b('\x00') * self.checksum_size + + def to_string(self): + chk = self.checksum or self._stub_checksum + salt = self.salt + data = bascii_to_str(b64encode(salt+chk)) + return "{FSHP%d|%d|%d}%s" % (self.variant, len(salt), self.rounds, data) + + #=================================================================== + # backend + #=================================================================== + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + # NOTE: for some reason, FSHP uses pbkdf1 with password & salt reversed. + # this has only a minimal impact on security, + # but it is worth noting this deviation. 
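+        # Restated informally (a sketch of the note above, not additional
+        # behavior): the value computed below is PBKDF1 run with the salt in
+        # the "password" slot and the encoded password in the "salt" slot,
+        # i.e. the two inputs are deliberately swapped to match FSHP.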
+        return pbkdf1(
+            secret=self.salt,
+            salt=secret,
+            rounds=self.rounds,
+            keylen=self.checksum_size,
+            hash=self.checksum_alg,
+            )
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/passlib/handlers/ldap_digests.py b/passlib/handlers/ldap_digests.py
new file mode 100644
index 00000000..a25a3946
--- /dev/null
+++ b/passlib/handlers/ldap_digests.py
@@ -0,0 +1,270 @@
+"""passlib.handlers.digests - plain hash digests
+"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+from base64 import b64encode, b64decode
+from hashlib import md5, sha1
+import logging; log = logging.getLogger(__name__)
+import re
+from warnings import warn
+# site
+# pkg
+from passlib.handlers.misc import plaintext
+from passlib.utils import to_native_str, unix_crypt_schemes, \
+    classproperty, to_unicode
+from passlib.utils.compat import b, bytes, uascii_to_str, unicode, u
+import passlib.utils.handlers as uh
+# local
+__all__ = [
+    "ldap_plaintext",
+    "ldap_md5",
+    "ldap_sha1",
+    "ldap_salted_md5",
+    "ldap_salted_sha1",
+
+    ##"get_active_ldap_crypt_schemes",
+    "ldap_des_crypt",
+    "ldap_bsdi_crypt",
+    "ldap_md5_crypt",
+    "ldap_sha1_crypt",
+    "ldap_bcrypt",
+    "ldap_sha256_crypt",
+    "ldap_sha512_crypt",
+]
+
+#=============================================================================
+# ldap helpers
+#=============================================================================
+class _Base64DigestHelper(uh.StaticHandler):
+    "helper for ldap_md5 / ldap_sha1"
+    # XXX: could combine this with hex digests in digests.py
+
+    ident = None # required - prefix identifier
+    _hash_func = None # required - hash function
+    _hash_regex = None # required - regexp to recognize hash
+    checksum_chars = uh.PADDED_BASE64_CHARS
+
+    @classproperty
+    def _hash_prefix(cls):
+        "tell StaticHandler to strip ident from checksum"
+        return cls.ident
+
+    def _calc_checksum(self, secret):
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+        chk = self._hash_func(secret).digest()
+        return b64encode(chk).decode("ascii")
+
+class _SaltedBase64DigestHelper(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler):
+    "helper for ldap_salted_md5 / ldap_salted_sha1"
+    setting_kwds = ("salt", "salt_size")
+    checksum_chars = uh.PADDED_BASE64_CHARS
+
+    ident = None # required - prefix identifier
+    checksum_size = None # required
+    _hash_func = None # required - hash function
+    _hash_regex = None # required - regexp to recognize hash
+    _stub_checksum = None # required - default checksum to plug in
+    min_salt_size = max_salt_size = 4
+
+    # NOTE: openldap implementation uses 4 byte salt,
+    # but it's been reported (issue 30) that some servers use larger salts.
+    # the semi-related rfc3112 recommends support for up to 16 byte salts.
+    min_salt_size = 4
+    default_salt_size = 4
+    max_salt_size = 16
+
+    @classmethod
+    def from_string(cls, hash):
+        hash = to_unicode(hash, "ascii", "hash")
+        m = cls._hash_regex.match(hash)
+        if not m:
+            raise uh.exc.InvalidHashError(cls)
+        try:
+            data = b64decode(m.group("tmp").encode("ascii"))
+        except TypeError:
+            raise uh.exc.MalformedHashError(cls)
+        cs = cls.checksum_size
+        assert cs
+        return cls(checksum=data[:cs], salt=data[cs:])
+
+    def to_string(self):
+        data = (self.checksum or self._stub_checksum) + self.salt
+        hash = self.ident + b64encode(data).decode("ascii")
+        return uascii_to_str(hash)
+
+    def _calc_checksum(self, secret):
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+        return self._hash_func(secret + self.salt).digest()
+
+#=============================================================================
+# implementations
+#=============================================================================
+class ldap_md5(_Base64DigestHelper):
+    """This class stores passwords using LDAP's plain MD5 format, and follows the :ref:`password-hash-api`.
+
+    The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods have no optional keywords.
+    """
+    name = "ldap_md5"
+    ident = u("{MD5}")
+    _hash_func = md5
+    _hash_regex = re.compile(u(r"^\{MD5\}(?P<tmp>[+/a-zA-Z0-9]{22}==)$"))
+
+class ldap_sha1(_Base64DigestHelper):
+    """This class stores passwords using LDAP's plain SHA1 format, and follows the :ref:`password-hash-api`.
+
+    The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods have no optional keywords.
+    """
+    name = "ldap_sha1"
+    ident = u("{SHA}")
+    _hash_func = sha1
+    _hash_regex = re.compile(u(r"^\{SHA\}(?P<tmp>[+/a-zA-Z0-9]{27}=)$"))
+
+class ldap_salted_md5(_SaltedBase64DigestHelper):
+    """This class stores passwords using LDAP's salted MD5 format, and follows the :ref:`password-hash-api`.
+
+    It supports a 4-16 byte salt.
+
+    The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keyword:
+
+    :type salt: bytes
+    :param salt:
+        Optional salt string.
+        If not specified, one will be autogenerated (this is recommended).
+        If specified, it may be any 4-16 byte string.
+
+    :type salt_size: int
+    :param salt_size:
+        Optional number of bytes to use when autogenerating new salts.
+        Defaults to 4 bytes for compatibility with the LDAP spec,
+        but some systems use larger salts, and Passlib supports
+        any value between 4-16.
+
+    :type relaxed: bool
+    :param relaxed:
+        By default, providing an invalid value for one of the other
+        keywords will result in a :exc:`ValueError`. If ``relaxed=True``,
+        and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
+        will be issued instead. Correctable errors include
+        ``salt`` strings that are too long.
+
+        .. versionadded:: 1.6
+
+    .. versionchanged:: 1.6
+        This format now supports variable length salts, instead of a fixed 4 bytes.
+    """
+    name = "ldap_salted_md5"
+    ident = u("{SMD5}")
+    checksum_size = 16
+    _hash_func = md5
+    _hash_regex = re.compile(u(r"^\{SMD5\}(?P<tmp>[+/a-zA-Z0-9]{27,}={0,2})$"))
+    _stub_checksum = b('\x00') * 16
+
+class ldap_salted_sha1(_SaltedBase64DigestHelper):
+    """This class stores passwords using LDAP's salted SHA1 format, and follows the :ref:`password-hash-api`.
+
+    It supports a 4-16 byte salt.
+
+    The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keyword:
+
+    :type salt: bytes
+    :param salt:
+        Optional salt string.
+        If not specified, one will be autogenerated (this is recommended).
+        If specified, it may be any 4-16 byte string.
+
+    :type salt_size: int
+    :param salt_size:
+        Optional number of bytes to use when autogenerating new salts.
+        Defaults to 4 bytes for compatibility with the LDAP spec,
+        but some systems use larger salts, and Passlib supports
+        any value between 4-16.
+
+    :type relaxed: bool
+    :param relaxed:
+        By default, providing an invalid value for one of the other
+        keywords will result in a :exc:`ValueError`. If ``relaxed=True``,
+        and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
+        will be issued instead. Correctable errors include
+        ``salt`` strings that are too long.
+
+        .. versionadded:: 1.6
+
+    .. versionchanged:: 1.6
+        This format now supports variable length salts, instead of a fixed 4 bytes.
+    """
+    name = "ldap_salted_sha1"
+    ident = u("{SSHA}")
+    checksum_size = 20
+    _hash_func = sha1
+    _hash_regex = re.compile(u(r"^\{SSHA\}(?P<tmp>[+/a-zA-Z0-9]{32,}={0,2})$"))
+    _stub_checksum = b('\x00') * 20
+
+class ldap_plaintext(plaintext):
+    """This class stores passwords in plaintext, and follows the :ref:`password-hash-api`.
+
+    This class acts much like the generic :class:`!passlib.hash.plaintext` handler,
+    except that it will identify a hash only if it does NOT begin with the ``{XXX}`` identifier prefix
+    used by RFC2307 passwords.
+
+    The :meth:`~passlib.ifc.PasswordHash.encrypt`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the
+    following additional contextual keyword:
+
+    :type encoding: str
+    :param encoding:
+        This controls the character encoding to use (defaults to ``utf-8``).
+
+        This encoding will be used to encode :class:`!unicode` passwords
+        under Python 2, and decode :class:`!bytes` hashes under Python 3.
+
+    .. versionchanged:: 1.6
+        The ``encoding`` keyword was added.
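+
+    A brief sketch of the identification rule described above (method names
+    follow the standard passlib handler API; shown only for illustration)::
+
+        >>> from passlib.hash import ldap_plaintext
+        >>> ldap_plaintext.identify("secret")        # no {XXX} prefix -> claimed
+        True
+        >>> ldap_plaintext.identify("{SSHA}AAAA")    # RFC2307-style prefix -> not claimed
+        False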
+ """ + # NOTE: this subclasses plaintext, since all it does differently + # is override identify() + + name = "ldap_plaintext" + _2307_pat = re.compile(u(r"^\{\w+\}.*$")) + + @classmethod + def identify(cls, hash): + # NOTE: identifies all strings EXCEPT those with {XXX} prefix + hash = uh.to_unicode_for_identify(hash) + return bool(hash) and cls._2307_pat.match(hash) is None + +#============================================================================= +# {CRYPT} wrappers +# the following are wrappers around the base crypt algorithms, +# which add the ldap required {CRYPT} prefix +#============================================================================= +ldap_crypt_schemes = [ 'ldap_' + name for name in unix_crypt_schemes ] + +def _init_ldap_crypt_handlers(): + # NOTE: I don't like to implicitly modify globals() like this, + # but don't want to write out all these handlers out either :) + g = globals() + for wname in unix_crypt_schemes: + name = 'ldap_' + wname + g[name] = uh.PrefixWrapper(name, wname, prefix=u("{CRYPT}"), lazy=True) + del g +_init_ldap_crypt_handlers() + +##_lcn_host = None +##def get_host_ldap_crypt_schemes(): +## global _lcn_host +## if _lcn_host is None: +## from passlib.hosts import host_context +## schemes = host_context.schemes() +## _lcn_host = [ +## "ldap_" + name +## for name in unix_crypt_names +## if name in schemes +## ] +## return _lcn_host + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/md5_crypt.py b/passlib/handlers/md5_crypt.py new file mode 100644 index 00000000..642316e8 --- /dev/null +++ b/passlib/handlers/md5_crypt.py @@ -0,0 +1,333 @@ +"""passlib.handlers.md5_crypt - md5-crypt algorithm""" +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import md5 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import classproperty, h64, safe_crypt, test_crypt, repeat_string +from passlib.utils.compat import b, bytes, irange, unicode, u +import passlib.utils.handlers as uh +# local +__all__ = [ + "md5_crypt", + "apr_md5_crypt", +] + +#============================================================================= +# pure-python backend +#============================================================================= +_BNULL = b("\x00") +_MD5_MAGIC = b("$1$") +_APR_MAGIC = b("$apr1$") + +# pre-calculated offsets used to speed up C digest stage (see notes below). 
+# sequence generated using the following: + ##perms_order = "p,pp,ps,psp,sp,spp".split(",") + ##def offset(i): + ## key = (("p" if i % 2 else "") + ("s" if i % 3 else "") + + ## ("p" if i % 7 else "") + ("" if i % 2 else "p")) + ## return perms_order.index(key) + ##_c_digest_offsets = [(offset(i), offset(i+1)) for i in range(0,42,2)] +_c_digest_offsets = ( + (0, 3), (5, 1), (5, 3), (1, 2), (5, 1), (5, 3), (1, 3), + (4, 1), (5, 3), (1, 3), (5, 0), (5, 3), (1, 3), (5, 1), + (4, 3), (1, 3), (5, 1), (5, 2), (1, 3), (5, 1), (5, 3), + ) + +# map used to transpose bytes when encoding final digest +_transpose_map = (12, 6, 0, 13, 7, 1, 14, 8, 2, 15, 9, 3, 5, 10, 4, 11) + +def _raw_md5_crypt(pwd, salt, use_apr=False): + """perform raw md5-crypt calculation + + this function provides a pure-python implementation of the internals + for the MD5-Crypt algorithms; it doesn't handle any of the + parsing/validation of the hash strings themselves. + + :arg pwd: password chars/bytes to encrypt + :arg salt: salt chars to use + :arg use_apr: use apache variant + + :returns: + encoded checksum chars + """ + # NOTE: regarding 'apr' format: + # really, apache? you had to invent a whole new "$apr1$" format, + # when all you did was change the ident incorporated into the hash? + # would love to find webpage explaining why just using a portable + # implementation of $1$ wasn't sufficient. *nothing else* was changed. + + #=================================================================== + # init & validate inputs + #=================================================================== + + # validate secret + # XXX: not sure what official unicode policy is, using this as default + if isinstance(pwd, unicode): + pwd = pwd.encode("utf-8") + assert isinstance(pwd, bytes), "pwd not unicode or bytes" + if _BNULL in pwd: + raise uh.exc.NullPasswordError(md5_crypt) + pwd_len = len(pwd) + + # validate salt - should have been taken care of by caller + assert isinstance(salt, unicode), "salt not unicode" + salt = salt.encode("ascii") + assert len(salt) < 9, "salt too large" + # NOTE: spec says salts larger than 8 bytes should be truncated, + # instead of causing an error. this function assumes that's been + # taken care of by the handler class. + + # load APR specific constants + if use_apr: + magic = _APR_MAGIC + else: + magic = _MD5_MAGIC + + #=================================================================== + # digest B - used as subinput to digest A + #=================================================================== + db = md5(pwd + salt + pwd).digest() + + #=================================================================== + # digest A - used to initialize first round of digest C + #=================================================================== + # start out with pwd + magic + salt + a_ctx = md5(pwd + magic + salt) + a_ctx_update = a_ctx.update + + # add pwd_len bytes of b, repeating b as many times as needed. + a_ctx_update(repeat_string(db, pwd_len)) + + # add null chars & first char of password + # NOTE: this may have historically been a bug, + # where they meant to use db[0] instead of B_NULL, + # but the original code memclear'ed db, + # and now all implementations have to use this. + i = pwd_len + evenchar = pwd[:1] + while i: + a_ctx_update(_BNULL if i & 1 else evenchar) + i >>= 1 + + # finish A + da = a_ctx.digest() + + #=================================================================== + # digest C - for a 1000 rounds, combine A, S, and P + # digests in various ways; in order to burn CPU time. 
+ #=================================================================== + + # NOTE: the original MD5-Crypt implementation performs the C digest + # calculation using the following loop: + # + ##dc = da + ##i = 0 + ##while i < rounds: + ## tmp_ctx = md5(pwd if i & 1 else dc) + ## if i % 3: + ## tmp_ctx.update(salt) + ## if i % 7: + ## tmp_ctx.update(pwd) + ## tmp_ctx.update(dc if i & 1 else pwd) + ## dc = tmp_ctx.digest() + ## i += 1 + # + # The code Passlib uses (below) implements an equivalent algorithm, + # it's just been heavily optimized to pre-calculate a large number + # of things beforehand. It works off of a couple of observations + # about the original algorithm: + # + # 1. each round is a combination of 'dc', 'salt', and 'pwd'; and the exact + # combination is determined by whether 'i' a multiple of 2,3, and/or 7. + # 2. since lcm(2,3,7)==42, the series of combinations will repeat + # every 42 rounds. + # 3. even rounds 0-40 consist of 'hash(dc + round-specific-constant)'; + # while odd rounds 1-41 consist of hash(round-specific-constant + dc) + # + # Using these observations, the following code... + # * calculates the round-specific combination of salt & pwd for each round 0-41 + # * runs through as many 42-round blocks as possible (23) + # * runs through as many pairs of rounds as needed for remaining rounds (17) + # * this results in the required 42*23+2*17=1000 rounds required by md5_crypt. + # + # this cuts out a lot of the control overhead incurred when running the + # original loop 1000 times in python, resulting in ~20% increase in + # speed under CPython (though still 2x slower than glibc crypt) + + # prepare the 6 combinations of pwd & salt which are needed + # (order of 'perms' must match how _c_digest_offsets was generated) + pwd_pwd = pwd+pwd + pwd_salt = pwd+salt + perms = [pwd, pwd_pwd, pwd_salt, pwd_salt+pwd, salt+pwd, salt+pwd_pwd] + + # build up list of even-round & odd-round constants, + # and store in 21-element list as (even,odd) pairs. 
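+    # NOTE: each (even, odd) pair below covers two rounds, so the 21 pairs
+    # in _c_digest_offsets make up one 42-round cycle; the loops that follow
+    # run 23 such cycles (966 rounds) plus 17 leftover pairs (34 rounds),
+    # giving the fixed 1000 rounds md5-crypt requires (42*23 + 2*17 = 1000).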
+ data = [ (perms[even], perms[odd]) for even, odd in _c_digest_offsets] + + # perform 23 blocks of 42 rounds each (for a total of 966 rounds) + dc = da + blocks = 23 + while blocks: + for even, odd in data: + dc = md5(odd + md5(dc + even).digest()).digest() + blocks -= 1 + + # perform 17 more pairs of rounds (34 more rounds, for a total of 1000) + for even, odd in data[:17]: + dc = md5(odd + md5(dc + even).digest()).digest() + + #=================================================================== + # encode digest using appropriate transpose map + #=================================================================== + return h64.encode_transposed_bytes(dc, _transpose_map).decode("ascii") + +#============================================================================= +# handler +#============================================================================= +class _MD5_Common(uh.HasSalt, uh.GenericHandler): + "common code for md5_crypt and apr_md5_crypt" + #=================================================================== + # class attrs + #=================================================================== + # name - set in subclass + setting_kwds = ("salt", "salt_size") + # ident - set in subclass + checksum_size = 22 + checksum_chars = uh.HASH64_CHARS + + min_salt_size = 0 + max_salt_size = 8 + salt_chars = uh.HASH64_CHARS + + #=================================================================== + # methods + #=================================================================== + + @classmethod + def from_string(cls, hash): + salt, chk = uh.parse_mc2(hash, cls.ident, handler=cls) + return cls(salt=salt, checksum=chk) + + def to_string(self): + return uh.render_mc2(self.ident, self.salt, self.checksum) + + # _calc_checksum() - provided by subclass + + #=================================================================== + # eoc + #=================================================================== + +class md5_crypt(uh.HasManyBackends, _MD5_Common): + """This class implements the MD5-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type salt_size: int + :param salt_size: + Optional number of characters to use when autogenerating new salts. + Defaults to 8, but can be any value between 0 and 8. + (This is mainly needed when generating Cisco-compatible hashes, + which require ``salt_size=4``). + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + name = "md5_crypt" + ident = u("$1$") + + #=================================================================== + # methods + #=================================================================== + # FIXME: can't find definitive policy on how md5-crypt handles non-ascii. + # all backends currently coerce -> utf-8 + + backends = ("os_crypt", "builtin") + + _has_backend_builtin = True + + @classproperty + def _has_backend_os_crypt(cls): + return test_crypt("test", '$1$test$pi/xDtU5WFVRqYS6BMU8X/') + + def _calc_checksum_builtin(self, secret): + return _raw_md5_crypt(secret, self.salt) + + def _calc_checksum_os_crypt(self, secret): + config = self.ident + self.salt + hash = safe_crypt(secret, config) + if hash: + assert hash.startswith(config) and len(hash) == len(config) + 23 + return hash[-22:] + else: + return self._calc_checksum_builtin(secret) + + #=================================================================== + # eoc + #=================================================================== + +class apr_md5_crypt(_MD5_Common): + """This class implements the Apr-MD5-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + name = "apr_md5_crypt" + ident = u("$apr1$") + + #=================================================================== + # methods + #=================================================================== + def _calc_checksum(self, secret): + return _raw_md5_crypt(secret, self.salt, use_apr=True) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/misc.py b/passlib/handlers/misc.py new file mode 100644 index 00000000..e7f8fe1a --- /dev/null +++ b/passlib/handlers/misc.py @@ -0,0 +1,242 @@ +"""passlib.handlers.misc - misc generic handlers +""" +#============================================================================= +# imports +#============================================================================= +# core +import sys +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_native_str, consteq +from passlib.utils.compat import bytes, unicode, u, b, base_string_types +import passlib.utils.handlers as uh +# local +__all__ = [ + "unix_disabled", + "unix_fallback", + "plaintext", +] + +#============================================================================= +# handler +#============================================================================= +class unix_fallback(uh.StaticHandler): + """This class provides the fallback behavior for unix shadow files, and follows the :ref:`password-hash-api`. + + This class does not implement a hash, but instead provides fallback + behavior as found in /etc/shadow on most unix variants. + If used, should be the last scheme in the context. + + * this class will positive identify all hash strings. + * for security, newly encrypted passwords will hash to ``!``. + * it rejects all passwords if the hash is NOT an empty string (``!`` or ``*`` are frequently used). + * by default it rejects all passwords if the hash is an empty string, + but if ``enable_wildcard=True`` is passed to verify(), + all passwords will be allowed through if the hash is an empty string. + + .. deprecated:: 1.6 + This has been deprecated due to it's "wildcard" feature, + and will be removed in Passlib 1.8. Use :class:`unix_disabled` instead. 
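+
+    A rough sketch of the verify() behavior described above (names follow
+    the standard passlib handler API; shown only for illustration)::
+
+        >>> from passlib.hash import unix_fallback
+        >>> unix_fallback.verify("password", "!")    # non-empty hash -> reject
+        False
+        >>> unix_fallback.verify("password", "", enable_wildcard=True)
+        True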
+ """ + name = "unix_fallback" + context_kwds = ("enable_wildcard",) + + @classmethod + def identify(cls, hash): + if isinstance(hash, base_string_types): + return True + else: + raise uh.exc.ExpectedStringError(hash, "hash") + + def __init__(self, enable_wildcard=False, **kwds): + warn("'unix_fallback' is deprecated, " + "and will be removed in Passlib 1.8; " + "please use 'unix_disabled' instead.", + DeprecationWarning) + super(unix_fallback, self).__init__(**kwds) + self.enable_wildcard = enable_wildcard + + @classmethod + def genhash(cls, secret, config): + # override default to preserve checksum + if config is None: + return cls.encrypt(secret) + else: + uh.validate_secret(secret) + self = cls.from_string(config) + self.checksum = self._calc_checksum(secret) + return self.to_string() + + def _calc_checksum(self, secret): + if self.checksum: + # NOTE: hash will generally be "!", but we want to preserve + # it in case it's something else, like "*". + return self.checksum + else: + return u("!") + + @classmethod + def verify(cls, secret, hash, enable_wildcard=False): + uh.validate_secret(secret) + if not isinstance(hash, base_string_types): + raise uh.exc.ExpectedStringError(hash, "hash") + elif hash: + return False + else: + return enable_wildcard + +_MARKER_CHARS = u("*!") +_MARKER_BYTES = b("*!") + +class unix_disabled(uh.PasswordHash): + """This class provides disabled password behavior for unix shadow files, + and follows the :ref:`password-hash-api`. + + This class does not implement a hash, but instead matches the "disabled account" + strings found in ``/etc/shadow`` on most Unix variants. "encrypting" a password + will simply return the disabled account marker. It will reject all passwords, + no matter the hash string. The :meth:`~passlib.ifc.PasswordHash.encrypt` + method supports one optional keyword: + + :type marker: str + :param marker: + Optional marker string which overrides the platform default + used to indicate a disabled account. + + If not specified, this will default to ``"*"`` on BSD systems, + and use the Linux default ``"!"`` for all other platforms. + (:attr:`!unix_disabled.default_marker` will contain the default value) + + .. versionadded:: 1.6 + This class was added as a replacement for the now-deprecated + :class:`unix_fallback` class, which had some undesirable features. + """ + name = "unix_disabled" + setting_kwds = ("marker",) + context_kwds = () + + if 'bsd' in sys.platform: # pragma: no cover -- runtime detection + default_marker = u("*") + else: + # use the linux default for other systems + # (glibc also supports adding old hash after the marker + # so it can be restored later). + default_marker = u("!") + + @classmethod + def identify(cls, hash): + # NOTE: technically, anything in the /etc/shadow password field + # which isn't valid crypt() output counts as "disabled". + # but that's rather ambiguous, and it's hard to predict what + # valid output is for unknown crypt() implementations. + # so to be on the safe side, we only match things *known* + # to be disabled field indicators, and will add others + # as they are found. things beginning w/ "$" should *never* match. + # + # things currently matched: + # * linux uses "!" + # * bsd uses "*" + # * linux may use "!" 
+ hash to disable but preserve original hash + # * linux counts empty string as "any password" + if isinstance(hash, unicode): + start = _MARKER_CHARS + elif isinstance(hash, bytes): + start = _MARKER_BYTES + else: + raise uh.exc.ExpectedStringError(hash, "hash") + return not hash or hash[0] in start + + @classmethod + def encrypt(cls, secret, marker=None): + return cls.genhash(secret, None, marker) + + @classmethod + def verify(cls, secret, hash): + uh.validate_secret(secret) + if not cls.identify(hash): # handles typecheck + raise uh.exc.InvalidHashError(cls) + return False + + @classmethod + def genconfig(cls): + return None + + @classmethod + def genhash(cls, secret, config, marker=None): + uh.validate_secret(secret) + if config is not None and not cls.identify(config): # handles typecheck + raise uh.exc.InvalidHashError(cls) + if config: + # we want to preserve the existing str, + # since it might contain a disabled password hash ("!" + hash) + return to_native_str(config, param="config") + # if None or empty string, replace with marker + if marker: + if not cls.identify(marker): + raise ValueError("invalid marker: %r" % marker) + else: + marker = cls.default_marker + assert marker and cls.identify(marker) + return to_native_str(marker, param="marker") + +class plaintext(uh.PasswordHash): + """This class stores passwords in plaintext, and follows the :ref:`password-hash-api`. + + The :meth:`~passlib.ifc.PasswordHash.encrypt`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the + following additional contextual keyword: + + :type encoding: str + :param encoding: + This controls the character encoding to use (defaults to ``utf-8``). + + This encoding will be used to encode :class:`!unicode` passwords + under Python 2, and decode :class:`!bytes` hashes under Python 3. + + .. versionchanged:: 1.6 + The ``encoding`` keyword was added. + """ + # NOTE: this is subclassed by ldap_plaintext + + name = "plaintext" + setting_kwds = () + context_kwds = ("encoding",) + default_encoding = "utf-8" + + @classmethod + def identify(cls, hash): + if isinstance(hash, base_string_types): + return True + else: + raise uh.exc.ExpectedStringError(hash, "hash") + + @classmethod + def encrypt(cls, secret, encoding=None): + uh.validate_secret(secret) + if not encoding: + encoding = cls.default_encoding + return to_native_str(secret, encoding, "secret") + + @classmethod + def verify(cls, secret, hash, encoding=None): + if not encoding: + encoding = cls.default_encoding + hash = to_native_str(hash, encoding, "hash") + if not cls.identify(hash): + raise uh.exc.InvalidHashError(cls) + return consteq(cls.encrypt(secret, encoding), hash) + + @classmethod + def genconfig(cls): + return None + + @classmethod + def genhash(cls, secret, hash, encoding=None): + if hash is not None and not cls.identify(hash): + raise uh.exc.InvalidHashError(cls) + return cls.encrypt(secret, encoding) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/mssql.py b/passlib/handlers/mssql.py new file mode 100644 index 00000000..1d892732 --- /dev/null +++ b/passlib/handlers/mssql.py @@ -0,0 +1,246 @@ +"""passlib.handlers.mssql - MS-SQL Password Hash + +Notes +===== +MS-SQL has used a number of hash algs over the years, +most of which were exposed through the undocumented +'pwdencrypt' and 'pwdcompare' sql functions. 
+
+Known formats
+-------------
+6.5
+    snefru hash, ascii encoded password
+    no examples found
+
+7.0
+    snefru hash, unicode (what encoding?)
+    saw ref that these blobs were 16 bytes in size
+    no examples found
+
+2000
+    byte string displayed as 0x hex, using 0x0100 prefix.
+    contains hashes of password and upper-case password.
+
+2005
+    same as 2000, but without the upper-case hash.
+
+refs
+----------
+https://blogs.msdn.com/b/lcris/archive/2007/04/30/sql-server-2005-about-login-password-hashes.aspx?Redirected=true
+http://us.generation-nt.com/securing-passwords-hash-help-35429432.html
+http://forum.md5decrypter.co.uk/topic230-mysql-and-mssql-get-password-hashes.aspx
+http://www.theregister.co.uk/2002/07/08/cracking_ms_sql_server_passwords/
+"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+from binascii import hexlify, unhexlify
+from hashlib import sha1
+import re
+import logging; log = logging.getLogger(__name__)
+from warnings import warn
+# site
+# pkg
+from passlib.utils import consteq
+from passlib.utils.compat import b, bytes, bascii_to_str, unicode, u
+import passlib.utils.handlers as uh
+# local
+__all__ = [
+    "mssql2000",
+    "mssql2005",
+]
+
+#=============================================================================
+# mssql 2000
+#=============================================================================
+def _raw_mssql(secret, salt):
+    assert isinstance(secret, unicode)
+    assert isinstance(salt, bytes)
+    return sha1(secret.encode("utf-16-le") + salt).digest()
+
+BIDENT = b("0x0100")
+##BIDENT2 = b("\x01\x00")
+UIDENT = u("0x0100")
+
+def _ident_mssql(hash, csize, bsize):
+    "common identify for mssql 2000/2005"
+    if isinstance(hash, unicode):
+        if len(hash) == csize and hash.startswith(UIDENT):
+            return True
+    elif isinstance(hash, bytes):
+        if len(hash) == csize and hash.startswith(BIDENT):
+            return True
+        ##elif len(hash) == bsize and hash.startswith(BIDENT2): # raw bytes
+        ##    return True
+    else:
+        raise uh.exc.ExpectedStringError(hash, "hash")
+    return False
+
+def _parse_mssql(hash, csize, bsize, handler):
+    "common parser for mssql 2000/2005; returns 4 byte salt + checksum"
+    if isinstance(hash, unicode):
+        if len(hash) == csize and hash.startswith(UIDENT):
+            try:
+                return unhexlify(hash[6:].encode("utf-8"))
+            except TypeError: # throw when bad char found
+                pass
+    elif isinstance(hash, bytes):
+        # assumes ascii-compat encoding
+        assert isinstance(hash, bytes)
+        if len(hash) == csize and hash.startswith(BIDENT):
+            try:
+                return unhexlify(hash[6:])
+            except TypeError: # throw when bad char found
+                pass
+        ##elif len(hash) == bsize and hash.startswith(BIDENT2): # raw bytes
+        ##    return hash[2:]
+    else:
+        raise uh.exc.ExpectedStringError(hash, "hash")
+    raise uh.exc.InvalidHashError(handler)
+
+class mssql2000(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler):
+    """This class implements the password hash used by MS-SQL 2000, and follows the :ref:`password-hash-api`.
+
+    It supports a fixed-length salt.
+
+    The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords:
+
+    :type salt: bytes
+    :param salt:
+        Optional salt string.
+        If not specified, one will be autogenerated (this is recommended).
+        If specified, it must be 4 bytes in length.
+ + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + """ + #=================================================================== + # algorithm information + #=================================================================== + name = "mssql2000" + setting_kwds = ("salt",) + checksum_size = 40 + min_salt_size = max_salt_size = 4 + _stub_checksum = b("\x00") * 40 + + #=================================================================== + # formatting + #=================================================================== + + # 0100 - 2 byte identifier + # 4 byte salt + # 20 byte checksum + # 20 byte checksum + # = 46 bytes + # encoded '0x' + 92 chars = 94 + + @classmethod + def identify(cls, hash): + return _ident_mssql(hash, 94, 46) + + @classmethod + def from_string(cls, hash): + data = _parse_mssql(hash, 94, 46, cls) + return cls(salt=data[:4], checksum=data[4:]) + + def to_string(self): + raw = self.salt + (self.checksum or self._stub_checksum) + # raw bytes format - BIDENT2 + raw + return "0x0100" + bascii_to_str(hexlify(raw).upper()) + + def _calc_checksum(self, secret): + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + salt = self.salt + return _raw_mssql(secret, salt) + _raw_mssql(secret.upper(), salt) + + @classmethod + def verify(cls, secret, hash): + # NOTE: we only compare against the upper-case hash + # XXX: add 'full' just to verify both checksums? + uh.validate_secret(secret) + self = cls.from_string(hash) + chk = self.checksum + if chk is None: + raise uh.exc.MissingDigestError(cls) + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + result = _raw_mssql(secret.upper(), self.salt) + return consteq(result, chk[20:]) + +#============================================================================= +# handler +#============================================================================= +class mssql2005(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the password hash used by MS-SQL 2005, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 4 bytes in length. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. 
+ """ + #=================================================================== + # algorithm information + #=================================================================== + name = "mssql2005" + setting_kwds = ("salt",) + + checksum_size = 20 + min_salt_size = max_salt_size = 4 + _stub_checksum = b("\x00") * 20 + + #=================================================================== + # formatting + #=================================================================== + + # 0x0100 - 2 byte identifier + # 4 byte salt + # 20 byte checksum + # = 26 bytes + # encoded '0x' + 52 chars = 54 + + @classmethod + def identify(cls, hash): + return _ident_mssql(hash, 54, 26) + + @classmethod + def from_string(cls, hash): + data = _parse_mssql(hash, 54, 26, cls) + return cls(salt=data[:4], checksum=data[4:]) + + def to_string(self): + raw = self.salt + (self.checksum or self._stub_checksum) + # raw bytes format - BIDENT2 + raw + return "0x0100" + bascii_to_str(hexlify(raw)).upper() + + def _calc_checksum(self, secret): + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + return _raw_mssql(secret, self.salt) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/mysql.py b/passlib/handlers/mysql.py new file mode 100644 index 00000000..8f57d05f --- /dev/null +++ b/passlib/handlers/mysql.py @@ -0,0 +1,128 @@ +"""passlib.handlers.mysql + +MySQL 3.2.3 / OLD_PASSWORD() + + This implements Mysql's OLD_PASSWORD algorithm, introduced in version 3.2.3, deprecated in version 4.1. + + See :mod:`passlib.handlers.mysql_41` for the new algorithm was put in place in version 4.1 + + This algorithm is known to be very insecure, and should only be used to verify existing password hashes. + + http://djangosnippets.org/snippets/1508/ + +MySQL 4.1.1 / NEW PASSWORD + This implements Mysql new PASSWORD algorithm, introduced in version 4.1. + + This function is unsalted, and therefore not very secure against rainbow attacks. + It should only be used when dealing with mysql passwords, + for all other purposes, you should use a salted hash function. + + Description taken from http://dev.mysql.com/doc/refman/6.0/en/password-hashing.html +""" +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import sha1 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_native_str +from passlib.utils.compat import b, bascii_to_str, bytes, unicode, u, \ + byte_elem_value, str_to_uascii +import passlib.utils.handlers as uh +# local +__all__ = [ + 'mysql323', + 'mysq41', +] + +#============================================================================= +# backend +#============================================================================= +class mysql323(uh.StaticHandler): + """This class implements the MySQL 3.2.3 password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. 
+ """ + #=================================================================== + # class attrs + #=================================================================== + name = "mysql323" + checksum_size = 16 + checksum_chars = uh.HEX_CHARS + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + # FIXME: no idea if mysql has a policy about handling unicode passwords + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + + MASK_32 = 0xffffffff + MASK_31 = 0x7fffffff + WHITE = b(' \t') + + nr1 = 0x50305735 + nr2 = 0x12345671 + add = 7 + for c in secret: + if c in WHITE: + continue + tmp = byte_elem_value(c) + nr1 ^= ((((nr1 & 63)+add)*tmp) + (nr1 << 8)) & MASK_32 + nr2 = (nr2+((nr2 << 8) ^ nr1)) & MASK_32 + add = (add+tmp) & MASK_32 + return u("%08x%08x") % (nr1 & MASK_31, nr2 & MASK_31) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# handler +#============================================================================= +class mysql41(uh.StaticHandler): + """This class implements the MySQL 4.1 password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. + """ + #=================================================================== + # class attrs + #=================================================================== + name = "mysql41" + _hash_prefix = u("*") + checksum_chars = uh.HEX_CHARS + checksum_size = 40 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.upper() + + def _calc_checksum(self, secret): + # FIXME: no idea if mysql has a policy about handling unicode passwords + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(sha1(sha1(secret).digest()).hexdigest()).upper() + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/oracle.py b/passlib/handlers/oracle.py new file mode 100644 index 00000000..b8265201 --- /dev/null +++ b/passlib/handlers/oracle.py @@ -0,0 +1,175 @@ +"""passlib.handlers.oracle - Oracle DB Password Hashes""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify, unhexlify +from hashlib import sha1 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_unicode, to_native_str, xor_bytes +from passlib.utils.compat import b, bytes, bascii_to_str, irange, u, \ + uascii_to_str, unicode, str_to_uascii +from passlib.utils.des import des_encrypt_block +import passlib.utils.handlers as 
uh +# local +__all__ = [ + "oracle10g", + "oracle11g" +] + +#============================================================================= +# oracle10 +#============================================================================= +def des_cbc_encrypt(key, value, iv=b('\x00') * 8, pad=b('\x00')): + """performs des-cbc encryption, returns only last block. + + this performs a specific DES-CBC encryption implementation + as needed by the Oracle10 hash. it probably won't be useful for + other purposes as-is. + + input value is null-padded to multiple of 8 bytes. + + :arg key: des key as bytes + :arg value: value to encrypt, as bytes. + :param iv: optional IV + :param pad: optional pad byte + + :returns: last block of DES-CBC encryption of all ``value``'s byte blocks. + """ + value += pad * (-len(value) % 8) # null pad to multiple of 8 + hash = iv # start things off + for offset in irange(0,len(value),8): + chunk = xor_bytes(hash, value[offset:offset+8]) + hash = des_encrypt_block(key, chunk) + return hash + +# magic string used as initial des key by oracle10 +ORACLE10_MAGIC = b("\x01\x23\x45\x67\x89\xAB\xCD\xEF") + +class oracle10(uh.HasUserContext, uh.StaticHandler): + """This class implements the password hash used by Oracle up to version 10g, and follows the :ref:`password-hash-api`. + + It does a single round of hashing, and relies on the username as the salt. + + The :meth:`~passlib.ifc.PasswordHash.encrypt`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods all require the + following additional contextual keywords: + + :type user: str + :param user: name of oracle user account this password is associated with. + """ + #=================================================================== + # algorithm information + #=================================================================== + name = "oracle10" + checksum_chars = uh.HEX_CHARS + checksum_size = 16 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.upper() + + def _calc_checksum(self, secret): + # FIXME: not sure how oracle handles unicode. + # online docs about 10g hash indicate it puts ascii chars + # in a 2-byte encoding w/ the high byte set to null. + # they don't say how it handles other chars, or what encoding. + # + # so for now, encoding secret & user to utf-16-be, + # since that fits, and if secret/user is bytes, + # we assume utf-8, and decode first. + # + # this whole mess really needs someone w/ an oracle system, + # and some answers :) + if isinstance(secret, bytes): + secret = secret.decode("utf-8") + user = to_unicode(self.user, "utf-8", param="user") + input = (user+secret).upper().encode("utf-16-be") + hash = des_cbc_encrypt(ORACLE10_MAGIC, input) + hash = des_cbc_encrypt(hash, input) + return hexlify(hash).decode("ascii").upper() + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# oracle11 +#============================================================================= +class oracle11(uh.HasSalt, uh.GenericHandler): + """This class implements the Oracle11g password hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt. 
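+
+    Hashes produced by this handler have the form ``S:`` followed by a
+    40-character hexadecimal checksum and a 20-character hexadecimal salt
+    (this is simply a restatement of the ``_hash_regex`` pattern defined below).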
+
+    The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords:
+
+    :type salt: str
+    :param salt:
+        Optional salt string.
+        If not specified, one will be autogenerated (this is recommended).
+        If specified, it must be 20 hexadecimal characters.
+
+    :type relaxed: bool
+    :param relaxed:
+        By default, providing an invalid value for one of the other
+        keywords will result in a :exc:`ValueError`. If ``relaxed=True``,
+        and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning`
+        will be issued instead. Correctable errors include
+        ``salt`` strings that are too long.
+
+    .. versionadded:: 1.6
+    """
+    #===================================================================
+    # class attrs
+    #===================================================================
+    #--GenericHandler--
+    name = "oracle11"
+    setting_kwds = ("salt",)
+    checksum_size = 40
+    checksum_chars = uh.UPPER_HEX_CHARS
+
+    _stub_checksum = u('0') * 40
+
+    #--HasSalt--
+    min_salt_size = max_salt_size = 20
+    salt_chars = uh.UPPER_HEX_CHARS
+
+
+    #===================================================================
+    # methods
+    #===================================================================
+    _hash_regex = re.compile(u("^S:(?P<chk>[0-9a-f]{40})(?P<salt>[0-9a-f]{20})$"), re.I)
+
+    @classmethod
+    def from_string(cls, hash):
+        hash = to_unicode(hash, "ascii", "hash")
+        m = cls._hash_regex.match(hash)
+        if not m:
+            raise uh.exc.InvalidHashError(cls)
+        salt, chk = m.group("salt", "chk")
+        return cls(salt=salt, checksum=chk.upper())
+
+    def to_string(self):
+        chk = (self.checksum or self._stub_checksum)
+        hash = u("S:%s%s") % (chk.upper(), self.salt.upper())
+        return uascii_to_str(hash)
+
+    def _calc_checksum(self, secret):
+        if isinstance(secret, unicode):
+            secret = secret.encode("utf-8")
+        chk = sha1(secret + unhexlify(self.salt.encode("ascii"))).hexdigest()
+        return str_to_uascii(chk).upper()
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/passlib/handlers/pbkdf2.py b/passlib/handlers/pbkdf2.py
new file mode 100644
index 00000000..cadbbea8
--- /dev/null
+++ b/passlib/handlers/pbkdf2.py
@@ -0,0 +1,490 @@
+"""passlib.handlers.pbkdf - PBKDF2 based hashes"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+from binascii import hexlify, unhexlify
+from base64 import b64encode, b64decode
+import re
+import logging; log = logging.getLogger(__name__)
+from warnings import warn
+# site
+# pkg
+from passlib.utils import ab64_decode, ab64_encode, to_unicode
+from passlib.utils.compat import b, bytes, str_to_bascii, u, uascii_to_str, unicode
+from passlib.utils.pbkdf2 import pbkdf2
+import passlib.utils.handlers as uh
+# local
+__all__ = [
+    "pbkdf2_sha1",
+    "pbkdf2_sha256",
+    "pbkdf2_sha512",
+    "cta_pbkdf2_sha1",
+    "dlitz_pbkdf2_sha1",
+    "grub_pbkdf2_sha512",
+]
+
+#=============================================================================
+#
+#=============================================================================
+class Pbkdf2DigestHandler(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler):
+    "base class for various
pbkdf2_{digest} algorithms" + #=================================================================== + # class attrs + #=================================================================== + + #--GenericHandler-- + setting_kwds = ("salt", "salt_size", "rounds") + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + default_salt_size = 16 + min_salt_size = 0 + max_salt_size = 1024 + + #--HasRounds-- + default_rounds = None # set by subclass + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + #--this class-- + _prf = None # subclass specified prf identifier + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide sanity check. + # the underlying pbkdf2 specifies no bounds for either. + + # NOTE: defaults chosen to be at least as large as pbkdf2 rfc recommends... + # >8 bytes of entropy in salt, >1000 rounds + # increased due to time since rfc established + + #=================================================================== + # methods + #=================================================================== + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) + salt = ab64_decode(salt.encode("ascii")) + if chk: + chk = ab64_decode(chk.encode("ascii")) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self, withchk=True): + salt = ab64_encode(self.salt).decode("ascii") + if withchk and self.checksum: + chk = ab64_encode(self.checksum).decode("ascii") + else: + chk = None + return uh.render_mc3(self.ident, self.rounds, salt, chk) + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return pbkdf2(secret, self.salt, self.rounds, self.checksum_size, self._prf) + +def create_pbkdf2_hash(hash_name, digest_size, rounds=12000, ident=None, module=__name__): + "create new Pbkdf2DigestHandler subclass for a specific hash" + name = 'pbkdf2_' + hash_name + if ident is None: + ident = u("$pbkdf2-%s$") % (hash_name,) + prf = "hmac-%s" % (hash_name,) + base = Pbkdf2DigestHandler + return type(name, (base,), dict( + __module__=module, # so ABCMeta won't clobber it. + name=name, + ident=ident, + _prf = prf, + default_rounds=rounds, + checksum_size=digest_size, + encoded_checksum_size=(digest_size*4+2)//3, + __doc__="""This class implements a generic ``PBKDF2-%(prf)s``-based password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, the length must be between 0-1024 bytes. + If not specified, a %(dsc)d byte salt will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to %(dr)d, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. 
Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ % dict(prf=prf.upper(), dsc=base.default_salt_size, dr=rounds) + )) + +#------------------------------------------------------------------------ +# derived handlers +#------------------------------------------------------------------------ +pbkdf2_sha1 = create_pbkdf2_hash("sha1", 20, 60000, ident=u("$pbkdf2$")) +pbkdf2_sha256 = create_pbkdf2_hash("sha256", 32, 20000) +pbkdf2_sha512 = create_pbkdf2_hash("sha512", 64, 19000) + +ldap_pbkdf2_sha1 = uh.PrefixWrapper("ldap_pbkdf2_sha1", pbkdf2_sha1, "{PBKDF2}", "$pbkdf2$", ident=True) +ldap_pbkdf2_sha256 = uh.PrefixWrapper("ldap_pbkdf2_sha256", pbkdf2_sha256, "{PBKDF2-SHA256}", "$pbkdf2-sha256$", ident=True) +ldap_pbkdf2_sha512 = uh.PrefixWrapper("ldap_pbkdf2_sha512", pbkdf2_sha512, "{PBKDF2-SHA512}", "$pbkdf2-sha512$", ident=True) + +#============================================================================= +# cryptacular's pbkdf2 hash +#============================================================================= + +# bytes used by cta hash for base64 values 63 & 64 +CTA_ALTCHARS = b("-_") + +class cta_pbkdf2_sha1(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements Cryptacular's PBKDF2-based crypt algorithm, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, it may be any length. + If not specified, a one will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 60000, must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "cta_pbkdf2_sha1" + setting_kwds = ("salt", "salt_size", "rounds") + ident = u("$p5k2$") + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a + # sanity check. underlying algorithm (and reference implementation) + # allows effectively unbounded values for both of these parameters. 
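(Editor's sketch, not part of the patch: the derived handlers defined above -- pbkdf2_sha1, pbkdf2_sha256, pbkdf2_sha512 and their ldap_* wrappers -- follow the standard passlib PasswordHash API. The snippet below assumes the vendored copy also ships the usual passlib.hash registry module.)

    from passlib.hash import pbkdf2_sha256

    # hash with an autogenerated salt and the handler's default rounds
    h = pbkdf2_sha256.encrypt("correct horse battery staple")
    # -> '$pbkdf2-sha256$<rounds>$<ab64 salt>$<ab64 checksum>'

    pbkdf2_sha256.verify("correct horse battery staple", h)   # True
    pbkdf2_sha256.verify("wrong guess", h)                    # False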
+ + #--HasSalt-- + default_salt_size = 16 + min_salt_size = 0 + max_salt_size = 1024 + + #--HasRounds-- + default_rounds = pbkdf2_sha1.default_rounds + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + #=================================================================== + # formatting + #=================================================================== + + # hash $p5k2$1000$ZxK4ZBJCfQg=$jJZVscWtO--p1-xIZl6jhO2LKR0= + # ident $p5k2$ + # rounds 1000 + # salt ZxK4ZBJCfQg= + # chk jJZVscWtO--p1-xIZl6jhO2LKR0= + # NOTE: rounds in hex + + @classmethod + def from_string(cls, hash): + # NOTE: passlib deviation - forbidding zero-padded rounds + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, rounds_base=16, handler=cls) + salt = b64decode(salt.encode("ascii"), CTA_ALTCHARS) + if chk: + chk = b64decode(chk.encode("ascii"), CTA_ALTCHARS) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self, withchk=True): + salt = b64encode(self.salt, CTA_ALTCHARS).decode("ascii") + if withchk and self.checksum: + chk = b64encode(self.checksum, CTA_ALTCHARS).decode("ascii") + else: + chk = None + return uh.render_mc3(self.ident, self.rounds, salt, chk, rounds_base=16) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return pbkdf2(secret, self.salt, self.rounds, 20, "hmac-sha1") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# dlitz's pbkdf2 hash +#============================================================================= +class dlitz_pbkdf2_sha1(uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements Dwayne Litzenberger's PBKDF2-based crypt algorithm, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If specified, it may be any length, but must use the characters in the regexp range ``[./0-9A-Za-z]``. + If not specified, a 16 character salt will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 16 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 60000, must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "dlitz_pbkdf2_sha1" + setting_kwds = ("salt", "salt_size", "rounds") + ident = u("$p5k2$") + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a + # sanity check. underlying algorithm (and reference implementation) + # allows effectively unbounded values for both of these parameters. + + #--HasSalt-- + default_salt_size = 16 + min_salt_size = 0 + max_salt_size = 1024 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + # NOTE: for security, the default here is set to match pbkdf2_sha1, + # even though this hash's extra block makes it twice as slow. + default_rounds = pbkdf2_sha1.default_rounds + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + #=================================================================== + # formatting + #=================================================================== + + # hash $p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g + # ident $p5k2$ + # rounds c + # salt u9HvcT4d + # chk Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g + # rounds in lowercase hex, no zero padding + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, rounds_base=16, + default_rounds=400, handler=cls) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self, withchk=True): + rounds = self.rounds + if rounds == 400: + rounds = None # omit rounds measurement if == 400 + return uh.render_mc3(self.ident, rounds, self.salt, + checksum=self.checksum if withchk else None, + rounds_base=16) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + salt = str_to_bascii(self.to_string(withchk=False)) + result = pbkdf2(secret, salt, self.rounds, 24, "hmac-sha1") + return ab64_encode(result).decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# crowd +#============================================================================= +class atlassian_pbkdf2_sha1(uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements the PBKDF2 hash used by Atlassian. + + It supports a fixed-length salt, and a fixed number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keyword: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, the length must be exactly 16 bytes. + If not specified, a salt will be autogenerated (this is recommended). + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include + ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + #--GenericHandler-- + name = "atlassian_pbkdf2_sha1" + setting_kwds =("salt",) + ident = u("{PKCS5S2}") + checksum_size = 32 + + _stub_checksum = b("\x00") * 32 + + #--HasRawSalt-- + min_salt_size = max_salt_size = 16 + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + ident = cls.ident + if not hash.startswith(ident): + raise uh.exc.InvalidHashError(cls) + data = b64decode(hash[len(ident):].encode("ascii")) + salt, chk = data[:16], data[16:] + return cls(salt=salt, checksum=chk) + + def to_string(self): + data = self.salt + (self.checksum or self._stub_checksum) + hash = self.ident + b64encode(data).decode("ascii") + return uascii_to_str(hash) + + def _calc_checksum(self, secret): + # TODO: find out what crowd's policy is re: unicode + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + # crowd seems to use a fixed number of rounds. + return pbkdf2(secret, self.salt, 10000, 32, "hmac-sha1") + +#============================================================================= +# grub +#============================================================================= +class grub_pbkdf2_sha512(uh.HasRounds, uh.HasRawSalt, uh.HasRawChecksum, uh.GenericHandler): + """This class implements Grub's pbkdf2-hmac-sha512 hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: bytes + :param salt: + Optional salt bytes. + If specified, the length must be between 0-1024 bytes. + If not specified, a 64 byte salt will be autogenerated (this is recommended). + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 64 bytes, but can be any value between 0 and 1024. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 19000, but must be within ``range(1,1<<32)``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + name = "grub_pbkdf2_sha512" + setting_kwds = ("salt", "salt_size", "rounds") + + ident = u("grub.pbkdf2.sha512.") + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide a + # sanity check. the underlying pbkdf2 specifies no bounds for either, + # and it's not clear what grub specifies. 
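(Editor's sketch, not part of the patch: assuming the standard passlib.hash registry module is present in this vendored copy, grub_pbkdf2_sha512 emits and accepts the same string format produced by GRUB's grub-mkpasswd-pbkdf2 tool.)

    from passlib.hash import grub_pbkdf2_sha512

    h = grub_pbkdf2_sha512.encrypt("grub password")
    # -> 'grub.pbkdf2.sha512.<rounds>.<HEX SALT>.<HEX DIGEST>'
    grub_pbkdf2_sha512.verify("grub password", h)   # True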
+ + default_salt_size = 64 + min_salt_size = 0 + max_salt_size = 1024 + + default_rounds = pbkdf2_sha512.default_rounds + min_rounds = 1 + max_rounds = 0xffffffff # setting at 32-bit limit for now + rounds_cost = "linear" + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, sep=u("."), + handler=cls) + salt = unhexlify(salt.encode("ascii")) + if chk: + chk = unhexlify(chk.encode("ascii")) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self, withchk=True): + salt = hexlify(self.salt).decode("ascii").upper() + if withchk and self.checksum: + chk = hexlify(self.checksum).decode("ascii").upper() + else: + chk = None + return uh.render_mc3(self.ident, self.rounds, salt, chk, sep=u(".")) + + def _calc_checksum(self, secret): + # TODO: find out what grub's policy is re: unicode + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return pbkdf2(secret, self.salt, self.rounds, 64, "hmac-sha512") + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/phpass.py b/passlib/handlers/phpass.py new file mode 100644 index 00000000..45bd9a64 --- /dev/null +++ b/passlib/handlers/phpass.py @@ -0,0 +1,137 @@ +"""passlib.handlers.phpass - PHPass Portable Crypt + +phppass located - http://www.openwall.com/phpass/ +algorithm described - http://www.openwall.com/articles/PHP-Users-Passwords + +phpass context - blowfish, bsdi_crypt, phpass +""" +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import md5 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import h64 +from passlib.utils.compat import b, bytes, u, uascii_to_str, unicode +import passlib.utils.handlers as uh +# local +__all__ = [ + "phpass", +] + +#============================================================================= +# phpass +#============================================================================= +class phpass(uh.HasManyIdents, uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the PHPass Portable Hash, and follows the :ref:`password-hash-api`. + + It supports a fixed-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 8 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 17, must be between 7 and 30, inclusive. + This value is logarithmic, the actual number of iterations used will be :samp:`2**{rounds}`. + + :type ident: str + :param ident: + phpBB3 uses ``H`` instead of ``P`` for it's identifier, + this may be set to ``H`` in order to generate phpBB3 compatible hashes. + it defaults to ``P``. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. 
Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "phpass" + setting_kwds = ("salt", "rounds", "ident") + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + min_salt_size = max_salt_size = 8 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + default_rounds = 17 + min_rounds = 7 + max_rounds = 30 + rounds_cost = "log2" + + #--HasManyIdents-- + default_ident = u("$P$") + ident_values = [u("$P$"), u("$H$")] + ident_aliases = {u("P"):u("$P$"), u("H"):u("$H$")} + + #=================================================================== + # formatting + #=================================================================== + + #$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r.L0 + # $P$ + # 9 + # IQRaTwmf + # eRo7ud9Fh4E2PdI0S3r.L0 + + @classmethod + def from_string(cls, hash): + ident, data = cls._parse_ident(hash) + rounds, salt, chk = data[0], data[1:9], data[9:] + return cls( + ident=ident, + rounds=h64.decode_int6(rounds.encode("ascii")), + salt=salt, + checksum=chk or None, + ) + + def to_string(self): + hash = u("%s%s%s%s") % (self.ident, + h64.encode_int6(self.rounds).decode("ascii"), + self.salt, + self.checksum or u('')) + return uascii_to_str(hash) + + #=================================================================== + # backend + #=================================================================== + def _calc_checksum(self, secret): + # FIXME: can't find definitive policy on how phpass handles non-ascii. + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + real_rounds = 1<`_ + hash names. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + + In addition to the standard :ref:`password-hash-api` methods, + this class also provides the following methods for manipulating Passlib + scram hashes in ways useful for pluging into a SCRAM protocol stack: + + .. automethod:: extract_digest_info + .. automethod:: extract_digest_algs + .. automethod:: derive_digest + """ + #=================================================================== + # class attrs + #=================================================================== + + # NOTE: unlike most GenericHandler classes, the 'checksum' attr of + # ScramHandler is actually a map from digest_name -> digest, so + # many of the standard methods have been overridden. + + # NOTE: max_salt_size and max_rounds are arbitrarily chosen to provide + # a sanity check; the underlying pbkdf2 specifies no bounds for either. + + #--GenericHandler-- + name = "scram" + setting_kwds = ("salt", "salt_size", "rounds", "algs") + ident = u("$scram$") + + #--HasSalt-- + default_salt_size = 12 + min_salt_size = 0 + max_salt_size = 1024 + + #--HasRounds-- + default_rounds = 20000 + min_rounds = 1 + max_rounds = 2**32-1 + rounds_cost = "linear" + + #--custom-- + + # default algorithms when creating new hashes. + default_algs = ["sha-1", "sha-256", "sha-512"] + + # list of algs verify prefers to use, in order. 
+ _verify_algs = ["sha-256", "sha-512", "sha-224", "sha-384", "sha-1"] + + #=================================================================== + # instance attrs + #=================================================================== + + # 'checksum' is different from most GenericHandler subclasses, + # in that it contains a dict mapping from alg -> digest, + # or None if no checksum present. + + # list of algorithms to create/compare digests for. + algs = None + + #=================================================================== + # scram frontend helpers + #=================================================================== + @classmethod + def extract_digest_info(cls, hash, alg): + """return (salt, rounds, digest) for specific hash algorithm. + + :type hash: str + :arg hash: + :class:`!scram` hash stored for desired user + + :type alg: str + :arg alg: + Name of digest algorithm (e.g. ``"sha-1"``) requested by client. + + This value is run through :func:`~passlib.utils.pbkdf2.norm_hash_name`, + so it is case-insensitive, and can be the raw SCRAM + mechanism name (e.g. ``"SCRAM-SHA-1"``), the IANA name, + or the hashlib name. + + :raises KeyError: + If the hash does not contain an entry for the requested digest + algorithm. + + :returns: + A tuple containing ``(salt, rounds, digest)``, + where *digest* matches the raw bytes returned by + SCRAM's :func:`Hi` function for the stored password, + the provided *salt*, and the iteration count (*rounds*). + *salt* and *digest* are both raw (unencoded) bytes. + """ + # XXX: this could be sped up by writing custom parsing routine + # that just picks out relevant digest, and doesn't bother + # with full structure validation each time it's called. + alg = norm_hash_name(alg, 'iana') + self = cls.from_string(hash) + chkmap = self.checksum + if not chkmap: + raise ValueError("scram hash contains no digests") + return self.salt, self.rounds, chkmap[alg] + + @classmethod + def extract_digest_algs(cls, hash, format="iana"): + """Return names of all algorithms stored in a given hash. + + :type hash: str + :arg hash: + The :class:`!scram` hash to parse + + :type format: str + :param format: + This changes the naming convention used by the + returned algorithm names. By default the names + are IANA-compatible; see :func:`~passlib.utils.pbkdf2.norm_hash_name` + for possible values. + + :returns: + Returns a list of digest algorithms; e.g. ``["sha-1"]`` + """ + # XXX: this could be sped up by writing custom parsing routine + # that just picks out relevant names, and doesn't bother + # with full structure validation each time it's called. + algs = cls.from_string(hash).algs + if format == "iana": + return algs + else: + return [norm_hash_name(alg, format) for alg in algs] + + @classmethod + def derive_digest(cls, password, salt, rounds, alg): + """helper to create SaltedPassword digest for SCRAM. + + This performs the step in the SCRAM protocol described as:: + + SaltedPassword := Hi(Normalize(password), salt, i) + + :type password: unicode or utf-8 bytes + :arg password: password to run through digest + + :type salt: bytes + :arg salt: raw salt data + + :type rounds: int + :arg rounds: number of iterations. + + :type alg: str + :arg alg: name of digest to use (e.g. ``"sha-1"``). 
+ + :returns: + raw bytes of ``SaltedPassword`` + """ + if isinstance(password, bytes): + password = password.decode("utf-8") + password = saslprep(password).encode("utf-8") + if not isinstance(salt, bytes): + raise TypeError("salt must be bytes") + if rounds < 1: + raise ValueError("rounds must be >= 1") + alg = norm_hash_name(alg, "hashlib") + return pbkdf2(password, salt, rounds, None, "hmac-" + alg) + + #=================================================================== + # serialization + #=================================================================== + + @classmethod + def from_string(cls, hash): + hash = to_native_str(hash, "ascii", "hash") + if not hash.startswith("$scram$"): + raise uh.exc.InvalidHashError(cls) + parts = hash[7:].split("$") + if len(parts) != 3: + raise uh.exc.MalformedHashError(cls) + rounds_str, salt_str, chk_str = parts + + # decode rounds + rounds = int(rounds_str) + if rounds_str != str(rounds): # forbid zero padding, etc. + raise uh.exc.MalformedHashError(cls) + + # decode salt + try: + salt = ab64_decode(salt_str.encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + + # decode algs/digest list + if not chk_str: + # scram hashes MUST have something here. + raise uh.exc.MalformedHashError(cls) + elif "=" in chk_str: + # comma-separated list of 'alg=digest' pairs + algs = None + chkmap = {} + for pair in chk_str.split(","): + alg, digest = pair.split("=") + try: + chkmap[alg] = ab64_decode(digest.encode("ascii")) + except TypeError: + raise uh.exc.MalformedHashError(cls) + else: + # comma-separated list of alg names, no digests + algs = chk_str + chkmap = None + + # return new object + return cls( + rounds=rounds, + salt=salt, + checksum=chkmap, + algs=algs, + ) + + def to_string(self, withchk=True): + salt = bascii_to_str(ab64_encode(self.salt)) + chkmap = self.checksum + if withchk and chkmap: + chk_str = ",".join( + "%s=%s" % (alg, bascii_to_str(ab64_encode(chkmap[alg]))) + for alg in self.algs + ) + else: + chk_str = ",".join(self.algs) + return '$scram$%d$%s$%s' % (self.rounds, salt, chk_str) + + #=================================================================== + # init + #=================================================================== + def __init__(self, algs=None, **kwds): + super(scram, self).__init__(**kwds) + self.algs = self._norm_algs(algs) + + def _norm_checksum(self, checksum): + if checksum is None: + return None + for alg, digest in iteritems(checksum): + if alg != norm_hash_name(alg, 'iana'): + raise ValueError("malformed algorithm name in scram hash: %r" % + (alg,)) + if len(alg) > 9: + raise ValueError("SCRAM limits algorithm names to " + "9 characters: %r" % (alg,)) + if not isinstance(digest, bytes): + raise uh.exc.ExpectedTypeError(digest, "raw bytes", "digests") + # TODO: verify digest size (if digest is known) + if 'sha-1' not in checksum: + # NOTE: required because of SCRAM spec. + raise ValueError("sha-1 must be in algorithm list of scram hash") + return checksum + + def _norm_algs(self, algs): + "normalize algs parameter" + # determine default algs value + if algs is None: + # derive algs list from checksum (if present). 
+ chk = self.checksum + if chk is not None: + return sorted(chk) + elif self.use_defaults: + return list(self.default_algs) + else: + raise TypeError("no algs list specified") + elif self.checksum is not None: + raise RuntimeError("checksum & algs kwds are mutually exclusive") + + # parse args value + if isinstance(algs, str): + algs = splitcomma(algs) + algs = sorted(norm_hash_name(alg, 'iana') for alg in algs) + if any(len(alg)>9 for alg in algs): + raise ValueError("SCRAM limits alg names to max of 9 characters") + if 'sha-1' not in algs: + # NOTE: required because of SCRAM spec (rfc 5802) + raise ValueError("sha-1 must be in algorithm list of scram hash") + return algs + + #=================================================================== + # digest methods + #=================================================================== + + @classmethod + def _bind_needs_update(cls, **settings): + "generate a deprecation detector for CryptContext to use" + # generate deprecation hook which marks hashes as deprecated + # if they don't support a superset of current algs. + algs = frozenset(cls(use_defaults=True, **settings).algs) + def detector(hash, secret): + return not algs.issubset(cls.from_string(hash).algs) + return detector + + def _calc_checksum(self, secret, alg=None): + rounds = self.rounds + salt = self.salt + hash = self.derive_digest + if alg: + # if requested, generate digest for specific alg + return hash(secret, salt, rounds, alg) + else: + # by default, return dict containing digests for all algs + return dict( + (alg, hash(secret, salt, rounds, alg)) + for alg in self.algs + ) + + @classmethod + def verify(cls, secret, hash, full=False): + uh.validate_secret(secret) + self = cls.from_string(hash) + chkmap = self.checksum + if not chkmap: + raise ValueError("expected %s hash, got %s config string instead" % + (cls.name, cls.name)) + + # NOTE: to make the verify method efficient, we just calculate hash + # of shortest digest by default. apps can pass in "full=True" to + # check entire hash for consistency. + if full: + correct = failed = False + for alg, digest in iteritems(chkmap): + other = self._calc_checksum(secret, alg) + # NOTE: could do this length check in norm_algs(), + # but don't need to be that strict, and want to be able + # to parse hashes containing algs not supported by platform. + # it's fine if we fail here though. + if len(digest) != len(other): + raise ValueError("mis-sized %s digest in scram hash: %r != %r" + % (alg, len(digest), len(other))) + if consteq(other, digest): + correct = True + else: + failed = True + if correct and failed: + raise ValueError("scram hash verified inconsistently, " + "may be corrupted") + else: + return correct + else: + # XXX: should this just always use sha1 hash? would be faster. + # otherwise only verify against one hash, pick one w/ best security. + for alg in self._verify_algs: + if alg in chkmap: + other = self._calc_checksum(secret, alg) + return consteq(other, chkmap[alg]) + # there should always be sha-1 at the very least, + # or something went wrong inside _norm_algs() + raise AssertionError("sha-1 digest not found!") + + #=================================================================== + # + #=================================================================== + +#============================================================================= +# code used for testing scram against protocol examples during development. 
+#============================================================================= +##def _test_reference_scram(): +## "quick hack testing scram reference vectors" +## # NOTE: "n,," is GS2 header - see https://tools.ietf.org/html/rfc5801 +## from passlib.utils.compat import print_ +## +## engine = _scram_engine( +## alg="sha-1", +## salt='QSXCR+Q6sek8bf92'.decode("base64"), +## rounds=4096, +## password=u("pencil"), +## ) +## print_(engine.digest.encode("base64").rstrip()) +## +## msg = engine.format_auth_msg( +## username="user", +## client_nonce = "fyko+d2lbbFgONRv9qkxdawL", +## server_nonce = "3rfcNHYJY1ZVvWVs7j", +## header='c=biws', +## ) +## +## cp = engine.get_encoded_client_proof(msg) +## assert cp == "v0X8v3Bz2T0CJGbJQyF0X+HI4Ts=", cp +## +## ss = engine.get_encoded_server_sig(msg) +## assert ss == "rmF9pqV8S7suAoZWja4dJRkFsKQ=", ss +## +##class _scram_engine(object): +## """helper class for verifying scram hash behavior +## against SCRAM protocol examples. not officially part of Passlib. +## +## takes in alg, salt, rounds, and a digest or password. +## +## can calculate the various keys & messages of the scram protocol. +## +## """ +## #========================================================= +## # init +## #========================================================= +## +## @classmethod +## def from_string(cls, hash, alg): +## "create record from scram hash, for given alg" +## return cls(alg, *scram.extract_digest_info(hash, alg)) +## +## def __init__(self, alg, salt, rounds, digest=None, password=None): +## self.alg = norm_hash_name(alg) +## self.salt = salt +## self.rounds = rounds +## self.password = password +## if password: +## data = scram.derive_digest(password, salt, rounds, alg) +## if digest and data != digest: +## raise ValueError("password doesn't match digest") +## else: +## digest = data +## elif not digest: +## raise TypeError("must provide password or digest") +## self.digest = digest +## +## #========================================================= +## # frontend methods +## #========================================================= +## def get_hash(self, data): +## "return hash of raw data" +## return hashlib.new(iana_to_hashlib(self.alg), data).digest() +## +## def get_client_proof(self, msg): +## "return client proof of specified auth msg text" +## return xor_bytes(self.client_key, self.get_client_sig(msg)) +## +## def get_encoded_client_proof(self, msg): +## return self.get_client_proof(msg).encode("base64").rstrip() +## +## def get_client_sig(self, msg): +## "return client signature of specified auth msg text" +## return self.get_hmac(self.stored_key, msg) +## +## def get_server_sig(self, msg): +## "return server signature of specified auth msg text" +## return self.get_hmac(self.server_key, msg) +## +## def get_encoded_server_sig(self, msg): +## return self.get_server_sig(msg).encode("base64").rstrip() +## +## def format_server_response(self, client_nonce, server_nonce): +## return 'r={client_nonce}{server_nonce},s={salt},i={rounds}'.format( +## client_nonce=client_nonce, +## server_nonce=server_nonce, +## rounds=self.rounds, +## salt=self.encoded_salt, +## ) +## +## def format_auth_msg(self, username, client_nonce, server_nonce, +## header='c=biws'): +## return ( +## 'n={username},r={client_nonce}' +## ',' +## 'r={client_nonce}{server_nonce},s={salt},i={rounds}' +## ',' +## '{header},r={client_nonce}{server_nonce}' +## ).format( +## username=username, +## client_nonce=client_nonce, +## server_nonce=server_nonce, +## salt=self.encoded_salt, +## 
rounds=self.rounds, +## header=header, +## ) +## +## #========================================================= +## # helpers to calculate & cache constant data +## #========================================================= +## def _calc_get_hmac(self): +## return get_prf("hmac-" + iana_to_hashlib(self.alg))[0] +## +## def _calc_client_key(self): +## return self.get_hmac(self.digest, b("Client Key")) +## +## def _calc_stored_key(self): +## return self.get_hash(self.client_key) +## +## def _calc_server_key(self): +## return self.get_hmac(self.digest, b("Server Key")) +## +## def _calc_encoded_salt(self): +## return self.salt.encode("base64").rstrip() +## +## #========================================================= +## # hacks for calculated attributes +## #========================================================= +## +## def __getattr__(self, attr): +## if not attr.startswith("_"): +## f = getattr(self, "_calc_" + attr, None) +## if f: +## value = f() +## setattr(self, attr, value) +## return value +## raise AttributeError("attribute not found") +## +## def __dir__(self): +## cdir = dir(self.__class__) +## attrs = set(cdir) +## attrs.update(self.__dict__) +## attrs.update(attr[6:] for attr in cdir +## if attr.startswith("_calc_")) +## return sorted(attrs) +## #========================================================= +## # eoc +## #========================================================= + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/sha1_crypt.py b/passlib/handlers/sha1_crypt.py new file mode 100644 index 00000000..885c67fc --- /dev/null +++ b/passlib/handlers/sha1_crypt.py @@ -0,0 +1,150 @@ +"""passlib.handlers.sha1_crypt +""" + +#============================================================================= +# imports +#============================================================================= + +# core +from hmac import new as hmac +from hashlib import sha1 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import classproperty, h64, safe_crypt, test_crypt +from passlib.utils.compat import b, bytes, u, uascii_to_str, unicode +from passlib.utils.pbkdf2 import get_prf +import passlib.utils.handlers as uh +# local +__all__ = [ +] +#============================================================================= +# sha1-crypt +#============================================================================= +_hmac_sha1 = get_prf("hmac-sha1")[0] +_BNULL = b('\x00') + +class sha1_crypt(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the SHA1-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, an 8 character one will be autogenerated (this is recommended). + If specified, it must be 0-64 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type salt_size: int + :param salt_size: + Optional number of bytes to use when autogenerating new salts. + Defaults to 8 bytes, but can be any value between 0 and 64. + + :type rounds: int + :param rounds: + Optional number of rounds to use. 
+ Defaults to 64000, must be between 1 and 4294967295, inclusive. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + #--GenericHandler-- + name = "sha1_crypt" + setting_kwds = ("salt", "salt_size", "rounds") + ident = u("$sha1$") + checksum_size = 28 + checksum_chars = uh.HASH64_CHARS + + #--HasSalt-- + default_salt_size = 8 + min_salt_size = 0 + max_salt_size = 64 + salt_chars = uh.HASH64_CHARS + + #--HasRounds-- + default_rounds = 64000 # current passlib default + min_rounds = 1 # really, this should be higher. + max_rounds = 4294967295 # 32-bit integer limit + rounds_cost = "linear" + + #=================================================================== + # formatting + #=================================================================== + + @classmethod + def from_string(cls, hash): + rounds, salt, chk = uh.parse_mc3(hash, cls.ident, handler=cls) + return cls(rounds=rounds, salt=salt, checksum=chk) + + def to_string(self, config=False): + chk = None if config else self.checksum + return uh.render_mc3(self.ident, self.rounds, self.salt, chk) + + #=================================================================== + # backend + #=================================================================== + backends = ("os_crypt", "builtin") + + _has_backend_builtin = True + + @classproperty + def _has_backend_os_crypt(cls): + return test_crypt("test", '$sha1$1$Wq3GL2Vp$C8U25GvfHS8qGHim' + 'ExLaiSFlGkAe') + + def _calc_checksum_builtin(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + if _BNULL in secret: + raise uh.exc.NullPasswordError(self) + rounds = self.rounds + # NOTE: this seed value is NOT the same as the config string + result = (u("%s$sha1$%s") % (self.salt, rounds)).encode("ascii") + # NOTE: this algorithm is essentially PBKDF1, modified to use HMAC. 
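+        # concretely: starting from the ascii seed "<salt>$sha1$<rounds>" built
+        # above, the digest is re-keyed `rounds` times as
+        # result = HMAC-SHA1(key=secret, msg=result); the final 20-byte digest
+        # is then transposed and hash64-encoded by the return statement below.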
+ r = 0 + while r < rounds: + result = _hmac_sha1(secret, result) + r += 1 + return h64.encode_transposed_bytes(result, self._chk_offsets).decode("ascii") + + _chk_offsets = [ + 2,1,0, + 5,4,3, + 8,7,6, + 11,10,9, + 14,13,12, + 17,16,15, + 0,19,18, + ] + + def _calc_checksum_os_crypt(self, secret): + config = self.to_string(config=True) + hash = safe_crypt(secret, config) + if hash: + assert hash.startswith(config) and len(hash) == len(config) + 29 + return hash[-28:] + else: + return self._calc_checksum_builtin(secret) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/sha2_crypt.py b/passlib/handlers/sha2_crypt.py new file mode 100644 index 00000000..c4faaad3 --- /dev/null +++ b/passlib/handlers/sha2_crypt.py @@ -0,0 +1,486 @@ +"""passlib.handlers.sha2_crypt - SHA256-Crypt / SHA512-Crypt""" +#============================================================================= +# imports +#============================================================================= +# core +import hashlib +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import classproperty, h64, safe_crypt, test_crypt, \ + repeat_string, to_unicode +from passlib.utils.compat import b, bytes, byte_elem_value, irange, u, \ + uascii_to_str, unicode +import passlib.utils.handlers as uh +# local +__all__ = [ + "sha512_crypt", + "sha256_crypt", +] + +#============================================================================= +# pure-python backend, used by both sha256_crypt & sha512_crypt +# when crypt.crypt() backend is not available. +#============================================================================= +_BNULL = b('\x00') + +# pre-calculated offsets used to speed up C digest stage (see notes below). +# sequence generated using the following: + ##perms_order = "p,pp,ps,psp,sp,spp".split(",") + ##def offset(i): + ## key = (("p" if i % 2 else "") + ("s" if i % 3 else "") + + ## ("p" if i % 7 else "") + ("" if i % 2 else "p")) + ## return perms_order.index(key) + ##_c_digest_offsets = [(offset(i), offset(i+1)) for i in range(0,42,2)] +_c_digest_offsets = ( + (0, 3), (5, 1), (5, 3), (1, 2), (5, 1), (5, 3), (1, 3), + (4, 1), (5, 3), (1, 3), (5, 0), (5, 3), (1, 3), (5, 1), + (4, 3), (1, 3), (5, 1), (5, 2), (1, 3), (5, 1), (5, 3), + ) + +# map used to transpose bytes when encoding final sha256_crypt digest +_256_transpose_map = ( + 20, 10, 0, 11, 1, 21, 2, 22, 12, 23, 13, 3, 14, 4, 24, 5, + 25, 15, 26, 16, 6, 17, 7, 27, 8, 28, 18, 29, 19, 9, 30, 31, +) + +# map used to transpose bytes when encoding final sha512_crypt digest +_512_transpose_map = ( + 42, 21, 0, 1, 43, 22, 23, 2, 44, 45, 24, 3, 4, 46, 25, 26, + 5, 47, 48, 27, 6, 7, 49, 28, 29, 8, 50, 51, 30, 9, 10, 52, + 31, 32, 11, 53, 54, 33, 12, 13, 55, 34, 35, 14, 56, 57, 36, 15, + 16, 58, 37, 38, 17, 59, 60, 39, 18, 19, 61, 40, 41, 20, 62, 63, +) + +def _raw_sha2_crypt(pwd, salt, rounds, use_512=False): + """perform raw sha256-crypt / sha512-crypt + + this function provides a pure-python implementation of the internals + for the SHA256-Crypt and SHA512-Crypt algorithms; it doesn't + handle any of the parsing/validation of the hash strings themselves. 
+ + :arg pwd: password chars/bytes to encrypt + :arg salt: salt chars to use + :arg rounds: linear rounds cost + :arg use_512: use sha512-crypt instead of sha256-crypt mode + + :returns: + encoded checksum chars + """ + #=================================================================== + # init & validate inputs + #=================================================================== + + # validate secret + if isinstance(pwd, unicode): + # XXX: not sure what official unicode policy is, using this as default + pwd = pwd.encode("utf-8") + assert isinstance(pwd, bytes) + if _BNULL in pwd: + raise uh.exc.NullPasswordError(sha512_crypt if use_512 else sha256_crypt) + pwd_len = len(pwd) + + # validate rounds + assert 1000 <= rounds <= 999999999, "invalid rounds" + # NOTE: spec says out-of-range rounds should be clipped, instead of + # causing an error. this function assumes that's been taken care of + # by the handler class. + + # validate salt + assert isinstance(salt, unicode), "salt not unicode" + salt = salt.encode("ascii") + salt_len = len(salt) + assert salt_len < 17, "salt too large" + # NOTE: spec says salts larger than 16 bytes should be truncated, + # instead of causing an error. this function assumes that's been + # taken care of by the handler class. + + # load sha256/512 specific constants + if use_512: + hash_const = hashlib.sha512 + hash_len = 64 + transpose_map = _512_transpose_map + else: + hash_const = hashlib.sha256 + hash_len = 32 + transpose_map = _256_transpose_map + + #=================================================================== + # digest B - used as subinput to digest A + #=================================================================== + db = hash_const(pwd + salt + pwd).digest() + + #=================================================================== + # digest A - used to initialize first round of digest C + #=================================================================== + # start out with pwd + salt + a_ctx = hash_const(pwd + salt) + a_ctx_update = a_ctx.update + + # add pwd_len bytes of b, repeating b as many times as needed. + a_ctx_update(repeat_string(db, pwd_len)) + + # for each bit in pwd_len: add b if it's 1, or pwd if it's 0 + i = pwd_len + while i: + a_ctx_update(db if i & 1 else pwd) + i >>= 1 + + # finish A + da = a_ctx.digest() + + #=================================================================== + # digest P from password - used instead of password itself + # when calculating digest C. + #=================================================================== + if pwd_len < 64: + # method this is faster under python, but uses O(pwd_len**2) memory + # so we don't use it for larger passwords, to avoid a potential DOS. + dp = repeat_string(hash_const(pwd * pwd_len).digest(), pwd_len) + else: + tmp_ctx = hash_const(pwd) + tmp_ctx_update = tmp_ctx.update + i = pwd_len-1 + while i: + tmp_ctx_update(pwd) + i -= 1 + dp = repeat_string(tmp_ctx.digest(), pwd_len) + assert len(dp) == pwd_len + + #=================================================================== + # digest S - used instead of salt itself when calculating digest C + #=================================================================== + ds = hash_const(salt * (16 + byte_elem_value(da[0]))).digest()[:salt_len] + assert len(ds) == salt_len, "salt_len somehow > hash_len!" + + #=================================================================== + # digest C - for a variable number of rounds, combine A, S, and P + # digests in various ways; in order to burn CPU time. 
+ #=================================================================== + + # NOTE: the original SHA256/512-Crypt specification performs the C digest + # calculation using the following loop: + # + ##dc = da + ##i = 0 + ##while i < rounds: + ## tmp_ctx = hash_const(dp if i & 1 else dc) + ## if i % 3: + ## tmp_ctx.update(ds) + ## if i % 7: + ## tmp_ctx.update(dp) + ## tmp_ctx.update(dc if i & 1 else dp) + ## dc = tmp_ctx.digest() + ## i += 1 + # + # The code Passlib uses (below) implements an equivalent algorithm, + # it's just been heavily optimized to pre-calculate a large number + # of things beforehand. It works off of a couple of observations + # about the original algorithm: + # + # 1. each round is a combination of 'dc', 'ds', and 'dp'; determined + # by the whether 'i' a multiple of 2,3, and/or 7. + # 2. since lcm(2,3,7)==42, the series of combinations will repeat + # every 42 rounds. + # 3. even rounds 0-40 consist of 'hash(dc + round-specific-constant)'; + # while odd rounds 1-41 consist of hash(round-specific-constant + dc) + # + # Using these observations, the following code... + # * calculates the round-specific combination of ds & dp for each round 0-41 + # * runs through as many 42-round blocks as possible + # * runs through as many pairs of rounds as possible for remaining rounds + # * performs once last round if the total rounds should be odd. + # + # this cuts out a lot of the control overhead incurred when running the + # original loop 40,000+ times in python, resulting in ~20% increase in + # speed under CPython (though still 2x slower than glibc crypt) + + # prepare the 6 combinations of ds & dp which are needed + # (order of 'perms' must match how _c_digest_offsets was generated) + dp_dp = dp+dp + dp_ds = dp+ds + perms = [dp, dp_dp, dp_ds, dp_ds+dp, ds+dp, ds+dp_dp] + + # build up list of even-round & odd-round constants, + # and store in 21-element list as (even,odd) pairs. 
+ data = [ (perms[even], perms[odd]) for even, odd in _c_digest_offsets] + + # perform as many full 42-round blocks as possible + dc = da + blocks, tail = divmod(rounds, 42) + while blocks: + for even, odd in data: + dc = hash_const(odd + hash_const(dc + even).digest()).digest() + blocks -= 1 + + # perform any leftover rounds + if tail: + # perform any pairs of rounds + pairs = tail>>1 + for even, odd in data[:pairs]: + dc = hash_const(odd + hash_const(dc + even).digest()).digest() + + # if rounds was odd, do one last round (since we started at 0, + # last round will be an even-numbered round) + if tail & 1: + dc = hash_const(dc + data[pairs][0]).digest() + + #=================================================================== + # encode digest using appropriate transpose map + #=================================================================== + return h64.encode_transposed_bytes(dc, transpose_map).decode("ascii") + +#============================================================================= +# handlers +#============================================================================= +_UROUNDS = u("rounds=") +_UDOLLAR = u("$") +_UZERO = u("0") + +class _SHA2_Common(uh.HasManyBackends, uh.HasRounds, uh.HasSalt, + uh.GenericHandler): + "class containing common code shared by sha256_crypt & sha512_crypt" + #=================================================================== + # class attrs + #=================================================================== + # name - set by subclass + setting_kwds = ("salt", "rounds", "implicit_rounds", "salt_size") + # ident - set by subclass + checksum_chars = uh.HASH64_CHARS + # checksum_size - set by subclass + + min_salt_size = 0 + max_salt_size = 16 + salt_chars = uh.HASH64_CHARS + + min_rounds = 1000 # bounds set by spec + max_rounds = 999999999 # bounds set by spec + rounds_cost = "linear" + + _cdb_use_512 = False # flag for _calc_digest_builtin() + _rounds_prefix = None # ident + _UROUNDS + + #=================================================================== + # methods + #=================================================================== + implicit_rounds = False + + def __init__(self, implicit_rounds=None, **kwds): + super(_SHA2_Common, self).__init__(**kwds) + # if user calls encrypt() w/ 5000 rounds, default to compact form. + if implicit_rounds is None: + implicit_rounds = (self.use_defaults and self.rounds == 5000) + self.implicit_rounds = implicit_rounds + + @classmethod + def from_string(cls, hash): + # basic format this parses - + # $5$[rounds=$][$] + + # TODO: this *could* use uh.parse_mc3(), except that the rounds + # portion has a slightly different grammar. + + # convert to unicode, check for ident prefix, split on dollar signs. 
+ hash = to_unicode(hash, "ascii", "hash") + ident = cls.ident + if not hash.startswith(ident): + raise uh.exc.InvalidHashError(cls) + assert len(ident) == 3 + parts = hash[3:].split(_UDOLLAR) + + # extract rounds value + if parts[0].startswith(_UROUNDS): + assert len(_UROUNDS) == 7 + rounds = parts.pop(0)[7:] + if rounds.startswith(_UZERO) and rounds != _UZERO: + raise uh.exc.ZeroPaddedRoundsError(cls) + rounds = int(rounds) + implicit_rounds = False + else: + rounds = 5000 + implicit_rounds = True + + # rest should be salt and checksum + if len(parts) == 2: + salt, chk = parts + elif len(parts) == 1: + salt = parts[0] + chk = None + else: + raise uh.exc.MalformedHashError(cls) + + # return new object + return cls( + rounds=rounds, + salt=salt, + checksum=chk or None, + implicit_rounds=implicit_rounds, + relaxed=not chk, # NOTE: relaxing parsing for config strings + # so that out-of-range rounds are clipped, + # since SHA2-Crypt spec treats them this way. + ) + + def to_string(self): + if self.rounds == 5000 and self.implicit_rounds: + hash = u("%s%s$%s") % (self.ident, self.salt, + self.checksum or u('')) + else: + hash = u("%srounds=%d$%s$%s") % (self.ident, self.rounds, + self.salt, self.checksum or u('')) + return uascii_to_str(hash) + + #=================================================================== + # backends + #=================================================================== + backends = ("os_crypt", "builtin") + + _has_backend_builtin = True + + # _has_backend_os_crypt - provided by subclass + + def _calc_checksum_builtin(self, secret): + return _raw_sha2_crypt(secret, self.salt, self.rounds, + self._cdb_use_512) + + def _calc_checksum_os_crypt(self, secret): + hash = safe_crypt(secret, self.to_string()) + if hash: + # NOTE: avoiding full parsing routine via from_string().checksum, + # and just extracting the bit we need. + cs = self.checksum_size + assert hash.startswith(self.ident) and hash[-cs-1] == _UDOLLAR + return hash[-cs:] + else: + return self._calc_checksum_builtin(secret) + + #=================================================================== + # eoc + #=================================================================== + +class sha256_crypt(_SHA2_Common): + """This class implements the SHA256-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-16 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 110000, must be between 1000 and 999999999, inclusive. + + :type implicit_rounds: bool + :param implicit_rounds: + this is an internal option which generally doesn't need to be touched. + + this flag determines whether the hash should omit the rounds parameter + when encoding it to a string; this is only permitted by the spec for rounds=5000, + and the flag is ignored otherwise. the spec requires the two different + encodings be preserved as they are, instead of normalizing them. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. 
If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + name = "sha256_crypt" + ident = u("$5$") + checksum_size = 43 + # NOTE: using 25/75 weighting of builtin & os_crypt backends + default_rounds = 110000 + + #=================================================================== + # backends + #=================================================================== + @classproperty + def _has_backend_os_crypt(cls): + return test_crypt("test", "$5$rounds=1000$test$QmQADEXMG8POI5W" + "Dsaeho0P36yK3Tcrgboabng6bkb/") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# sha 512 crypt +#============================================================================= +class sha512_crypt(_SHA2_Common): + """This class implements the SHA512-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, one will be autogenerated (this is recommended). + If specified, it must be 0-16 characters, drawn from the regexp range ``[./0-9A-Za-z]``. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 100000, must be between 1000 and 999999999, inclusive. + + :type implicit_rounds: bool + :param implicit_rounds: + this is an internal option which generally doesn't need to be touched. + + this flag determines whether the hash should omit the rounds parameter + when encoding it to a string; this is only permitted by the spec for rounds=5000, + and the flag is ignored otherwise. the spec requires the two different + encodings be preserved as they are, instead of normalizing them. + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. 
versionadded:: 1.6 + """ + + #=================================================================== + # class attrs + #=================================================================== + name = "sha512_crypt" + ident = u("$6$") + checksum_size = 86 + _cdb_use_512 = True + # NOTE: using 25/75 weighting of builtin & os_crypt backends + default_rounds = 100000 + + #=================================================================== + # backend + #=================================================================== + @classproperty + def _has_backend_os_crypt(cls): + return test_crypt("test", "$6$rounds=1000$test$2M/Lx6Mtobqj" + "Ljobw0Wmo4Q5OFx5nVLJvmgseatA6oMn" + "yWeBdRDx4DU.1H3eGmse6pgsOgDisWBG" + "I5c7TZauS0") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/sun_md5_crypt.py b/passlib/handlers/sun_md5_crypt.py new file mode 100644 index 00000000..41d3331b --- /dev/null +++ b/passlib/handlers/sun_md5_crypt.py @@ -0,0 +1,364 @@ +"""passlib.handlers.sun_md5_crypt - Sun's Md5 Crypt, used on Solaris + +.. warning:: + + This implementation may not reproduce + the original Solaris behavior in some border cases. + See documentation for details. +""" + +#============================================================================= +# imports +#============================================================================= +# core +from hashlib import md5 +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import h64, to_unicode +from passlib.utils.compat import b, bytes, byte_elem_value, irange, u, \ + uascii_to_str, unicode, str_to_bascii +import passlib.utils.handlers as uh +# local +__all__ = [ + "sun_md5_crypt", +] + +#============================================================================= +# backend +#============================================================================= +# constant data used by alg - Hamlet act 3 scene 1 + null char +# exact bytes as in http://www.ibiblio.org/pub/docs/books/gutenberg/etext98/2ws2610.txt +# from Project Gutenberg. + +MAGIC_HAMLET = b( + "To be, or not to be,--that is the question:--\n" + "Whether 'tis nobler in the mind to suffer\n" + "The slings and arrows of outrageous fortune\n" + "Or to take arms against a sea of troubles,\n" + "And by opposing end them?--To die,--to sleep,--\n" + "No more; and by a sleep to say we end\n" + "The heartache, and the thousand natural shocks\n" + "That flesh is heir to,--'tis a consummation\n" + "Devoutly to be wish'd. To die,--to sleep;--\n" + "To sleep! perchance to dream:--ay, there's the rub;\n" + "For in that sleep of death what dreams may come,\n" + "When we have shuffled off this mortal coil,\n" + "Must give us pause: there's the respect\n" + "That makes calamity of so long life;\n" + "For who would bear the whips and scorns of time,\n" + "The oppressor's wrong, the proud man's contumely,\n" + "The pangs of despis'd love, the law's delay,\n" + "The insolence of office, and the spurns\n" + "That patient merit of the unworthy takes,\n" + "When he himself might his quietus make\n" + "With a bare bodkin? 
who would these fardels bear,\n" + "To grunt and sweat under a weary life,\n" + "But that the dread of something after death,--\n" + "The undiscover'd country, from whose bourn\n" + "No traveller returns,--puzzles the will,\n" + "And makes us rather bear those ills we have\n" + "Than fly to others that we know not of?\n" + "Thus conscience does make cowards of us all;\n" + "And thus the native hue of resolution\n" + "Is sicklied o'er with the pale cast of thought;\n" + "And enterprises of great pith and moment,\n" + "With this regard, their currents turn awry,\n" + "And lose the name of action.--Soft you now!\n" + "The fair Ophelia!--Nymph, in thy orisons\n" + "Be all my sins remember'd.\n\x00" #<- apparently null at end of C string is included (test vector won't pass otherwise) +) + +# NOTE: these sequences are pre-calculated iteration ranges used by X & Y loops w/in rounds function below +xr = irange(7) +_XY_ROUNDS = [ + tuple((i,i,i+3) for i in xr), # xrounds 0 + tuple((i,i+1,i+4) for i in xr), # xrounds 1 + tuple((i,i+8,(i+11)&15) for i in xr), # yrounds 0 + tuple((i,(i+9)&15, (i+12)&15) for i in xr), # yrounds 1 +] +del xr + +def raw_sun_md5_crypt(secret, rounds, salt): + "given secret & salt, return encoded sun-md5-crypt checksum" + global MAGIC_HAMLET + assert isinstance(secret, bytes) + assert isinstance(salt, bytes) + + # validate rounds + if rounds <= 0: + rounds = 0 + real_rounds = 4096 + rounds + # NOTE: spec seems to imply max 'rounds' is 2**32-1 + + # generate initial digest to start off round 0. + # NOTE: algorithm 'salt' includes full config string w/ trailing "$" + result = md5(secret + salt).digest() + assert len(result) == 16 + + # NOTE: many things in this function have been inlined (to speed up the loop + # as much as possible), to the point that this code barely resembles + # the algorithm as described in the docs. in particular: + # + # * all accesses to a given bit have been inlined using the formula + # rbitval(bit) = (rval((bit>>3) & 15) >> (bit & 7)) & 1 + # + # * the calculation of coinflip value R has been inlined + # + # * the conditional division of coinflip value V has been inlined as + # a shift right of 0 or 1. + # + # * the i, i+3, etc iterations are precalculated in lists. + # + # * the round-based conditional division of x & y is now performed + # by choosing an appropriate precalculated list, so that it only + # calculates the 7 bits which will actually be used. 
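+ # (illustrative note, not from the original source) the inlined bit lookup
+ # used throughout the loop below is equivalent to a tiny helper; the name
+ # rbitval() is hypothetical and given here only for explanation:
+ #
+ #     def rbitval(rval, bit):
+ #         # value of bit number `bit` (0..127) of the current 16-byte digest
+ #         return (rval((bit >> 3) & 15) >> (bit & 7)) & 1
+ #
+ # the expression is kept inlined in the code purely for speed.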
+ # + X_ROUNDS_0, X_ROUNDS_1, Y_ROUNDS_0, Y_ROUNDS_1 = _XY_ROUNDS + + # NOTE: % appears to be *slightly* slower than &, so we prefer & if possible + + round = 0 + while round < real_rounds: + # convert last result byte string to list of byte-ints for easy access + rval = [ byte_elem_value(c) for c in result ].__getitem__ + + # build up X bit by bit + x = 0 + xrounds = X_ROUNDS_1 if (rval((round>>3) & 15)>>(round & 7)) & 1 else X_ROUNDS_0 + for i, ia, ib in xrounds: + a = rval(ia) + b = rval(ib) + v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1) + x |= ((rval((v>>3)&15)>>(v&7))&1) << i + + # build up Y bit by bit + y = 0 + yrounds = Y_ROUNDS_1 if (rval(((round+64)>>3) & 15)>>(round & 7)) & 1 else Y_ROUNDS_0 + for i, ia, ib in yrounds: + a = rval(ia) + b = rval(ib) + v = rval((a >> (b % 5)) & 15) >> ((b>>(a&7)) & 1) + y |= ((rval((v>>3)&15)>>(v&7))&1) << i + + # extract x'th and y'th bit, xoring them together to yeild "coin flip" + coin = ((rval(x>>3) >> (x&7)) ^ (rval(y>>3) >> (y&7))) & 1 + + # construct hash for this round + h = md5(result) + if coin: + h.update(MAGIC_HAMLET) + h.update(unicode(round).encode("ascii")) + result = h.digest() + + round += 1 + + # encode output + return h64.encode_transposed_bytes(result, _chk_offsets) + +# NOTE: same offsets as md5_crypt +_chk_offsets = ( + 12,6,0, + 13,7,1, + 14,8,2, + 15,9,3, + 5,10,4, + 11, +) + +#============================================================================= +# handler +#============================================================================= +class sun_md5_crypt(uh.HasRounds, uh.HasSalt, uh.GenericHandler): + """This class implements the Sun-MD5-Crypt password hash, and follows the :ref:`password-hash-api`. + + It supports a variable-length salt, and a variable number of rounds. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept the following optional keywords: + + :type salt: str + :param salt: + Optional salt string. + If not specified, a salt will be autogenerated (this is recommended). + If specified, it must be drawn from the regexp range ``[./0-9A-Za-z]``. + + :type salt_size: int + :param salt_size: + If no salt is specified, this parameter can be used to specify + the size (in characters) of the autogenerated salt. + It currently defaults to 8. + + :type rounds: int + :param rounds: + Optional number of rounds to use. + Defaults to 5500, must be between 0 and 4294963199, inclusive. + + :type bare_salt: bool + :param bare_salt: + Optional flag used to enable an alternate salt digest behavior + used by some hash strings in this scheme. + This flag can be ignored by most users. + Defaults to ``False``. + (see :ref:`smc-bare-salt` for details). + + :type relaxed: bool + :param relaxed: + By default, providing an invalid value for one of the other + keywords will result in a :exc:`ValueError`. If ``relaxed=True``, + and the error can be corrected, a :exc:`~passlib.exc.PasslibHashWarning` + will be issued instead. Correctable errors include ``rounds`` + that are too small or too large, and ``salt`` strings that are too long. + + .. versionadded:: 1.6 + """ + #=================================================================== + # class attrs + #=================================================================== + name = "sun_md5_crypt" + setting_kwds = ("salt", "rounds", "bare_salt", "salt_size") + checksum_chars = uh.HASH64_CHARS + checksum_size = 22 + + # NOTE: docs say max password length is 255. 
+ # release 9u2 + + # NOTE: not sure if original crypt has a salt size limit, + # all instances that have been seen use 8 chars. + default_salt_size = 8 + min_salt_size = 0 + max_salt_size = None + salt_chars = uh.HASH64_CHARS + + default_rounds = 5500 # current passlib default + min_rounds = 0 + max_rounds = 4294963199 ##2**32-1-4096 + # XXX: ^ not sure what it does if past this bound... does 32 int roll over? + rounds_cost = "linear" + + ident_values = (u("$md5$"), u("$md5,")) + + #=================================================================== + # instance attrs + #=================================================================== + bare_salt = False # flag to indicate legacy hashes that lack "$$" suffix + + #=================================================================== + # constructor + #=================================================================== + def __init__(self, bare_salt=False, **kwds): + self.bare_salt = bare_salt + super(sun_md5_crypt, self).__init__(**kwds) + + #=================================================================== + # internal helpers + #=================================================================== + @classmethod + def identify(cls, hash): + hash = uh.to_unicode_for_identify(hash) + return hash.startswith(cls.ident_values) + + @classmethod + def from_string(cls, hash): + hash = to_unicode(hash, "ascii", "hash") + + # + # detect if hash specifies rounds value. + # if so, parse and validate it. + # by end, set 'rounds' to int value, and 'tail' containing salt+chk + # + if hash.startswith(u("$md5$")): + rounds = 0 + salt_idx = 5 + elif hash.startswith(u("$md5,rounds=")): + idx = hash.find(u("$"), 12) + if idx == -1: + raise uh.exc.MalformedHashError(cls, "unexpected end of rounds") + rstr = hash[12:idx] + try: + rounds = int(rstr) + except ValueError: + raise uh.exc.MalformedHashError(cls, "bad rounds") + if rstr != unicode(rounds): + raise uh.exc.ZeroPaddedRoundsError(cls) + if rounds == 0: + # NOTE: not sure if this is forbidden by spec or not; + # but allowing it would complicate things, + # and it should never occur anyways. + raise uh.exc.MalformedHashError(cls, "explicit zero rounds") + salt_idx = idx+1 + else: + raise uh.exc.InvalidHashError(cls) + + # + # salt/checksum separation is kinda weird, + # to deal cleanly with some backward-compatible workarounds + # implemented by original implementation. 
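+ # (illustrative summary, ours -- derived from the branches below and from
+ # to_string(); "salt" and "chk" are placeholders, not real values)
+ #   "$md5$salt"       -> config string, bare salt
+ #   "$md5$salt$"      -> config string, normal salt
+ #   "$md5$salt$chk"   -> hash string, bare salt
+ #   "$md5$salt$$chk"  -> hash string, normal salt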
+ # + chk_idx = hash.rfind(u("$"), salt_idx) + if chk_idx == -1: + # ''-config for $-hash + salt = hash[salt_idx:] + chk = None + bare_salt = True + elif chk_idx == len(hash)-1: + if chk_idx > salt_idx and hash[-2] == u("$"): + raise uh.exc.MalformedHashError(cls, "too many '$' separators") + # $-config for $$-hash + salt = hash[salt_idx:-1] + chk = None + bare_salt = False + elif chk_idx > 0 and hash[chk_idx-1] == u("$"): + # $$-hash + salt = hash[salt_idx:chk_idx-1] + chk = hash[chk_idx+1:] + bare_salt = False + else: + # $-hash + salt = hash[salt_idx:chk_idx] + chk = hash[chk_idx+1:] + bare_salt = True + + return cls( + rounds=rounds, + salt=salt, + checksum=chk, + bare_salt=bare_salt, + ) + + def to_string(self, withchk=True): + ss = u('') if self.bare_salt else u('$') + rounds = self.rounds + if rounds > 0: + hash = u("$md5,rounds=%d$%s%s") % (rounds, self.salt, ss) + else: + hash = u("$md5$%s%s") % (self.salt, ss) + if withchk: + chk = self.checksum + if chk: + hash = u("%s$%s") % (hash, chk) + return uascii_to_str(hash) + + #=================================================================== + # primary interface + #=================================================================== + # TODO: if we're on solaris, check for native crypt() support. + # this will require extra testing, to make sure native crypt + # actually behaves correctly. of particular importance: + # when using ""-config, make sure to append "$x" to string. + + def _calc_checksum(self, secret): + # NOTE: no reference for how sun_md5_crypt handles unicode + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + config = str_to_bascii(self.to_string(withchk=False)) + return raw_sun_md5_crypt(secret, self.rounds, config).decode("ascii") + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/handlers/windows.py b/passlib/handlers/windows.py new file mode 100644 index 00000000..3bc3e4f9 --- /dev/null +++ b/passlib/handlers/windows.py @@ -0,0 +1,310 @@ +"""passlib.handlers.nthash - Microsoft Windows -related hashes""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# site +# pkg +from passlib.utils import to_unicode, right_pad_string +from passlib.utils.compat import b, bytes, str_to_uascii, u, unicode, uascii_to_str +from passlib.utils.md4 import md4 +import passlib.utils.handlers as uh +# local +__all__ = [ + "lmhash", + "nthash", + "bsd_nthash", + "msdcc", + "msdcc2", +] + +#============================================================================= +# lanman hash +#============================================================================= +class lmhash(uh.HasEncodingContext, uh.StaticHandler): + """This class implements the Lan Manager Password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. 
+ + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.verify` methods accept a single + optional keyword: + + :type encoding: str + :param encoding: + + This specifies what character encoding LMHASH should use when + calculating digest. It defaults to ``cp437``, the most + common encoding encountered. + + Note that while this class outputs digests in lower-case hexadecimal, + it will accept upper-case as well. + """ + #=================================================================== + # class attrs + #=================================================================== + name = "lmhash" + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + default_encoding = "cp437" + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + return hexlify(self.raw(secret, self.encoding)).decode("ascii") + + # magic constant used by LMHASH + _magic = b("KGS!@#$%") + + @classmethod + def raw(cls, secret, encoding=None): + """encode password using LANMAN hash algorithm. + + :type secret: unicode or utf-8 encoded bytes + :arg secret: secret to hash + :type encoding: str + :arg encoding: + optional encoding to use for unicode inputs. + this defaults to ``cp437``, which is the + common case for most situations. + + :returns: returns string of raw bytes + """ + if not encoding: + encoding = cls.default_encoding + # some nice empirical data re: different encodings is at... + # http://www.openwall.com/lists/john-dev/2011/08/01/2 + # http://www.freerainbowtables.com/phpBB3/viewtopic.php?t=387&p=12163 + from passlib.utils.des import des_encrypt_block + MAGIC = cls._magic + if isinstance(secret, unicode): + # perform uppercasing while we're still unicode, + # to give a better shot at getting non-ascii chars right. + # (though some codepages do NOT upper-case the same as unicode). + secret = secret.upper().encode(encoding) + elif isinstance(secret, bytes): + # FIXME: just trusting ascii upper will work? + # and if not, how to do codepage specific case conversion? + # we could decode first using <encoding>, + # but *that* might not always be right. + secret = secret.upper() + else: + raise TypeError("secret must be unicode or bytes") + secret = right_pad_string(secret, 14) + return des_encrypt_block(secret[0:7], MAGIC) + \ + des_encrypt_block(secret[7:14], MAGIC) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# ntlm hash +#============================================================================= +class nthash(uh.StaticHandler): + """This class implements the NT Password hash, and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. + + Note that while this class outputs lower-case hexadecimal digests, + it will accept upper-case digests as well.
+ """ + #=================================================================== + # class attrs + #=================================================================== + name = "nthash" + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + + #=================================================================== + # methods + #=================================================================== + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + return hexlify(self.raw(secret)).decode("ascii") + + @classmethod + def raw(cls, secret): + """encode password using MD4-based NTHASH algorithm + + :arg secret: secret as unicode or utf-8 encoded bytes + + :returns: returns string of raw bytes + """ + secret = to_unicode(secret, "utf-8", param="secret") + # XXX: found refs that say only first 128 chars are used. + return md4(secret.encode("utf-16-le")).digest() + + @classmethod + def raw_nthash(cls, secret, hex=False): + warn("nthash.raw_nthash() is deprecated, and will be removed " + "in Passlib 1.8, please use nthash.raw() instead", + DeprecationWarning) + ret = nthash.raw(secret) + return hexlify(ret).decode("ascii") if hex else ret + + #=================================================================== + # eoc + #=================================================================== + +bsd_nthash = uh.PrefixWrapper("bsd_nthash", nthash, prefix="$3$$", ident="$3$$", + doc="""The class support FreeBSD's representation of NTHASH + (which is compatible with the :ref:`modular-crypt-format`), + and follows the :ref:`password-hash-api`. + + It has no salt and a single fixed round. + + The :meth:`~passlib.ifc.PasswordHash.encrypt` and :meth:`~passlib.ifc.PasswordHash.genconfig` methods accept no optional keywords. + """) + +##class ntlm_pair(object): +## "combined lmhash & nthash" +## name = "ntlm_pair" +## setting_kwds = () +## _hash_regex = re.compile(u"^(?P[0-9a-f]{32}):(?P[0-9][a-f]{32})$", +## re.I) +## +## @classmethod +## def identify(cls, hash): +## hash = to_unicode(hash, "latin-1", "hash") +## return len(hash) == 65 and cls._hash_regex.match(hash) is not None +## +## @classmethod +## def genconfig(cls): +## return None +## +## @classmethod +## def genhash(cls, secret, config): +## if config is not None and not cls.identify(config): +## raise uh.exc.InvalidHashError(cls) +## return cls.encrypt(secret) +## +## @classmethod +## def encrypt(cls, secret): +## return lmhash.encrypt(secret) + ":" + nthash.encrypt(secret) +## +## @classmethod +## def verify(cls, secret, hash): +## hash = to_unicode(hash, "ascii", "hash") +## m = cls._hash_regex.match(hash) +## if not m: +## raise uh.exc.InvalidHashError(cls) +## lm, nt = m.group("lm", "nt") +## # NOTE: verify against both in case encoding issue +## # causes one not to match. +## return lmhash.verify(secret, lm) or nthash.verify(secret, nt) + +#============================================================================= +# msdcc v1 +#============================================================================= +class msdcc(uh.HasUserContext, uh.StaticHandler): + """This class implements Microsoft's Domain Cached Credentials password hash, + and follows the :ref:`password-hash-api`. + + It has a fixed number of rounds, and uses the associated + username as the salt. 
+ + The :meth:`~passlib.ifc.PasswordHash.encrypt`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods + have the following optional keywords: + + :type user: str + :param user: + String containing name of user account this password is associated with. + This is required to properly calculate the hash. + + This keyword is case-insensitive, and should contain just the username + (e.g. ``Administrator``, not ``SOMEDOMAIN\\Administrator``). + + Note that while this class outputs lower-case hexidecimal digests, + it will accept upper-case digests as well. + """ + name = "msdcc" + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + return hexlify(self.raw(secret, self.user)).decode("ascii") + + @classmethod + def raw(cls, secret, user): + """encode password using mscash v1 algorithm + + :arg secret: secret as unicode or utf-8 encoded bytes + :arg user: username to use as salt + + :returns: returns string of raw bytes + """ + secret = to_unicode(secret, "utf-8", param="secret").encode("utf-16-le") + user = to_unicode(user, "utf-8", param="user").lower().encode("utf-16-le") + return md4(md4(secret).digest() + user).digest() + +#============================================================================= +# msdcc2 aka mscash2 +#============================================================================= +class msdcc2(uh.HasUserContext, uh.StaticHandler): + """This class implements version 2 of Microsoft's Domain Cached Credentials + password hash, and follows the :ref:`password-hash-api`. + + It has a fixed number of rounds, and uses the associated + username as the salt. + + The :meth:`~passlib.ifc.PasswordHash.encrypt`, :meth:`~passlib.ifc.PasswordHash.genhash`, and :meth:`~passlib.ifc.PasswordHash.verify` methods + have the following extra keyword: + + :type user: str + :param user: + String containing name of user account this password is associated with. + This is required to properly calculate the hash. + + This keyword is case-insensitive, and should contain just the username + (e.g. ``Administrator``, not ``SOMEDOMAIN\\Administrator``). + """ + name = "msdcc2" + checksum_chars = uh.HEX_CHARS + checksum_size = 32 + + @classmethod + def _norm_hash(cls, hash): + return hash.lower() + + def _calc_checksum(self, secret): + return hexlify(self.raw(secret, self.user)).decode("ascii") + + @classmethod + def raw(cls, secret, user): + """encode password using msdcc v2 algorithm + + :type secret: unicode or utf-8 bytes + :arg secret: secret + + :type user: str + :arg user: username to use as salt + + :returns: returns string of raw bytes + """ + from passlib.utils.pbkdf2 import pbkdf2 + secret = to_unicode(secret, "utf-8", param="secret").encode("utf-16-le") + user = to_unicode(user, "utf-8", param="user").lower().encode("utf-16-le") + tmp = md4(md4(secret).digest() + user).digest() + return pbkdf2(tmp, user, 10240, 16, 'hmac-sha1') + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/hash.py b/passlib/hash.py new file mode 100644 index 00000000..8f1b895f --- /dev/null +++ b/passlib/hash.py @@ -0,0 +1,28 @@ +"""passlib.hash - proxy object mapping hash scheme names -> handlers + +Note +==== +This module does not actually contain any hashes. This file +is a stub that replaces itself with a proxy object. 
+ +This proxy object (passlib.registry._PasslibRegistryProxy) +handles lazy-loading hashes as they are requested. + +The actual implementation of the various hashes is stored elsewhere, +mainly in the submodules of the ``passlib.handlers`` package. +""" + +# NOTE: could support 'non-lazy' version which just imports +# all schemes known to list_crypt_handlers() + +#============================================================================= +# import proxy object and replace this module +#============================================================================= + +from passlib.registry import _proxy +import sys +sys.modules[__name__] = _proxy + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/hosts.py b/passlib/hosts.py new file mode 100644 index 00000000..f6eb0076 --- /dev/null +++ b/passlib/hosts.py @@ -0,0 +1,115 @@ +"""passlib.hosts""" +#============================================================================= +# imports +#============================================================================= +# core +import sys +from warnings import warn +# pkg +from passlib.context import LazyCryptContext +from passlib.exc import PasslibRuntimeWarning +from passlib.registry import get_crypt_handler +from passlib.utils import has_crypt, unix_crypt_schemes +# local +__all__ = [ + "linux_context", "linux2_context", + "openbsd_context", + "netbsd_context", + "freebsd_context", + "host_context", +] + +#============================================================================= +# linux support +#============================================================================= + +# known platform names - linux2 + +linux_context = linux2_context = LazyCryptContext( + schemes = [ "sha512_crypt", "sha256_crypt", "md5_crypt", + "des_crypt", "unix_disabled" ], + deprecated = [ "des_crypt" ], + ) + +#============================================================================= +# bsd support +#============================================================================= + +# known platform names - +# freebsd2 +# freebsd3 +# freebsd4 +# freebsd5 +# freebsd6 +# freebsd7 +# +# netbsd1 + +# referencing source via -http://fxr.googlebit.com +# freebsd 6,7,8 - des, md5, bcrypt, bsd_nthash +# netbsd - des, ext, md5, bcrypt, sha1 +# openbsd - des, ext, md5, bcrypt + +freebsd_context = LazyCryptContext(["bcrypt", "md5_crypt", "bsd_nthash", + "des_crypt", "unix_disabled"]) + +openbsd_context = LazyCryptContext(["bcrypt", "md5_crypt", "bsdi_crypt", + "des_crypt", "unix_disabled"]) + +netbsd_context = LazyCryptContext(["bcrypt", "sha1_crypt", "md5_crypt", + "bsdi_crypt", "des_crypt", "unix_disabled"]) + +# XXX: include darwin in this list? it's got a BSD crypt variant, +# but that's not what it uses for user passwords. + +#============================================================================= +# current host +#============================================================================= +if has_crypt: + # NOTE: this is basically mimicking the output of os crypt(), + # except that it uses passlib's (usually stronger) default settings, + # and can be introspected and used much more flexibly.
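+
+ # rough usage sketch (ours, not part of the module): the contexts defined
+ # here behave like any other CryptContext, e.g.
+ #
+ #     >>> from passlib.hosts import linux_context
+ #     >>> h = linux_context.encrypt("password")   # sha512_crypt is listed first, so it is the default
+ #     >>> linux_context.verify("password", h)
+ #     True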
+ + def _iter_os_crypt_schemes(): + "helper which iterates over supported os_crypt schemes" + found = False + for name in unix_crypt_schemes: + handler = get_crypt_handler(name) + if handler.has_backend("os_crypt"): + found = True + yield name + if found: + # only offer disabled handler if there's another scheme in front, + # as this can't actually hash any passwords + yield "unix_disabled" + else: # pragma: no cover -- sanity check + # no idea what OS this could happen on... + warn("crypt.crypt() function is present, but doesn't support any " + "formats known to passlib!", PasslibRuntimeWarning) + + host_context = LazyCryptContext(_iter_os_crypt_schemes()) + +#============================================================================= +# other platforms +#============================================================================= + +# known platform strings - +# aix3 +# aix4 +# atheos +# beos5 +# darwin +# generic +# hp-ux11 +# irix5 +# irix6 +# mac +# next3 +# os2emx +# riscos +# sunos5 +# unixware7 + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/ifc.py b/passlib/ifc.py new file mode 100644 index 00000000..908890aa --- /dev/null +++ b/passlib/ifc.py @@ -0,0 +1,193 @@ +"""passlib.ifc - abstract interfaces used by Passlib""" +#============================================================================= +# imports +#============================================================================= +# core +import logging; log = logging.getLogger(__name__) +import sys +# site +# pkg +# local +__all__ = [ + "PasswordHash", +] + +#============================================================================= +# 2.5-3.2 compatibility helpers +#============================================================================= +if sys.version_info >= (2,6): + from abc import ABCMeta, abstractmethod, abstractproperty +else: + # create stub for python 2.5 + ABCMeta = type + def abstractmethod(func): + return func +# def abstractproperty(): +# return None + +def create_with_metaclass(meta): + "class decorator that re-creates class using metaclass" + # have to do things this way since abc not present in py25, + # and py2/py3 have different ways of doing metaclasses. + def builder(cls): + if meta is type(cls): + return cls + return meta(cls.__name__, cls.__bases__, cls.__dict__.copy()) + return builder + +#============================================================================= +# PasswordHash interface +#============================================================================= +class PasswordHash(object): + """This class describes an abstract interface which all password hashes + in Passlib adhere to. Under Python 2.6 and up, this is an actual + Abstract Base Class built using the :mod:`!abc` module. + + See the Passlib docs for full documentation. 
+ """ + #=================================================================== + # class attributes + #=================================================================== + + #--------------------------------------------------------------- + # general information + #--------------------------------------------------------------- + ##name + ##setting_kwds + ##context_kwds + + #--------------------------------------------------------------- + # salt information -- if 'salt' in setting_kwds + #--------------------------------------------------------------- + ##min_salt_size + ##max_salt_size + ##default_salt_size + ##salt_chars + ##default_salt_chars + + #--------------------------------------------------------------- + # rounds information -- if 'rounds' in setting_kwds + #--------------------------------------------------------------- + ##min_rounds + ##max_rounds + ##default_rounds + ##rounds_cost + + #--------------------------------------------------------------- + # encoding info -- if 'encoding' in context_kwds + #--------------------------------------------------------------- + ##default_encoding + + #=================================================================== + # primary methods + #=================================================================== + @classmethod + @abstractmethod + def encrypt(cls, secret, **setting_and_context_kwds): # pragma: no cover -- abstract method + "encrypt secret, returning resulting hash" + raise NotImplementedError("must be implemented by subclass") + + @classmethod + @abstractmethod + def verify(cls, secret, hash, **context_kwds): # pragma: no cover -- abstract method + "verify secret against hash, returns True/False" + raise NotImplementedError("must be implemented by subclass") + + #=================================================================== + # additional methods + #=================================================================== + @classmethod + @abstractmethod + def identify(cls, hash): # pragma: no cover -- abstract method + "check if hash belongs to this scheme, returns True/False" + raise NotImplementedError("must be implemented by subclass") + + @classmethod + @abstractmethod + def genconfig(cls, **setting_kwds): # pragma: no cover -- abstract method + "compile settings into a configuration string for genhash()" + raise NotImplementedError("must be implemented by subclass") + + @classmethod + @abstractmethod + def genhash(cls, secret, config, **context_kwds): # pragma: no cover -- abstract method + "generated hash for secret, using settings from config/hash string" + raise NotImplementedError("must be implemented by subclass") + + #=================================================================== + # undocumented methods / attributes + #=================================================================== + # the following entry points are used internally by passlib, + # and aren't documented as part of the exposed interface. + # they are subject to change between releases, + # but are documented here so there's a list of them *somewhere*. + + #--------------------------------------------------------------- + # checksum information - defined for many hashes + #--------------------------------------------------------------- + ## checksum_chars + ## checksum_size + + #--------------------------------------------------------------- + # CryptContext flags + #--------------------------------------------------------------- + + # hack for bsdi_crypt: if True, causes CryptContext to only generate + # odd rounds values. 
assumed False if not defined. + ## _avoid_even_rounds = False + + ##@classmethod + ##def _bind_needs_update(cls, **setting_kwds): + ## """return helper to detect hashes that need updating. + ## + ## if this method is defined, the CryptContext constructor + ## will invoke it with the settings specified for the context. + ## this method should return either ``None``, or a callable + ## with the signature ``needs_update(hash,secret)->bool``. + ## + ## this ``needs_update`` function should return True if the hash + ## should be re-encrypted, whether due to internal + ## issues or the specified settings. + ## + ## CryptContext will automatically take care of deprecating + ## hashes with insufficient rounds for classes which define fromstring() + ## and a rounds attribute - though the requirements for this last + ## part may change at some point. + ## """ + + #--------------------------------------------------------------- + # experimental methods + #--------------------------------------------------------------- + + ##@classmethod + ##def normhash(cls, hash): + ## """helper to clean up non-canonic instances of hash. + ## currently only provided by bcrypt() to fix an historical passlib issue. + ## """ + + # experimental helper to parse hash into components. + ##@classmethod + ##def parsehash(cls, hash, checksum=True, sanitize=False): + ## """helper to parse hash into components, returns dict""" + + # experiment helper to estimate bitsize of different hashes, + # implement for GenericHandler, but may be currently be off for some hashes. + # want to expand this into a way to programmatically compare + # "strengths" of different hashes and hash algorithms. + # still needs to have some factor for estimate relative cost per round, + # ala in the style of the scrypt whitepaper. + ##@classmethod + ##def bitsize(cls, **kwds): + ## """returns dict mapping component -> bits contributed. + ## components currently include checksum, salt, rounds. + ## """ + + #=================================================================== + # eoc + #=================================================================== + +PasswordHash = create_with_metaclass(ABCMeta)(PasswordHash) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/registry.py b/passlib/registry.py new file mode 100644 index 00000000..938bc5ec --- /dev/null +++ b/passlib/registry.py @@ -0,0 +1,411 @@ +"""passlib.registry - registry for password hash handlers""" +#============================================================================= +# imports +#============================================================================= +# core +import re +import logging; log = logging.getLogger(__name__) +from warnings import warn +# pkg +from passlib.exc import ExpectedTypeError, PasslibWarning +from passlib.utils import is_crypt_handler +# local +__all__ = [ + "register_crypt_handler_path", + "register_crypt_handler", + "get_crypt_handler", + "list_crypt_handlers", +] + +#============================================================================= +# proxy object used in place of 'passlib.hash' module +#============================================================================= +class _PasslibRegistryProxy(object): + """proxy module passlib.hash + + this module is in fact an object which lazy-loads + the requested password hash algorithm from wherever it has been stored. 
+ it acts as a thin wrapper around :func:`passlib.registry.get_crypt_handler`. + """ + __name__ = "passlib.hash" + __package__ = None + + def __getattr__(self, attr): + if attr.startswith("_"): + raise AttributeError("missing attribute: %r" % (attr,)) + handler = get_crypt_handler(attr, None) + if handler: + return handler + else: + raise AttributeError("unknown password hash: %r" % (attr,)) + + def __setattr__(self, attr, value): + if attr.startswith("_"): + # writing to private attributes should behave normally. + # (required so GAE can write to the __loader__ attribute). + object.__setattr__(self, attr, value) + else: + # writing to public attributes should be treated + # as attempting to register a handler. + register_crypt_handler(value, _attr=attr) + + def __repr__(self): + return "" + + def __dir__(self): + # this adds in lazy-loaded handler names, + # otherwise this is the standard dir() implementation. + attrs = set(dir(self.__class__)) + attrs.update(self.__dict__) + attrs.update(_locations) + return sorted(attrs) + +# create single instance - available publically as 'passlib.hash' +_proxy = _PasslibRegistryProxy() + +#============================================================================= +# internal registry state +#============================================================================= + +# singleton uses to detect omitted keywords +_UNSET = object() + +# dict mapping name -> loaded handlers (just uses proxy object's internal dict) +_handlers = _proxy.__dict__ + +# dict mapping names -> import path for lazy loading. +# * import path should be "module.path" or "module.path:attr" +# * if attr omitted, "name" used as default. +_locations = dict( + # NOTE: this is a hardcoded list of the handlers built into passlib, + # applications should call register_crypt_handler_path() + apr_md5_crypt = "passlib.handlers.md5_crypt", + atlassian_pbkdf2_sha1 = "passlib.handlers.pbkdf2", + bcrypt = "passlib.handlers.bcrypt", + bcrypt_sha256 = "passlib.handlers.bcrypt", + bigcrypt = "passlib.handlers.des_crypt", + bsd_nthash = "passlib.handlers.windows", + bsdi_crypt = "passlib.handlers.des_crypt", + cisco_pix = "passlib.handlers.cisco", + cisco_type7 = "passlib.handlers.cisco", + cta_pbkdf2_sha1 = "passlib.handlers.pbkdf2", + crypt16 = "passlib.handlers.des_crypt", + des_crypt = "passlib.handlers.des_crypt", + django_bcrypt = "passlib.handlers.django", + django_bcrypt_sha256 = "passlib.handlers.django", + django_pbkdf2_sha256 = "passlib.handlers.django", + django_pbkdf2_sha1 = "passlib.handlers.django", + django_salted_sha1 = "passlib.handlers.django", + django_salted_md5 = "passlib.handlers.django", + django_des_crypt = "passlib.handlers.django", + django_disabled = "passlib.handlers.django", + dlitz_pbkdf2_sha1 = "passlib.handlers.pbkdf2", + fshp = "passlib.handlers.fshp", + grub_pbkdf2_sha512 = "passlib.handlers.pbkdf2", + hex_md4 = "passlib.handlers.digests", + hex_md5 = "passlib.handlers.digests", + hex_sha1 = "passlib.handlers.digests", + hex_sha256 = "passlib.handlers.digests", + hex_sha512 = "passlib.handlers.digests", + htdigest = "passlib.handlers.digests", + ldap_plaintext = "passlib.handlers.ldap_digests", + ldap_md5 = "passlib.handlers.ldap_digests", + ldap_sha1 = "passlib.handlers.ldap_digests", + ldap_hex_md5 = "passlib.handlers.roundup", + ldap_hex_sha1 = "passlib.handlers.roundup", + ldap_salted_md5 = "passlib.handlers.ldap_digests", + ldap_salted_sha1 = "passlib.handlers.ldap_digests", + ldap_des_crypt = "passlib.handlers.ldap_digests", + ldap_bsdi_crypt = 
"passlib.handlers.ldap_digests", + ldap_md5_crypt = "passlib.handlers.ldap_digests", + ldap_bcrypt = "passlib.handlers.ldap_digests", + ldap_sha1_crypt = "passlib.handlers.ldap_digests", + ldap_sha256_crypt = "passlib.handlers.ldap_digests", + ldap_sha512_crypt = "passlib.handlers.ldap_digests", + ldap_pbkdf2_sha1 = "passlib.handlers.pbkdf2", + ldap_pbkdf2_sha256 = "passlib.handlers.pbkdf2", + ldap_pbkdf2_sha512 = "passlib.handlers.pbkdf2", + lmhash = "passlib.handlers.windows", + md5_crypt = "passlib.handlers.md5_crypt", + msdcc = "passlib.handlers.windows", + msdcc2 = "passlib.handlers.windows", + mssql2000 = "passlib.handlers.mssql", + mssql2005 = "passlib.handlers.mssql", + mysql323 = "passlib.handlers.mysql", + mysql41 = "passlib.handlers.mysql", + nthash = "passlib.handlers.windows", + oracle10 = "passlib.handlers.oracle", + oracle11 = "passlib.handlers.oracle", + pbkdf2_sha1 = "passlib.handlers.pbkdf2", + pbkdf2_sha256 = "passlib.handlers.pbkdf2", + pbkdf2_sha512 = "passlib.handlers.pbkdf2", + phpass = "passlib.handlers.phpass", + plaintext = "passlib.handlers.misc", + postgres_md5 = "passlib.handlers.postgres", + roundup_plaintext = "passlib.handlers.roundup", + scram = "passlib.handlers.scram", + sha1_crypt = "passlib.handlers.sha1_crypt", + sha256_crypt = "passlib.handlers.sha2_crypt", + sha512_crypt = "passlib.handlers.sha2_crypt", + sun_md5_crypt = "passlib.handlers.sun_md5_crypt", + unix_disabled = "passlib.handlers.misc", + unix_fallback = "passlib.handlers.misc", +) + +# master regexp for detecting valid handler names +_name_re = re.compile("^[a-z][a-z0-9_]+[a-z0-9]$") + +# names which aren't allowed for various reasons +# (mainly keyword conflicts in CryptContext) +_forbidden_names = frozenset(["onload", "policy", "context", "all", + "default", "none", "auto"]) + +#============================================================================= +# registry frontend functions +#============================================================================= +def _validate_handler_name(name): + """helper to validate handler name + + :raises ValueError: + * if empty name + * if name not lower case + * if name contains double underscores + * if name is reserved (e.g. ``context``, ``all``). + """ + if not name: + raise ValueError("handler name cannot be empty: %r" % (name,)) + if name.lower() != name: + raise ValueError("name must be lower-case: %r" % (name,)) + if not _name_re.match(name): + raise ValueError("invalid name (must be 3+ characters, " + " begin with a-z, and contain only underscore, a-z, " + "0-9): %r" % (name,)) + if '__' in name: + raise ValueError("name may not contain double-underscores: %r" % + (name,)) + if name in _forbidden_names: + raise ValueError("that name is not allowed: %r" % (name,)) + return True + +def register_crypt_handler_path(name, path): + """register location to lazy-load handler when requested. + + custom hashes may be registered via :func:`register_crypt_handler`, + or they may be registered by this function, + which will delay actually importing and loading the handler + until a call to :func:`get_crypt_handler` is made for the specified name. + + :arg name: name of handler + :arg path: module import path + + the specified module path should contain a password hash handler + called :samp:`{name}`, or the path may contain a colon, + specifying the module and module attribute to use. 
+ for example, the following would cause ``get_handler("myhash")`` to look + for a class named ``myhash`` within the ``myapp.helpers`` module:: + + >>> from passlib.registry import register_crypt_handler_path + >>> register_crypt_handler_path("myhash", "myapp.helpers") + + ...while this form would cause ``get_handler("myhash")`` to look + for a class named ``MyHash`` within the ``myapp.helpers`` module:: + + >>> from passlib.registry import register_crypt_handler_path + >>> register_crypt_handler_path("myhash", "myapp.helpers:MyHash") + """ + # validate name + _validate_handler_name(name) + + # validate path + if path.startswith("."): + raise ValueError("path cannot start with '.'") + if ':' in path: + if path.count(':') > 1: + raise ValueError("path cannot have more than one ':'") + if path.find('.', path.index(':')) > -1: + raise ValueError("path cannot have '.' to right of ':'") + + # store location + _locations[name] = path + log.debug("registered path to %r handler: %r", name, path) + +def register_crypt_handler(handler, force=False, _attr=None): + """register password hash handler. + + this method immediately registers a handler with the internal passlib registry, + so that it will be returned by :func:`get_crypt_handler` when requested. + + :arg handler: the password hash handler to register + :param force: force override of existing handler (defaults to False) + :param _attr: + [internal kwd] if specified, ensures ``handler.name`` + matches this value, or raises :exc:`ValueError`. + + :raises TypeError: + if the specified object does not appear to be a valid handler. + + :raises ValueError: + if the specified object's name (or other required attributes) + contain invalid values. + + :raises KeyError: + if a (different) handler was already registered with + the same name, and ``force=True`` was not specified. + """ + # validate handler + if not is_crypt_handler(handler): + raise ExpectedTypeError(handler, "password hash handler", "handler") + if not handler: + raise AssertionError("``bool(handler)`` must be True") + + # validate name + name = handler.name + _validate_handler_name(name) + if _attr and _attr != name: + raise ValueError("handlers must be stored only under their own name") + + # check for existing handler + other = _handlers.get(name) + if other: + if other is handler: + log.debug("same %r handler already registered: %r", name, handler) + return + elif force: + log.warning("overriding previously registered %r handler: %r", + name, other) + else: + raise KeyError("another %r handler has already been registered: %r" % + (name, other)) + + # register handler + _handlers[name] = handler + log.debug("registered %r handler: %r", name, handler) + +def get_crypt_handler(name, default=_UNSET): + """return handler for specified password hash scheme. + + this method looks up a handler for the specified scheme. + if the handler is not already loaded, + it checks if the location is known, and loads it first. + + :arg name: name of handler to return + :param default: optional default value to return if no handler with specified name is found. + + :raises KeyError: if no handler matching that name is found, and no default specified, a KeyError will be raised. + + :returns: handler attached to name, or default value (if specified). + """ + # catch invalid names before we check _handlers, + # since it's a module dict, and exposes things like __package__, etc.
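+ # (usage sketch, ours) the typical lazy-load flow looks like:
+ #     >>> from passlib.registry import get_crypt_handler
+ #     >>> get_crypt_handler("sha512_crypt")   # first call imports passlib.handlers.sha2_crypt
+ #     <class 'passlib.handlers.sha2_crypt.sha512_crypt'>
+ # the guard below rejects private names before touching the handler dict.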
+ if name.startswith("_"): + if default is _UNSET: + raise KeyError("invalid handler name: %r" % (name,)) + else: + return default + + # check if handler is already loaded + try: + return _handlers[name] + except KeyError: + pass + + # normalize name (and if changed, check dict again) + assert isinstance(name, str), "name must be str instance" + alt = name.replace("-","_").lower() + if alt != name: + warn("handler names should be lower-case, and use underscores instead " + "of hyphens: %r => %r" % (name, alt), PasslibWarning, + stacklevel=2) + name = alt + + # try to load using new name + try: + return _handlers[name] + except KeyError: + pass + + # check if lazy load mapping has been specified for this driver + path = _locations.get(name) + if path: + if ':' in path: + modname, modattr = path.split(":") + else: + modname, modattr = path, name + ##log.debug("loading %r handler from path: '%s:%s'", name, modname, modattr) + + # try to load the module - any import errors indicate runtime config, usually + # either missing package, or bad path provided to register_crypt_handler_path() + mod = __import__(modname, fromlist=[modattr], level=0) + + # first check if importing module triggered register_crypt_handler(), + # (this is discouraged due to it's magical implicitness) + handler = _handlers.get(name) + if handler: + # XXX: issue deprecation warning here? + assert is_crypt_handler(handler), "unexpected object: name=%r object=%r" % (name, handler) + return handler + + # then get real handler & register it + handler = getattr(mod, modattr) + register_crypt_handler(handler, _attr=name) + return handler + + # fail! + if default is _UNSET: + raise KeyError("no crypt handler found for algorithm: %r" % (name,)) + else: + return default + +def list_crypt_handlers(loaded_only=False): + """return sorted list of all known crypt handler names. + + :param loaded_only: if ``True``, only returns names of handlers which have actually been loaded. + + :returns: list of names of all known handlers + """ + names = set(_handlers) + if not loaded_only: + names.update(_locations) + # strip private attrs out of namespace and sort. + # TODO: make _handlers a separate list, so we don't have module namespace mixed in. + return sorted(name for name in names if not name.startswith("_")) + +# NOTE: these two functions mainly exist just for the unittests... + +def _has_crypt_handler(name, loaded_only=False): + """check if handler name is known. + + this is only useful for two cases: + + * quickly checking if handler has already been loaded + * checking if handler exists, without actually loading it + + :arg name: name of handler + :param loaded_only: if ``True``, returns False if handler exists but hasn't been loaded + """ + return (name in _handlers) or (not loaded_only and name in _locations) + +def _unload_handler_name(name, locations=True): + """unloads a handler from the registry. + + .. warning:: + + this is an internal function, + used only by the unittests. + + if loaded handler is found with specified name, it's removed. + if path to lazy load handler is found, its' removed. + + missing names are a noop. 
+ + :arg name: name of handler to unload + :param locations: if False, won't purge registered handler locations (default True) + """ + if name in _handlers: + del _handlers[name] + if locations and name in _locations: + del _locations[name] + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/__init__.py b/passlib/tests/__init__.py new file mode 100644 index 00000000..389da76e --- /dev/null +++ b/passlib/tests/__init__.py @@ -0,0 +1 @@ +"""passlib tests""" diff --git a/passlib/tests/__main__.py b/passlib/tests/__main__.py new file mode 100644 index 00000000..24245768 --- /dev/null +++ b/passlib/tests/__main__.py @@ -0,0 +1,6 @@ +import os +from nose import run +run( + defaultTest=os.path.dirname(__file__), +) + diff --git a/passlib/tests/_test_bad_register.py b/passlib/tests/_test_bad_register.py new file mode 100644 index 00000000..26cc6bbb --- /dev/null +++ b/passlib/tests/_test_bad_register.py @@ -0,0 +1,15 @@ +"helper for method in test_registry.py" +from passlib.registry import register_crypt_handler +import passlib.utils.handlers as uh + +class dummy_bad(uh.StaticHandler): + name = "dummy_bad" + +class alt_dummy_bad(uh.StaticHandler): + name = "dummy_bad" + +# NOTE: if passlib.tests is being run from symlink (e.g. via gaeunit), +# this module may be imported a second time as test._test_bad_registry. +# we don't want it to do anything in that case. +if __name__.startswith("passlib.tests"): + register_crypt_handler(alt_dummy_bad) diff --git a/passlib/tests/backports.py b/passlib/tests/backports.py new file mode 100644 index 00000000..58ce18f8 --- /dev/null +++ b/passlib/tests/backports.py @@ -0,0 +1,329 @@ +"""backports of needed unittest2 features""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import re +import sys +##from warnings import warn +# site +# pkg +from passlib.utils.compat import base_string_types +# local +__all__ = [ + "TestCase", + "skip", "skipIf", "skipUnless" + "catch_warnings", +] + +#============================================================================= +# import latest unittest module available +#============================================================================= +try: + import unittest2 as unittest + ut_version = 2 +except ImportError: + import unittest + if sys.version_info < (2,7) or (3,0) <= sys.version_info < (3,2): + # older versions of python will need to install the unittest2 + # backport (named unittest2_3k for 3.0/3.1) + ##warn("please install unittest2 for python %d.%d, it will be required " + ## "as of passlib 1.x" % sys.version_info[:2]) + ut_version = 1 + else: + ut_version = 2 + +#============================================================================= +# backport SkipTest support using nose +#============================================================================= +if ut_version < 2: + # used to provide replacement SkipTest() error + from nose.plugins.skip import SkipTest + + # hack up something to simulate skip() decorator + import functools + def skip(reason): + def decorator(test_item): + if isinstance(test_item, type) and issubclass(test_item, unittest.TestCase): + class skip_wrapper(test_item): + def setUp(self): + raise SkipTest(reason) + else: + 
@functools.wraps(test_item) + def skip_wrapper(*args, **kwargs): + raise SkipTest(reason) + return skip_wrapper + return decorator + + def skipIf(condition, reason): + if condition: + return skip(reason) + else: + return lambda item: item + + def skipUnless(condition, reason): + if condition: + return lambda item: item + else: + return skip(reason) + +else: + skip = unittest.skip + skipIf = unittest.skipIf + skipUnless = unittest.skipUnless + +#============================================================================= +# custom test harness +#============================================================================= +class TestCase(unittest.TestCase): + """backports a number of unittest2 features in TestCase""" + #=================================================================== + # backport some methods from unittest2 + #=================================================================== + if ut_version < 2: + + #---------------------------------------------------------------- + # simplistic backport of addCleanup() framework + #---------------------------------------------------------------- + _cleanups = None + + def addCleanup(self, function, *args, **kwds): + queue = self._cleanups + if queue is None: + queue = self._cleanups = [] + queue.append((function, args, kwds)) + + def doCleanups(self): + queue = self._cleanups + while queue: + func, args, kwds = queue.pop() + func(*args, **kwds) + + def tearDown(self): + self.doCleanups() + unittest.TestCase.tearDown(self) + + #---------------------------------------------------------------- + # backport skipTest (requires nose to work) + #---------------------------------------------------------------- + def skipTest(self, reason): + raise SkipTest(reason) + + #---------------------------------------------------------------- + # backport various assert tests added in unittest2 + #---------------------------------------------------------------- + def assertIs(self, real, correct, msg=None): + if real is not correct: + std = "got %r, expected would be %r" % (real, correct) + msg = self._formatMessage(msg, std) + raise self.failureException(msg) + + def assertIsNot(self, real, correct, msg=None): + if real is correct: + std = "got %r, expected would not be %r" % (real, correct) + msg = self._formatMessage(msg, std) + raise self.failureException(msg) + + def assertIsInstance(self, obj, klass, msg=None): + if not isinstance(obj, klass): + std = "got %r, expected instance of %r" % (obj, klass) + msg = self._formatMessage(msg, std) + raise self.failureException(msg) + + def assertAlmostEqual(self, first, second, places=None, msg=None, delta=None): + """Fail if the two objects are unequal as determined by their + difference rounded to the given number of decimal places + (default 7) and comparing to zero, or by comparing that the + between the two objects is more than the given delta. + + Note that decimal places (from zero) are usually not the same + as significant digits (measured from the most signficant digit). + + If the two objects compare equal then they will automatically + compare almost equal. 
+ """ + if first == second: + # shortcut + return + if delta is not None and places is not None: + raise TypeError("specify delta or places not both") + + if delta is not None: + if abs(first - second) <= delta: + return + + standardMsg = '%s != %s within %s delta' % (repr(first), + repr(second), + repr(delta)) + else: + if places is None: + places = 7 + + if round(abs(second-first), places) == 0: + return + + standardMsg = '%s != %s within %r places' % (repr(first), + repr(second), + places) + msg = self._formatMessage(msg, standardMsg) + raise self.failureException(msg) + + def assertLess(self, left, right, msg=None): + if left >= right: + std = "%r not less than %r" % (left, right) + raise self.failureException(self._formatMessage(msg, std)) + + def assertGreater(self, left, right, msg=None): + if left <= right: + std = "%r not greater than %r" % (left, right) + raise self.failureException(self._formatMessage(msg, std)) + + def assertGreaterEqual(self, left, right, msg=None): + if left < right: + std = "%r less than %r" % (left, right) + raise self.failureException(self._formatMessage(msg, std)) + + def assertIn(self, elem, container, msg=None): + if elem not in container: + std = "%r not found in %r" % (elem, container) + raise self.failureException(self._formatMessage(msg, std)) + + def assertNotIn(self, elem, container, msg=None): + if elem in container: + std = "%r unexpectedly in %r" % (elem, container) + raise self.failureException(self._formatMessage(msg, std)) + + #---------------------------------------------------------------- + # override some unittest1 methods to support _formatMessage + #---------------------------------------------------------------- + def assertEqual(self, real, correct, msg=None): + if real != correct: + std = "got %r, expected would equal %r" % (real, correct) + msg = self._formatMessage(msg, std) + raise self.failureException(msg) + + def assertNotEqual(self, real, correct, msg=None): + if real == correct: + std = "got %r, expected would not equal %r" % (real, correct) + msg = self._formatMessage(msg, std) + raise self.failureException(msg) + + #--------------------------------------------------------------- + # backport assertRegex() alias from 3.2 to 2.7/3.1 + #--------------------------------------------------------------- + if not hasattr(unittest.TestCase, "assertRegex"): + if hasattr(unittest.TestCase, "assertRegexpMatches"): + # was present in 2.7/3.1 under name assertRegexpMatches + assertRegex = unittest.TestCase.assertRegexpMatches + else: + # 3.0 and <= 2.6 didn't have this method at all + def assertRegex(self, text, expected_regex, msg=None): + """Fail the test unless the text matches the regular expression.""" + if isinstance(expected_regex, base_string_types): + assert expected_regex, "expected_regex must not be empty." + expected_regex = re.compile(expected_regex) + if not expected_regex.search(text): + msg = msg or "Regex didn't match: " + std = '%r not found in %r' % (msg, expected_regex.pattern, text) + raise self.failureException(self._formatMessage(msg, std)) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# backport catch_warnings +#============================================================================= +try: + from warnings import catch_warnings +except ImportError: + # catch_warnings wasn't added until py26. 
+    # this adds backported copy from py26's stdlib
+    # so we can use it under py25.
+
+    class WarningMessage(object):
+
+        """Holds the result of a single showwarning() call."""
+
+        _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
+                            "line")
+
+        def __init__(self, message, category, filename, lineno, file=None,
+                     line=None):
+            local_values = locals()
+            for attr in self._WARNING_DETAILS:
+                setattr(self, attr, local_values[attr])
+            self._category_name = category.__name__ if category else None
+
+        def __str__(self):
+            return ("{message : %r, category : %r, filename : %r, lineno : %s, "
+                    "line : %r}" % (self.message, self._category_name,
+                                    self.filename, self.lineno, self.line))
+
+
+    class catch_warnings(object):
+
+        """A context manager that copies and restores the warnings filter upon
+        exiting the context.
+
+        The 'record' argument specifies whether warnings should be captured by a
+        custom implementation of warnings.showwarning() and be appended to a list
+        returned by the context manager. Otherwise None is returned by the context
+        manager. The objects appended to the list are arguments whose attributes
+        mirror the arguments to showwarning().
+
+        The 'module' argument is to specify an alternative module to the module
+        named 'warnings' and imported under that name. This argument is only useful
+        when testing the warnings module itself.
+
+        """
+
+        def __init__(self, record=False, module=None):
+            """Specify whether to record warnings and if an alternative module
+            should be used other than sys.modules['warnings'].
+
+            For compatibility with Python 3.0, please consider all arguments to be
+            keyword-only.
+
+            """
+            self._record = record
+            self._module = sys.modules['warnings'] if module is None else module
+            self._entered = False
+
+        def __repr__(self):
+            args = []
+            if self._record:
+                args.append("record=True")
+            if self._module is not sys.modules['warnings']:
+                args.append("module=%r" % self._module)
+            name = type(self).__name__
+            return "%s(%s)" % (name, ", ".join(args))
+
+        def __enter__(self):
+            if self._entered:
+                raise RuntimeError("Cannot enter %r twice" % self)
+            self._entered = True
+            self._filters = self._module.filters
+            self._module.filters = self._filters[:]
+            self._showwarning = self._module.showwarning
+            if self._record:
+                log = []
+                def showwarning(*args, **kwargs):
+#                    self._showwarning(*args, **kwargs)
+                    log.append(WarningMessage(*args, **kwargs))
+                self._module.showwarning = showwarning
+                return log
+            else:
+                return None
+
+        def __exit__(self, *exc_info):
+            if not self._entered:
+                raise RuntimeError("Cannot exit %r without entering first" % self)
+            self._module.filters = self._filters
+            self._module.showwarning = self._showwarning
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/passlib/tests/sample1.cfg b/passlib/tests/sample1.cfg
new file mode 100644
index 00000000..c90ba83b
--- /dev/null
+++ b/passlib/tests/sample1.cfg
@@ -0,0 +1,9 @@
+[passlib]
+schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt
+default = md5_crypt
+all__vary_rounds = 0.1
+bsdi_crypt__default_rounds = 25000
+bsdi_crypt__max_rounds = 30000
+sha512_crypt__max_rounds = 50000
+sha512_crypt__min_rounds = 40000
+
diff --git a/passlib/tests/sample1b.cfg b/passlib/tests/sample1b.cfg
new file mode 100644
index 00000000..c90ba83b
--- /dev/null
+++ b/passlib/tests/sample1b.cfg
@@ -0,0 +1,9 @@
+[passlib]
+schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt
+default = md5_crypt
+all__vary_rounds = 0.1
+bsdi_crypt__default_rounds = 25000
+bsdi_crypt__max_rounds = 30000
+sha512_crypt__max_rounds = 50000
+sha512_crypt__min_rounds = 40000
+
diff --git a/passlib/tests/sample1c.cfg b/passlib/tests/sample1c.cfg
new file mode 100644
index 00000000..c58ce0e3
Binary files /dev/null and b/passlib/tests/sample1c.cfg differ
diff --git a/passlib/tests/sample_config_1s.cfg b/passlib/tests/sample_config_1s.cfg
new file mode 100644
index 00000000..495a13ea
--- /dev/null
+++ b/passlib/tests/sample_config_1s.cfg
@@ -0,0 +1,8 @@
+[passlib]
+schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt
+default = md5_crypt
+all.vary_rounds = 10%%
+bsdi_crypt.max_rounds = 30000
+bsdi_crypt.default_rounds = 25000
+sha512_crypt.max_rounds = 50000
+sha512_crypt.min_rounds = 40000
diff --git a/passlib/tests/test_apache.py b/passlib/tests/test_apache.py
new file mode 100644
index 00000000..68a992ff
--- /dev/null
+++ b/passlib/tests/test_apache.py
@@ -0,0 +1,564 @@
+"""tests for passlib.apache -- (c) Assurance Technologies 2008-2011"""
+#=============================================================================
+# imports
+#=============================================================================
+from __future__ import with_statement
+# core
+import hashlib
+from logging import getLogger
+import os
+import time
+# site
+# pkg
+from passlib import apache
+from passlib.utils.compat import irange, unicode
+from passlib.tests.utils import TestCase, get_file, set_file, catch_warnings, ensure_mtime_changed
+from passlib.utils.compat import b, bytes, u
+# module
+log = getLogger(__name__)
+
+def backdate_file_mtime(path, offset=10):
+    "backdate file's mtime by specified amount"
+    # NOTE: this is used so we can test code which detects mtime changes,
+    # without having to actually *pause* for that long.
+ atime = os.path.getatime(path) + mtime = os.path.getmtime(path)-offset + os.utime(path, (atime, mtime)) + +#============================================================================= +# htpasswd +#============================================================================= +class HtpasswdFileTest(TestCase): + "test HtpasswdFile class" + descriptionPrefix = "HtpasswdFile" + + # sample with 4 users + sample_01 = b('user2:2CHkkwa2AtqGs\n' + 'user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\n' + 'user4:pass4\n' + 'user1:$apr1$t4tc7jTh$GPIWVUo8sQKJlUdV8V5vu0\n') + + # sample 1 with user 1, 2 deleted; 4 changed + sample_02 = b('user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\nuser4:pass4\n') + + # sample 1 with user2 updated, user 1 first entry removed, and user 5 added + sample_03 = b('user2:pass2x\n' + 'user3:{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=\n' + 'user4:pass4\n' + 'user1:$apr1$t4tc7jTh$GPIWVUo8sQKJlUdV8V5vu0\n' + 'user5:pass5\n') + + # standalone sample with 8-bit username + sample_04_utf8 = b('user\xc3\xa6:2CHkkwa2AtqGs\n') + sample_04_latin1 = b('user\xe6:2CHkkwa2AtqGs\n') + + sample_dup = b('user1:pass1\nuser1:pass2\n') + + def test_00_constructor_autoload(self): + "test constructor autoload" + # check with existing file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtpasswdFile(path) + self.assertEqual(ht.to_string(), self.sample_01) + self.assertEqual(ht.path, path) + self.assertTrue(ht.mtime) + + # check changing path + ht.path = path + "x" + self.assertEqual(ht.path, path + "x") + self.assertFalse(ht.mtime) + + # check new=True + ht = apache.HtpasswdFile(path, new=True) + self.assertEqual(ht.to_string(), b("")) + self.assertEqual(ht.path, path) + self.assertFalse(ht.mtime) + + # check autoload=False (deprecated alias for new=True) + with self.assertWarningList("``autoload=False`` is deprecated"): + ht = apache.HtpasswdFile(path, autoload=False) + self.assertEqual(ht.to_string(), b("")) + self.assertEqual(ht.path, path) + self.assertFalse(ht.mtime) + + # check missing file + os.remove(path) + self.assertRaises(IOError, apache.HtpasswdFile, path) + + # NOTE: "default_scheme" option checked via set_password() test, among others + + def test_00_from_path(self): + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtpasswdFile.from_path(path) + self.assertEqual(ht.to_string(), self.sample_01) + self.assertEqual(ht.path, None) + self.assertFalse(ht.mtime) + + def test_01_delete(self): + "test delete()" + ht = apache.HtpasswdFile.from_string(self.sample_01) + self.assertTrue(ht.delete("user1")) # should delete both entries + self.assertTrue(ht.delete("user2")) + self.assertFalse(ht.delete("user5")) # user not present + self.assertEqual(ht.to_string(), self.sample_02) + + # invalid user + self.assertRaises(ValueError, ht.delete, "user:") + + def test_01_delete_autosave(self): + path = self.mktemp() + sample = b('user1:pass1\nuser2:pass2\n') + set_file(path, sample) + + ht = apache.HtpasswdFile(path) + ht.delete("user1") + self.assertEqual(get_file(path), sample) + + ht = apache.HtpasswdFile(path, autosave=True) + ht.delete("user1") + self.assertEqual(get_file(path), b("user2:pass2\n")) + + def test_02_set_password(self): + "test set_password()" + ht = apache.HtpasswdFile.from_string( + self.sample_01, default_scheme="plaintext") + self.assertTrue(ht.set_password("user2", "pass2x")) + self.assertFalse(ht.set_password("user5", "pass5")) + self.assertEqual(ht.to_string(), self.sample_03) + + # test legacy default kwd + with self.assertWarningList("``default`` is 
deprecated"): + ht = apache.HtpasswdFile.from_string(self.sample_01, default="plaintext") + self.assertTrue(ht.set_password("user2", "pass2x")) + self.assertFalse(ht.set_password("user5", "pass5")) + self.assertEqual(ht.to_string(), self.sample_03) + + # invalid user + self.assertRaises(ValueError, ht.set_password, "user:", "pass") + + # test that legacy update() still works + with self.assertWarningList("update\(\) is deprecated"): + ht.update("user2", "test") + self.assertTrue(ht.check_password("user2", "test")) + + def test_02_set_password_autosave(self): + path = self.mktemp() + sample = b('user1:pass1\n') + set_file(path, sample) + + ht = apache.HtpasswdFile(path) + ht.set_password("user1", "pass2") + self.assertEqual(get_file(path), sample) + + ht = apache.HtpasswdFile(path, default_scheme="plaintext", autosave=True) + ht.set_password("user1", "pass2") + self.assertEqual(get_file(path), b("user1:pass2\n")) + + def test_03_users(self): + "test users()" + ht = apache.HtpasswdFile.from_string(self.sample_01) + ht.set_password("user5", "pass5") + ht.delete("user3") + ht.set_password("user3", "pass3") + self.assertEqual(ht.users(), ["user2", "user4", "user1", "user5", + "user3"]) + + def test_04_check_password(self): + "test check_password()" + ht = apache.HtpasswdFile.from_string(self.sample_01) + self.assertRaises(TypeError, ht.check_password, 1, 'pass5') + self.assertTrue(ht.check_password("user5","pass5") is None) + for i in irange(1,5): + i = str(i) + self.assertTrue(ht.check_password("user"+i, "pass"+i)) + self.assertTrue(ht.check_password("user"+i, "pass5") is False) + + self.assertRaises(ValueError, ht.check_password, "user:", "pass") + + # test that legacy verify() still works + with self.assertWarningList(["verify\(\) is deprecated"]*2): + self.assertTrue(ht.verify("user1", "pass1")) + self.assertFalse(ht.verify("user1", "pass2")) + + def test_05_load(self): + "test load()" + # setup empty file + path = self.mktemp() + set_file(path, "") + backdate_file_mtime(path, 5) + ha = apache.HtpasswdFile(path, default_scheme="plaintext") + self.assertEqual(ha.to_string(), b("")) + + # make changes, check load_if_changed() does nothing + ha.set_password("user1", "pass1") + ha.load_if_changed() + self.assertEqual(ha.to_string(), b("user1:pass1\n")) + + # change file + set_file(path, self.sample_01) + ha.load_if_changed() + self.assertEqual(ha.to_string(), self.sample_01) + + # make changes, check load() overwrites them + ha.set_password("user5", "pass5") + ha.load() + self.assertEqual(ha.to_string(), self.sample_01) + + # test load w/ no path + hb = apache.HtpasswdFile() + self.assertRaises(RuntimeError, hb.load) + self.assertRaises(RuntimeError, hb.load_if_changed) + + # test load w/ dups and explicit path + set_file(path, self.sample_dup) + hc = apache.HtpasswdFile() + hc.load(path) + self.assertTrue(hc.check_password('user1','pass1')) + + # NOTE: load_string() tested via from_string(), which is used all over this file + + def test_06_save(self): + "test save()" + # load from file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtpasswdFile(path) + + # make changes, check they saved + ht.delete("user1") + ht.delete("user2") + ht.save() + self.assertEqual(get_file(path), self.sample_02) + + # test save w/ no path + hb = apache.HtpasswdFile(default_scheme="plaintext") + hb.set_password("user1", "pass1") + self.assertRaises(RuntimeError, hb.save) + + # test save w/ explicit path + hb.save(path) + self.assertEqual(get_file(path), b("user1:pass1\n")) + + def 
test_07_encodings(self): + "test 'encoding' kwd" + # test bad encodings cause failure in constructor + self.assertRaises(ValueError, apache.HtpasswdFile, encoding="utf-16") + + # check sample utf-8 + ht = apache.HtpasswdFile.from_string(self.sample_04_utf8, encoding="utf-8", + return_unicode=True) + self.assertEqual(ht.users(), [ u("user\u00e6") ]) + + # test deprecated encoding=None + with self.assertWarningList("``encoding=None`` is deprecated"): + ht = apache.HtpasswdFile.from_string(self.sample_04_utf8, encoding=None) + self.assertEqual(ht.users(), [ b('user\xc3\xa6') ]) + + # check sample latin-1 + ht = apache.HtpasswdFile.from_string(self.sample_04_latin1, + encoding="latin-1", return_unicode=True) + self.assertEqual(ht.users(), [ u("user\u00e6") ]) + + def test_08_get_hash(self): + "test get_hash()" + ht = apache.HtpasswdFile.from_string(self.sample_01) + self.assertEqual(ht.get_hash("user3"), b("{SHA}3ipNV1GrBtxPmHFC21fCbVCSXIo=")) + self.assertEqual(ht.get_hash("user4"), b("pass4")) + self.assertEqual(ht.get_hash("user5"), None) + + with self.assertWarningList("find\(\) is deprecated"): + self.assertEqual(ht.find("user4"), b("pass4")) + + def test_09_to_string(self): + "test to_string" + + # check with known sample + ht = apache.HtpasswdFile.from_string(self.sample_01) + self.assertEqual(ht.to_string(), self.sample_01) + + # test blank + ht = apache.HtpasswdFile() + self.assertEqual(ht.to_string(), b("")) + + def test_10_repr(self): + ht = apache.HtpasswdFile("fakepath", autosave=True, new=True, encoding="latin-1") + repr(ht) + + def test_11_malformed(self): + self.assertRaises(ValueError, apache.HtpasswdFile.from_string, + b('realm:user1:pass1\n')) + self.assertRaises(ValueError, apache.HtpasswdFile.from_string, + b('pass1\n')) + + def test_12_from_string(self): + # forbid path kwd + self.assertRaises(TypeError, apache.HtpasswdFile.from_string, + b(''), path=None) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# htdigest +#============================================================================= +class HtdigestFileTest(TestCase): + "test HtdigestFile class" + descriptionPrefix = "HtdigestFile" + + # sample with 4 users + sample_01 = b('user2:realm:549d2a5f4659ab39a80dac99e159ab19\n' + 'user3:realm:a500bb8c02f6a9170ae46af10c898744\n' + 'user4:realm:ab7b5d5f28ccc7666315f508c7358519\n' + 'user1:realm:2a6cf53e7d8f8cf39d946dc880b14128\n') + + # sample 1 with user 1, 2 deleted; 4 changed + sample_02 = b('user3:realm:a500bb8c02f6a9170ae46af10c898744\n' + 'user4:realm:ab7b5d5f28ccc7666315f508c7358519\n') + + # sample 1 with user2 updated, user 1 first entry removed, and user 5 added + sample_03 = b('user2:realm:5ba6d8328943c23c64b50f8b29566059\n' + 'user3:realm:a500bb8c02f6a9170ae46af10c898744\n' + 'user4:realm:ab7b5d5f28ccc7666315f508c7358519\n' + 'user1:realm:2a6cf53e7d8f8cf39d946dc880b14128\n' + 'user5:realm:03c55fdc6bf71552356ad401bdb9af19\n') + + # standalone sample with 8-bit username & realm + sample_04_utf8 = b('user\xc3\xa6:realm\xc3\xa6:549d2a5f4659ab39a80dac99e159ab19\n') + sample_04_latin1 = b('user\xe6:realm\xe6:549d2a5f4659ab39a80dac99e159ab19\n') + + def test_00_constructor_autoload(self): + "test constructor autoload" + # check with existing file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtdigestFile(path) + self.assertEqual(ht.to_string(), self.sample_01) 
+ + # check without autoload + ht = apache.HtdigestFile(path, new=True) + self.assertEqual(ht.to_string(), b("")) + + # check missing file + os.remove(path) + self.assertRaises(IOError, apache.HtdigestFile, path) + + # NOTE: default_realm option checked via other tests. + + def test_01_delete(self): + "test delete()" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertTrue(ht.delete("user1", "realm")) + self.assertTrue(ht.delete("user2", "realm")) + self.assertFalse(ht.delete("user5", "realm")) + self.assertFalse(ht.delete("user3", "realm5")) + self.assertEqual(ht.to_string(), self.sample_02) + + # invalid user + self.assertRaises(ValueError, ht.delete, "user:", "realm") + + # invalid realm + self.assertRaises(ValueError, ht.delete, "user", "realm:") + + def test_01_delete_autosave(self): + path = self.mktemp() + set_file(path, self.sample_01) + + ht = apache.HtdigestFile(path) + self.assertTrue(ht.delete("user1", "realm")) + self.assertFalse(ht.delete("user3", "realm5")) + self.assertFalse(ht.delete("user5", "realm")) + self.assertEqual(get_file(path), self.sample_01) + + ht.autosave = True + self.assertTrue(ht.delete("user2", "realm")) + self.assertEqual(get_file(path), self.sample_02) + + def test_02_set_password(self): + "test update()" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertTrue(ht.set_password("user2", "realm", "pass2x")) + self.assertFalse(ht.set_password("user5", "realm", "pass5")) + self.assertEqual(ht.to_string(), self.sample_03) + + # default realm + self.assertRaises(TypeError, ht.set_password, "user2", "pass3") + ht.default_realm = "realm2" + ht.set_password("user2", "pass3") + ht.check_password("user2", "realm2", "pass3") + + # invalid user + self.assertRaises(ValueError, ht.set_password, "user:", "realm", "pass") + self.assertRaises(ValueError, ht.set_password, "u"*256, "realm", "pass") + + # invalid realm + self.assertRaises(ValueError, ht.set_password, "user", "realm:", "pass") + self.assertRaises(ValueError, ht.set_password, "user", "r"*256, "pass") + + # test that legacy update() still works + with self.assertWarningList("update\(\) is deprecated"): + ht.update("user2", "realm2", "test") + self.assertTrue(ht.check_password("user2", "test")) + + # TODO: test set_password autosave + + def test_03_users(self): + "test users()" + ht = apache.HtdigestFile.from_string(self.sample_01) + ht.set_password("user5", "realm", "pass5") + ht.delete("user3", "realm") + ht.set_password("user3", "realm", "pass3") + self.assertEqual(ht.users("realm"), ["user2", "user4", "user1", "user5", "user3"]) + + self.assertRaises(TypeError, ht.users, 1) + + def test_04_check_password(self): + "test check_password()" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertRaises(TypeError, ht.check_password, 1, 'realm', 'pass5') + self.assertRaises(TypeError, ht.check_password, 'user', 1, 'pass5') + self.assertIs(ht.check_password("user5", "realm","pass5"), None) + for i in irange(1,5): + i = str(i) + self.assertTrue(ht.check_password("user"+i, "realm", "pass"+i)) + self.assertIs(ht.check_password("user"+i, "realm", "pass5"), False) + + # default realm + self.assertRaises(TypeError, ht.check_password, "user5", "pass5") + ht.default_realm = "realm" + self.assertTrue(ht.check_password("user1", "pass1")) + self.assertIs(ht.check_password("user5", "pass5"), None) + + # test that legacy verify() still works + with self.assertWarningList(["verify\(\) is deprecated"]*2): + self.assertTrue(ht.verify("user1", "realm", "pass1")) + 
self.assertFalse(ht.verify("user1", "realm", "pass2")) + + # invalid user + self.assertRaises(ValueError, ht.check_password, "user:", "realm", "pass") + + def test_05_load(self): + "test load()" + # setup empty file + path = self.mktemp() + set_file(path, "") + backdate_file_mtime(path, 5) + ha = apache.HtdigestFile(path) + self.assertEqual(ha.to_string(), b("")) + + # make changes, check load_if_changed() does nothing + ha.set_password("user1", "realm", "pass1") + ha.load_if_changed() + self.assertEqual(ha.to_string(), b('user1:realm:2a6cf53e7d8f8cf39d946dc880b14128\n')) + + # change file + set_file(path, self.sample_01) + ha.load_if_changed() + self.assertEqual(ha.to_string(), self.sample_01) + + # make changes, check load_if_changed overwrites them + ha.set_password("user5", "realm", "pass5") + ha.load() + self.assertEqual(ha.to_string(), self.sample_01) + + # test load w/ no path + hb = apache.HtdigestFile() + self.assertRaises(RuntimeError, hb.load) + self.assertRaises(RuntimeError, hb.load_if_changed) + + # test load w/ explicit path + hc = apache.HtdigestFile() + hc.load(path) + self.assertEqual(hc.to_string(), self.sample_01) + + # change file, test deprecated force=False kwd + ensure_mtime_changed(path) + set_file(path, "") + with self.assertWarningList(r"load\(force=False\) is deprecated"): + ha.load(force=False) + self.assertEqual(ha.to_string(), b("")) + + def test_06_save(self): + "test save()" + # load from file + path = self.mktemp() + set_file(path, self.sample_01) + ht = apache.HtdigestFile(path) + + # make changes, check they saved + ht.delete("user1", "realm") + ht.delete("user2", "realm") + ht.save() + self.assertEqual(get_file(path), self.sample_02) + + # test save w/ no path + hb = apache.HtdigestFile() + hb.set_password("user1", "realm", "pass1") + self.assertRaises(RuntimeError, hb.save) + + # test save w/ explicit path + hb.save(path) + self.assertEqual(get_file(path), hb.to_string()) + + def test_07_realms(self): + "test realms() & delete_realm()" + ht = apache.HtdigestFile.from_string(self.sample_01) + + self.assertEqual(ht.delete_realm("x"), 0) + self.assertEqual(ht.realms(), ['realm']) + + self.assertEqual(ht.delete_realm("realm"), 4) + self.assertEqual(ht.realms(), []) + self.assertEqual(ht.to_string(), b("")) + + def test_08_get_hash(self): + "test get_hash()" + ht = apache.HtdigestFile.from_string(self.sample_01) + self.assertEqual(ht.get_hash("user3", "realm"), "a500bb8c02f6a9170ae46af10c898744") + self.assertEqual(ht.get_hash("user4", "realm"), "ab7b5d5f28ccc7666315f508c7358519") + self.assertEqual(ht.get_hash("user5", "realm"), None) + + with self.assertWarningList("find\(\) is deprecated"): + self.assertEqual(ht.find("user4", "realm"), "ab7b5d5f28ccc7666315f508c7358519") + + def test_09_encodings(self): + "test encoding parameter" + # test bad encodings cause failure in constructor + self.assertRaises(ValueError, apache.HtdigestFile, encoding="utf-16") + + # check sample utf-8 + ht = apache.HtdigestFile.from_string(self.sample_04_utf8, encoding="utf-8", return_unicode=True) + self.assertEqual(ht.realms(), [ u("realm\u00e6") ]) + self.assertEqual(ht.users(u("realm\u00e6")), [ u("user\u00e6") ]) + + # check sample latin-1 + ht = apache.HtdigestFile.from_string(self.sample_04_latin1, encoding="latin-1", return_unicode=True) + self.assertEqual(ht.realms(), [ u("realm\u00e6") ]) + self.assertEqual(ht.users(u("realm\u00e6")), [ u("user\u00e6") ]) + + def test_10_to_string(self): + "test to_string()" + + # check sample + ht = 
apache.HtdigestFile.from_string(self.sample_01) + self.assertEqual(ht.to_string(), self.sample_01) + + # check blank + ht = apache.HtdigestFile() + self.assertEqual(ht.to_string(), b("")) + + def test_11_malformed(self): + self.assertRaises(ValueError, apache.HtdigestFile.from_string, + b('realm:user1:pass1:other\n')) + self.assertRaises(ValueError, apache.HtdigestFile.from_string, + b('user1:pass1\n')) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_apps.py b/passlib/tests/test_apps.py new file mode 100644 index 00000000..421f83b0 --- /dev/null +++ b/passlib/tests/test_apps.py @@ -0,0 +1,128 @@ +"""test passlib.apps""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib import apps, hash as hashmod +from passlib.tests.utils import TestCase +# module + +#============================================================================= +# test predefined app contexts +#============================================================================= +class AppsTest(TestCase): + "perform general tests to make sure contexts work" + # NOTE: these tests are not really comprehensive, + # since they would do little but duplicate + # the presets in apps.py + # + # they mainly try to ensure no typos + # or dynamic behavior foul-ups. 
+ + def test_master_context(self): + ctx = apps.master_context + self.assertGreater(len(ctx.schemes()), 50) + + def test_custom_app_context(self): + ctx = apps.custom_app_context + self.assertEqual(ctx.schemes(), ("sha512_crypt", "sha256_crypt")) + for hash in [ + ('$6$rounds=41128$VoQLvDjkaZ6L6BIE$4pt.1Ll1XdDYduEwEYPCMOBiR6W6' + 'znsyUEoNlcVXpv2gKKIbQolgmTGe6uEEVJ7azUxuc8Tf7zV9SD2z7Ij751'), + ('$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0itGny' + 'xDGgMlDcOsfaI17'), + ]: + self.assertTrue(ctx.verify("test", hash)) + + def test_django_context(self): + ctx = apps.django_context + for hash in [ + 'sha1$0d082$cdb462ae8b6be8784ef24b20778c4d0c82d5957f', + 'md5$b887a$37767f8a745af10612ad44c80ff52e92', + 'crypt$95a6d$95x74hLDQKXI2', + '098f6bcd4621d373cade4e832627b4f6', + ]: + self.assertTrue(ctx.verify("test", hash)) + + self.assertEqual(ctx.identify("!"), "django_disabled") + self.assertFalse(ctx.verify("test", "!")) + + def test_ldap_nocrypt_context(self): + ctx = apps.ldap_nocrypt_context + for hash in [ + '{SSHA}cPusOzd6d5n3OjSVK3R329ZGCNyFcC7F', + 'test', + ]: + self.assertTrue(ctx.verify("test", hash)) + + self.assertIs(ctx.identify('{CRYPT}$5$rounds=31817$iZGmlyBQ99JSB5' + 'n6$p4E.pdPBWx19OajgjLRiOW0itGnyxDGgMlDcOsfaI17'), None) + + def test_ldap_context(self): + ctx = apps.ldap_context + for hash in [ + ('{CRYPT}$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0' + 'itGnyxDGgMlDcOsfaI17'), + '{SSHA}cPusOzd6d5n3OjSVK3R329ZGCNyFcC7F', + 'test', + ]: + self.assertTrue(ctx.verify("test", hash)) + + def test_ldap_mysql_context(self): + ctx = apps.mysql_context + for hash in [ + '*94BDCEBE19083CE2A1F959FD02F964C7AF4CFC29', + '378b243e220ca493', + ]: + self.assertTrue(ctx.verify("test", hash)) + + def test_postgres_context(self): + ctx = apps.postgres_context + hash = 'md55d9c68c6c50ed3d02a2fcf54f63993b6' + self.assertTrue(ctx.verify("test", hash, user='user')) + + def test_phppass_context(self): + ctx = apps.phpass_context + for hash in [ + '$P$8Ja1vJsKa5qyy/b3mCJGXM7GyBnt6..', + '$H$8b95CoYQnQ9Y6fSTsACyphNh5yoM02.', + '_cD..aBxeRhYFJvtUvsI', + ]: + self.assertTrue(ctx.verify("test", hash)) + + h1 = "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + if hashmod.bcrypt.has_backend(): + self.assertTrue(ctx.verify("test", h1)) + self.assertEqual(ctx.default_scheme(), "bcrypt") + self.assertEqual(ctx.handler().name, "bcrypt") + else: + self.assertEqual(ctx.identify(h1), "bcrypt") + self.assertEqual(ctx.default_scheme(), "phpass") + self.assertEqual(ctx.handler().name, "phpass") + + def test_phpbb3_context(self): + ctx = apps.phpbb3_context + for hash in [ + '$P$8Ja1vJsKa5qyy/b3mCJGXM7GyBnt6..', + '$H$8b95CoYQnQ9Y6fSTsACyphNh5yoM02.', + ]: + self.assertTrue(ctx.verify("test", hash)) + self.assertTrue(ctx.encrypt("test").startswith("$H$")) + + def test_roundup_context(self): + ctx = apps.roundup_context + for hash in [ + '{PBKDF2}9849$JMTYu3eOUSoFYExprVVqbQ$N5.gV.uR1.BTgLSvi0qyPiRlGZ0', + '{SHA}a94a8fe5ccb19ba61c4c0873d391e987982fbbd3', + '{CRYPT}dptOmKDriOGfU', + '{plaintext}test', + ]: + self.assertTrue(ctx.verify("test", hash)) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_context.py b/passlib/tests/test_context.py new file mode 100644 index 00000000..cdd8746b --- /dev/null +++ b/passlib/tests/test_context.py @@ -0,0 +1,1623 @@ +"""tests for passlib.context""" 
+#============================================================================= +# imports +#============================================================================= +# core +from __future__ import with_statement +from passlib.utils.compat import PY3 +if PY3: + from configparser import NoSectionError +else: + from ConfigParser import NoSectionError +import hashlib +import logging; log = logging.getLogger(__name__) +import re +import os +import time +import warnings +import sys +# site +# pkg +from passlib import hash +from passlib.context import CryptContext, LazyCryptContext +from passlib.exc import PasslibConfigWarning +from passlib.utils import tick, to_bytes, to_unicode +from passlib.utils.compat import irange, u, unicode, str_to_uascii, PY2 +import passlib.utils.handlers as uh +from passlib.tests.utils import TestCase, catch_warnings, set_file, TICK_RESOLUTION, quicksleep +from passlib.registry import (register_crypt_handler_path, + _has_crypt_handler as has_crypt_handler, + _unload_handler_name as unload_handler_name, + get_crypt_handler, + ) +# local +#============================================================================= +# support +#============================================================================= +here = os.path.abspath(os.path.dirname(__file__)) + +def merge_dicts(first, *args, **kwds): + target = first.copy() + for arg in args: + target.update(arg) + if kwds: + target.update(kwds) + return target + +#============================================================================= +# +#============================================================================= +class CryptContextTest(TestCase): + descriptionPrefix = "CryptContext" + + # TODO: these unittests could really use a good cleanup + # and reorganizing, to ensure they're getting everything. + + #=================================================================== + # sample configurations used in tests + #=================================================================== + + #--------------------------------------------------------------- + # sample 1 - typical configuration + #--------------------------------------------------------------- + sample_1_schemes = ["des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"] + sample_1_handlers = [get_crypt_handler(name) for name in sample_1_schemes] + + sample_1_dict = dict( + schemes = sample_1_schemes, + default = "md5_crypt", + all__vary_rounds = 0.1, + bsdi_crypt__max_rounds = 30000, + bsdi_crypt__default_rounds = 25000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds = 40000, + ) + + sample_1_resolved_dict = merge_dicts(sample_1_dict, + schemes = sample_1_handlers) + + sample_1_unnormalized = u("""\ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +; this is using %... 
+all__vary_rounds = 10%% +; this is using 'rounds' instead of 'default_rounds' +bsdi_crypt__rounds = 25000 +bsdi_crypt__max_rounds = 30000 +sha512_crypt__max_rounds = 50000 +sha512_crypt__min_rounds = 40000 +""") + + sample_1_unicode = u("""\ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all__vary_rounds = 0.1 +bsdi_crypt__default_rounds = 25000 +bsdi_crypt__max_rounds = 30000 +sha512_crypt__max_rounds = 50000 +sha512_crypt__min_rounds = 40000 + +""") + + #--------------------------------------------------------------- + # sample 1 external files + #--------------------------------------------------------------- + + # sample 1 string with '\n' linesep + sample_1_path = os.path.join(here, "sample1.cfg") + + # sample 1 with '\r\n' linesep + sample_1b_unicode = sample_1_unicode.replace(u("\n"), u("\r\n")) + sample_1b_path = os.path.join(here, "sample1b.cfg") + + # sample 1 using UTF-16 and alt section + sample_1c_bytes = sample_1_unicode.replace(u("[passlib]"), + u("[mypolicy]")).encode("utf-16") + sample_1c_path = os.path.join(here, "sample1c.cfg") + + # enable to regenerate sample files + if False: + set_file(sample_1_path, sample_1_unicode) + set_file(sample_1b_path, sample_1b_unicode) + set_file(sample_1c_path, sample_1c_bytes) + + #--------------------------------------------------------------- + # sample 2 & 12 - options patch + #--------------------------------------------------------------- + sample_2_dict = dict( + # using this to test full replacement of existing options + bsdi_crypt__min_rounds = 29000, + bsdi_crypt__max_rounds = 35000, + bsdi_crypt__default_rounds = 31000, + # using this to test partial replacement of existing options + sha512_crypt__min_rounds=45000, + ) + + sample_2_unicode = """\ +[passlib] +bsdi_crypt__min_rounds = 29000 +bsdi_crypt__max_rounds = 35000 +bsdi_crypt__default_rounds = 31000 +sha512_crypt__min_rounds = 45000 +""" + + # sample 2 overlayed on top of sample 1 + sample_12_dict = merge_dicts(sample_1_dict, sample_2_dict) + + #--------------------------------------------------------------- + # sample 3 & 123 - just changing default from sample 1 + #--------------------------------------------------------------- + sample_3_dict = dict( + default="sha512_crypt", + ) + + # sample 3 overlayed on 2 overlayed on 1 + sample_123_dict = merge_dicts(sample_12_dict, sample_3_dict) + + #--------------------------------------------------------------- + # sample 4 - used by api tests + #--------------------------------------------------------------- + sample_4_dict = dict( + schemes = [ "des_crypt", "md5_crypt", "phpass", "bsdi_crypt", + "sha256_crypt"], + deprecated = [ "des_crypt", ], + default = "sha256_crypt", + bsdi_crypt__max_rounds = 30, + bsdi_crypt__default_rounds = 25, + bsdi_crypt__vary_rounds = 0, + sha256_crypt__max_rounds = 3000, + sha256_crypt__min_rounds = 2000, + sha256_crypt__default_rounds = 3000, + phpass__ident = "H", + phpass__default_rounds = 7, + ) + + #=================================================================== + # constructors + #=================================================================== + def test_01_constructor(self): + "test class constructor" + + # test blank constructor works correctly + ctx = CryptContext() + self.assertEqual(ctx.to_dict(), {}) + + # test sample 1 with scheme=names + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 with scheme=handlers + ctx = CryptContext(**self.sample_1_resolved_dict) + 
self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 2: options w/o schemes + ctx = CryptContext(**self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_2_dict) + + # test sample 3: default only + ctx = CryptContext(**self.sample_3_dict) + self.assertEqual(ctx.to_dict(), self.sample_3_dict) + + def test_02_from_string(self): + "test from_string() constructor" + # test sample 1 unicode + ctx = CryptContext.from_string(self.sample_1_unicode) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 with unnormalized inputs + ctx = CryptContext.from_string(self.sample_1_unnormalized) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 utf-8 + ctx = CryptContext.from_string(self.sample_1_unicode.encode("utf-8")) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 w/ '\r\n' linesep + ctx = CryptContext.from_string(self.sample_1b_unicode) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 using UTF-16 and alt section + ctx = CryptContext.from_string(self.sample_1c_bytes, section="mypolicy", + encoding="utf-16") + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test wrong type + self.assertRaises(TypeError, CryptContext.from_string, None) + + # test missing section + self.assertRaises(NoSectionError, CryptContext.from_string, + self.sample_1_unicode, section="fakesection") + + def test_03_from_path(self): + "test from_path() constructor" + # make sure sample files exist + if not os.path.exists(self.sample_1_path): + raise RuntimeError("can't find data file: %r" % self.sample_1_path) + + # test sample 1 + ctx = CryptContext.from_path(self.sample_1_path) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 w/ '\r\n' linesep + ctx = CryptContext.from_path(self.sample_1b_path) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test sample 1 encoding using UTF-16 and alt section + ctx = CryptContext.from_path(self.sample_1c_path, section="mypolicy", + encoding="utf-16") + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test missing file + self.assertRaises(EnvironmentError, CryptContext.from_path, + os.path.join(here, "sample1xxx.cfg")) + + # test missing section + self.assertRaises(NoSectionError, CryptContext.from_path, + self.sample_1_path, section="fakesection") + + def test_04_copy(self): + "test copy() method" + cc1 = CryptContext(**self.sample_1_dict) + + # overlay sample 2 onto copy + cc2 = cc1.copy(**self.sample_2_dict) + self.assertEqual(cc1.to_dict(), self.sample_1_dict) + self.assertEqual(cc2.to_dict(), self.sample_12_dict) + + # check that repeating overlay makes no change + cc2b = cc2.copy(**self.sample_2_dict) + self.assertEqual(cc1.to_dict(), self.sample_1_dict) + self.assertEqual(cc2b.to_dict(), self.sample_12_dict) + + # overlay sample 3 on copy + cc3 = cc2.copy(**self.sample_3_dict) + self.assertEqual(cc3.to_dict(), self.sample_123_dict) + + # test empty copy creates separate copy + cc4 = cc1.copy() + self.assertIsNot(cc4, cc1) + self.assertEqual(cc1.to_dict(), self.sample_1_dict) + self.assertEqual(cc4.to_dict(), self.sample_1_dict) + + # ... 
and that modifying copy doesn't affect original + cc4.update(**self.sample_2_dict) + self.assertEqual(cc1.to_dict(), self.sample_1_dict) + self.assertEqual(cc4.to_dict(), self.sample_12_dict) + + def test_09_repr(self): + "test repr()" + cc1 = CryptContext(**self.sample_1_dict) + self.assertRegex(repr(cc1), "^$") + + #=================================================================== + # modifiers + #=================================================================== + def test_10_load(self): + "test load() / load_path() method" + # NOTE: load() is the workhorse that handles all policy parsing, + # compilation, and validation. most of it's features are tested + # elsewhere, since all the constructors and modifiers are just + # wrappers for it. + + # source_type 'auto' + ctx = CryptContext() + + # detect dict + ctx.load(self.sample_1_dict) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # detect unicode string + ctx.load(self.sample_1_unicode) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # detect bytes string + ctx.load(self.sample_1_unicode.encode("utf-8")) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # anything else - TypeError + self.assertRaises(TypeError, ctx.load, None) + + # NOTE: load_path() tested by from_path() + # NOTE: additional string tests done by from_string() + + # update flag - tested by update() method tests + # encoding keyword - tested by from_string() & from_path() + # section keyword - tested by from_string() & from_path() + + # test load empty + ctx = CryptContext(**self.sample_1_dict) + ctx.load({}, update=True) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # multiple loads should clear the state + ctx = CryptContext() + ctx.load(self.sample_1_dict) + ctx.load(self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_2_dict) + + def test_11_load_rollback(self): + "test load() errors restore old state" + # create initial context + cc = CryptContext(["des_crypt", "sha256_crypt"], + sha256_crypt__default_rounds=5000, + all__vary_rounds=0.1, + ) + result = cc.to_string() + + # do an update operation that should fail during parsing + # XXX: not sure what the right error type is here. + self.assertRaises(TypeError, cc.update, too__many__key__parts=True) + self.assertEqual(cc.to_string(), result) + + # do an update operation that should fail during extraction + # FIXME: this isn't failing even in broken case, need to figure out + # way to ensure some keys come after this one. + self.assertRaises(KeyError, cc.update, fake_context_option=True) + self.assertEqual(cc.to_string(), result) + + # do an update operation that should fail during compilation + self.assertRaises(ValueError, cc.update, sha256_crypt__min_rounds=10000) + self.assertEqual(cc.to_string(), result) + + def test_12_update(self): + "test update() method" + + # empty overlay + ctx = CryptContext(**self.sample_1_dict) + ctx.update() + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + + # test basic overlay + ctx = CryptContext(**self.sample_1_dict) + ctx.update(**self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_12_dict) + + # ... 
and again + ctx.update(**self.sample_3_dict) + self.assertEqual(ctx.to_dict(), self.sample_123_dict) + + # overlay w/ dict arg + ctx = CryptContext(**self.sample_1_dict) + ctx.update(self.sample_2_dict) + self.assertEqual(ctx.to_dict(), self.sample_12_dict) + + # overlay w/ string + ctx = CryptContext(**self.sample_1_dict) + ctx.update(self.sample_2_unicode) + self.assertEqual(ctx.to_dict(), self.sample_12_dict) + + # too many args + self.assertRaises(TypeError, ctx.update, {}, {}) + self.assertRaises(TypeError, ctx.update, {}, schemes=['des_crypt']) + + # wrong arg type + self.assertRaises(TypeError, ctx.update, None) + + #=================================================================== + # option parsing + #=================================================================== + def test_20_options(self): + "test basic option parsing" + def parse(**kwds): + return CryptContext(**kwds).to_dict() + + # + # common option parsing tests + # + + # test keys with blank fields are rejected + # blank option + self.assertRaises(TypeError, CryptContext, __=0.1) + self.assertRaises(TypeError, CryptContext, default__scheme__='x') + + # blank scheme + self.assertRaises(TypeError, CryptContext, __option='x') + self.assertRaises(TypeError, CryptContext, default____option='x') + + # blank category + self.assertRaises(TypeError, CryptContext, __scheme__option='x') + + # test keys with too many field are rejected + self.assertRaises(TypeError, CryptContext, + category__scheme__option__invalid = 30000) + + # keys with mixed separators should be handled correctly. + # (testing actual data, not to_dict(), since re-render hid original bug) + self.assertRaises(KeyError, parse, + **{"admin.context__schemes":"md5_crypt"}) + ctx = CryptContext(**{"schemes":"md5_crypt,des_crypt", + "admin.context__default":"des_crypt"}) + self.assertEqual(ctx.default_scheme("admin"), "des_crypt") + + # + # context option -specific tests + # + + # test context option key parsing + result = dict(default="md5_crypt") + self.assertEqual(parse(default="md5_crypt"), result) + self.assertEqual(parse(context__default="md5_crypt"), result) + self.assertEqual(parse(default__context__default="md5_crypt"), result) + self.assertEqual(parse(**{"context.default":"md5_crypt"}), result) + self.assertEqual(parse(**{"default.context.default":"md5_crypt"}), result) + + # test context option key parsing w/ category + result = dict(admin__context__default="md5_crypt") + self.assertEqual(parse(admin__context__default="md5_crypt"), result) + self.assertEqual(parse(**{"admin.context.default":"md5_crypt"}), result) + + # + # hash option -specific tests + # + + # test hash option key parsing + result = dict(all__vary_rounds=0.1) + self.assertEqual(parse(all__vary_rounds=0.1), result) + self.assertEqual(parse(default__all__vary_rounds=0.1), result) + self.assertEqual(parse(**{"all.vary_rounds":0.1}), result) + self.assertEqual(parse(**{"default.all.vary_rounds":0.1}), result) + + # test hash option key parsing w/ category + result = dict(admin__all__vary_rounds=0.1) + self.assertEqual(parse(admin__all__vary_rounds=0.1), result) + self.assertEqual(parse(**{"admin.all.vary_rounds":0.1}), result) + + # settings not allowed if not in hash.settings_kwds + ctx = CryptContext(["phpass", "md5_crypt"], phpass__ident="P") + self.assertRaises(KeyError, ctx.copy, md5_crypt__ident="P") + + # hash options 'salt' and 'rounds' not allowed + self.assertRaises(KeyError, CryptContext, schemes=["des_crypt"], + des_crypt__salt="xx") + self.assertRaises(KeyError, CryptContext, 
schemes=["des_crypt"], + all__salt="xx") + + def test_21_schemes(self): + "test 'schemes' context option parsing" + + # schemes can be empty + cc = CryptContext(schemes=None) + self.assertEqual(cc.schemes(), ()) + + # schemes can be list of names + cc = CryptContext(schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt")) + + # schemes can be comma-sep string + cc = CryptContext(schemes=" des_crypt, md5_crypt, ") + self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt")) + + # schemes can be list of handlers + cc = CryptContext(schemes=[hash.des_crypt, hash.md5_crypt]) + self.assertEqual(cc.schemes(), ("des_crypt", "md5_crypt")) + + # scheme must be name or handler + self.assertRaises(TypeError, CryptContext, schemes=[uh.StaticHandler]) + + # handlers must have a name + class nameless(uh.StaticHandler): + name = None + self.assertRaises(ValueError, CryptContext, schemes=[nameless]) + + # names must be unique + class dummy_1(uh.StaticHandler): + name = 'dummy_1' + self.assertRaises(KeyError, CryptContext, schemes=[dummy_1, dummy_1]) + + # schemes not allowed per-category + self.assertRaises(KeyError, CryptContext, + admin__context__schemes=["md5_crypt"]) + + def test_22_deprecated(self): + "test 'deprecated' context option parsing" + def getdep(ctx, category=None): + return [name for name in ctx.schemes() + if ctx._is_deprecated_scheme(name, category)] + + # no schemes - all deprecated values allowed + cc = CryptContext(deprecated=["md5_crypt"]) + cc.update(schemes=["md5_crypt", "des_crypt"]) + self.assertEqual(getdep(cc),["md5_crypt"]) + + # deprecated values allowed if subset of schemes + cc = CryptContext(deprecated=["md5_crypt"], schemes=["md5_crypt", "des_crypt"]) + self.assertEqual(getdep(cc), ["md5_crypt"]) + + # can be handler + # XXX: allow handlers in deprecated list? not for now. + self.assertRaises(TypeError, CryptContext, deprecated=[hash.md5_crypt], + schemes=["md5_crypt", "des_crypt"]) +## cc = CryptContext(deprecated=[hash.md5_crypt], schemes=["md5_crypt", "des_crypt"]) +## self.assertEqual(getdep(cc), ["md5_crypt"]) + + # comma sep list + cc = CryptContext(deprecated="md5_crypt,des_crypt", schemes=["md5_crypt", "des_crypt", "sha256_crypt"]) + self.assertEqual(getdep(cc), ["md5_crypt", "des_crypt"]) + + # values outside of schemes not allowed + self.assertRaises(KeyError, CryptContext, schemes=['des_crypt'], + deprecated=['md5_crypt']) + + # deprecating ALL schemes should cause ValueError + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt'], + deprecated=['des_crypt']) + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + admin__context__deprecated=['des_crypt', 'md5_crypt']) + + # deprecating explicit default scheme should cause ValueError + + # ... default listed as deprecated + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + default="md5_crypt", + deprecated="md5_crypt") + + # ... global default deprecated per-category + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + default="md5_crypt", + admin__context__deprecated="md5_crypt") + + # ... category default deprecated globally + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + admin__context__default="md5_crypt", + deprecated="md5_crypt") + + # ... 
category default deprecated in category + self.assertRaises(ValueError, CryptContext, + schemes=['des_crypt', 'md5_crypt'], + admin__context__default="md5_crypt", + admin__context__deprecated="md5_crypt") + + # category deplist should shadow default deplist + CryptContext( + schemes=['des_crypt', 'md5_crypt'], + deprecated="md5_crypt", + admin__context__default="md5_crypt", + admin__context__deprecated=[]) + + # wrong type + self.assertRaises(TypeError, CryptContext, deprecated=123) + + # deprecated per-category + cc = CryptContext(deprecated=["md5_crypt"], + schemes=["md5_crypt", "des_crypt"], + admin__context__deprecated=["des_crypt"], + ) + self.assertEqual(getdep(cc), ["md5_crypt"]) + self.assertEqual(getdep(cc, "user"), ["md5_crypt"]) + self.assertEqual(getdep(cc, "admin"), ["des_crypt"]) + + # blank per-category deprecated list, shadowing default list + cc = CryptContext(deprecated=["md5_crypt"], + schemes=["md5_crypt", "des_crypt"], + admin__context__deprecated=[], + ) + self.assertEqual(getdep(cc), ["md5_crypt"]) + self.assertEqual(getdep(cc, "user"), ["md5_crypt"]) + self.assertEqual(getdep(cc, "admin"), []) + + def test_23_default(self): + "test 'default' context option parsing" + + # anything allowed if no schemes + self.assertEqual(CryptContext(default="md5_crypt").to_dict(), + dict(default="md5_crypt")) + + # default allowed if in scheme list + ctx = CryptContext(default="md5_crypt", schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(ctx.default_scheme(), "md5_crypt") + + # default can be handler + # XXX: sure we want to allow this ? maybe deprecate in future. + ctx = CryptContext(default=hash.md5_crypt, schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(ctx.default_scheme(), "md5_crypt") + + # implicit default should be first non-deprecated scheme + ctx = CryptContext(schemes=["des_crypt", "md5_crypt"]) + self.assertEqual(ctx.default_scheme(), "des_crypt") + ctx.update(deprecated="des_crypt") + self.assertEqual(ctx.default_scheme(), "md5_crypt") + + # error if not in scheme list + self.assertRaises(KeyError, CryptContext, schemes=['des_crypt'], + default='md5_crypt') + + # wrong type + self.assertRaises(TypeError, CryptContext, default=1) + + # per-category + ctx = CryptContext(default="des_crypt", + schemes=["des_crypt", "md5_crypt"], + admin__context__default="md5_crypt") + self.assertEqual(ctx.default_scheme(), "des_crypt") + self.assertEqual(ctx.default_scheme("user"), "des_crypt") + self.assertEqual(ctx.default_scheme("admin"), "md5_crypt") + + def test_24_vary_rounds(self): + "test 'vary_rounds' hash option parsing" + def parse(v): + return CryptContext(all__vary_rounds=v).to_dict()['all__vary_rounds'] + + # floats should be preserved + self.assertEqual(parse(0.1), 0.1) + self.assertEqual(parse('0.1'), 0.1) + + # 'xx%' should be converted to float + self.assertEqual(parse('10%'), 0.1) + + # ints should be preserved + self.assertEqual(parse(1000), 1000) + self.assertEqual(parse('1000'), 1000) + + #=================================================================== + # inspection & serialization + #=================================================================== + def test_30_schemes(self): + "test schemes() method" + # NOTE: also checked under test_21 + + # test empty + ctx = CryptContext() + self.assertEqual(ctx.schemes(), ()) + self.assertEqual(ctx.schemes(resolve=True), ()) + + # test sample 1 + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.schemes(), tuple(self.sample_1_schemes)) + self.assertEqual(ctx.schemes(resolve=True), 
tuple(self.sample_1_handlers)) + + # test sample 2 + ctx = CryptContext(**self.sample_2_dict) + self.assertEqual(ctx.schemes(), ()) + + def test_31_default_scheme(self): + "test default_scheme() method" + # NOTE: also checked under test_23 + + # test empty + ctx = CryptContext() + self.assertRaises(KeyError, ctx.default_scheme) + + # test sample 1 + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.default_scheme(), "md5_crypt") + self.assertEqual(ctx.default_scheme(resolve=True), hash.md5_crypt) + + # test sample 2 + ctx = CryptContext(**self.sample_2_dict) + self.assertRaises(KeyError, ctx.default_scheme) + + # test defaults to first in scheme + ctx = CryptContext(schemes=self.sample_1_schemes) + self.assertEqual(ctx.default_scheme(), "des_crypt") + + # categories tested under test_23 + + def test_32_handler(self): + "test handler() method" + + # default for empty + ctx = CryptContext() + self.assertRaises(KeyError, ctx.handler) + self.assertRaises(KeyError, ctx.handler, "md5_crypt") + + # default for sample 1 + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.handler(), hash.md5_crypt) + + # by name + self.assertEqual(ctx.handler("des_crypt"), hash.des_crypt) + + # name not in schemes + self.assertRaises(KeyError, ctx.handler, "mysql323") + + # check handler() honors category default + ctx = CryptContext("sha256_crypt,md5_crypt", admin__context__default="md5_crypt") + self.assertEqual(ctx.handler(), hash.sha256_crypt) + self.assertEqual(ctx.handler(category="staff"), hash.sha256_crypt) + self.assertEqual(ctx.handler(category="admin"), hash.md5_crypt) + + # test unicode category strings are accepted under py2 + if PY2: + self.assertEqual(ctx.handler(category=u("staff")), hash.sha256_crypt) + self.assertEqual(ctx.handler(category=u("admin")), hash.md5_crypt) + + def test_33_options(self): + "test internal _get_record_options() method" + def options(ctx, scheme, category=None): + return ctx._config._get_record_options_with_flag(scheme, category)[0] + + # this checks that (3 schemes, 3 categories) inherit options correctly. + # the 'user' category is not present in the options. 
+ cc4 = CryptContext( + schemes = [ "sha512_crypt", "des_crypt", "bsdi_crypt"], + deprecated = ["sha512_crypt", "des_crypt"], + all__vary_rounds = 0.1, + bsdi_crypt__vary_rounds=0.2, + sha512_crypt__max_rounds = 20000, + admin__context__deprecated = [ "des_crypt", "bsdi_crypt" ], + admin__all__vary_rounds = 0.05, + admin__bsdi_crypt__vary_rounds=0.3, + admin__sha512_crypt__max_rounds = 40000, + ) + self.assertEqual(cc4._config.categories, ("admin",)) + + # + # sha512_crypt + # + self.assertEqual(options(cc4, "sha512_crypt"), dict( + deprecated=True, + vary_rounds=0.1, # inherited from all__ + max_rounds=20000, + )) + + self.assertEqual(options(cc4, "sha512_crypt", "user"), dict( + deprecated=True, # unconfigured category inherits from default + vary_rounds=0.1, + max_rounds=20000, + )) + + self.assertEqual(options(cc4, "sha512_crypt", "admin"), dict( + # NOT deprecated - context option overridden per-category + vary_rounds=0.05, # global overridden per-cateogry + max_rounds=40000, # overridden per-category + )) + + # + # des_crypt + # + self.assertEqual(options(cc4, "des_crypt"), dict( + deprecated=True, + vary_rounds=0.1, + )) + + self.assertEqual(options(cc4, "des_crypt", "user"), dict( + deprecated=True, # unconfigured category inherits from default + vary_rounds=0.1, + )) + + self.assertEqual(options(cc4, "des_crypt", "admin"), dict( + deprecated=True, # unchanged though overidden + vary_rounds=0.05, # global overridden per-cateogry + )) + + # + # bsdi_crypt + # + self.assertEqual(options(cc4, "bsdi_crypt"), dict( + vary_rounds=0.2, # overridden from all__vary_rounds + )) + + self.assertEqual(options(cc4, "bsdi_crypt", "user"), dict( + vary_rounds=0.2, # unconfigured category inherits from default + )) + + self.assertEqual(options(cc4, "bsdi_crypt", "admin"), dict( + vary_rounds=0.3, + deprecated=True, # deprecation set per-category + )) + + def test_34_to_dict(self): + "test to_dict() method" + # NOTE: this is tested all throughout this test case. + ctx = CryptContext(**self.sample_1_dict) + self.assertEqual(ctx.to_dict(), self.sample_1_dict) + self.assertEqual(ctx.to_dict(resolve=True), self.sample_1_resolved_dict) + + def test_35_to_string(self): + "test to_string() method" + + # create ctx and serialize + ctx = CryptContext(**self.sample_1_dict) + dump = ctx.to_string() + + # check ctx->string returns canonical format. + # NOTE: ConfigParser for PY26 and earlier didn't use OrderedDict, + # so to_string() won't get order correct. + # so we skip this test. 
+ import sys + if sys.version_info >= (2,7): + self.assertEqual(dump, self.sample_1_unicode) + + # check ctx->string->ctx->dict returns original + ctx2 = CryptContext.from_string(dump) + self.assertEqual(ctx2.to_dict(), self.sample_1_dict) + + # test section kwd is honored + other = ctx.to_string(section="password-security") + self.assertEqual(other, dump.replace("[passlib]","[password-security]")) + + # test unmanaged handler warning + from passlib import hash + from passlib.tests.test_utils_handlers import UnsaltedHash + ctx3 = CryptContext([UnsaltedHash, "md5_crypt"]) + dump = ctx3.to_string() + self.assertRegex(dump, r"# NOTE: the 'unsalted_test_hash' handler\(s\)" + r" are not registered with Passlib") + + #=================================================================== + # password hash api + #=================================================================== + nonstring_vectors = [ + (None, {}), + (None, {"scheme": "des_crypt"}), + (1, {}), + ((), {}), + ] + + def test_40_basic(self): + "test basic encrypt/identify/verify functionality" + handlers = [hash.md5_crypt, hash.des_crypt, hash.bsdi_crypt] + cc = CryptContext(handlers, bsdi_crypt__default_rounds=5) + + # run through handlers + for crypt in handlers: + h = cc.encrypt("test", scheme=crypt.name) + self.assertEqual(cc.identify(h), crypt.name) + self.assertEqual(cc.identify(h, resolve=True), crypt) + self.assertTrue(cc.verify('test', h)) + self.assertFalse(cc.verify('notest', h)) + + # test default + h = cc.encrypt("test") + self.assertEqual(cc.identify(h), "md5_crypt") + + # test genhash + h = cc.genhash('secret', cc.genconfig()) + self.assertEqual(cc.identify(h), 'md5_crypt') + + h = cc.genhash('secret', cc.genconfig(), scheme='md5_crypt') + self.assertEqual(cc.identify(h), 'md5_crypt') + + self.assertRaises(ValueError, cc.genhash, 'secret', cc.genconfig(), scheme="des_crypt") + + def test_41_genconfig(self): + "test genconfig() method" + cc = CryptContext(schemes=["md5_crypt", "phpass"], + phpass__ident="H", + phpass__default_rounds=7, + admin__phpass__ident="P", + ) + + # uses default scheme + self.assertTrue(cc.genconfig().startswith("$1$")) + + # override scheme + self.assertTrue(cc.genconfig(scheme="phpass").startswith("$H$5")) + + # category override + self.assertTrue(cc.genconfig(scheme="phpass", category="admin").startswith("$P$5")) + self.assertTrue(cc.genconfig(scheme="phpass", category="staff").startswith("$H$5")) + + # override scheme & custom settings + self.assertEqual( + cc.genconfig(scheme="phpass", salt='.'*8, rounds=8, ident='P'), + '$P$6........', + ) + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # test unicode category strings are accepted under py2 + # this tests basic _get_record() used by encrypt/genhash/verify. + # we have to omit scheme=xxx so codepath is tested fully + if PY2: + c2 = cc.copy(default="phpass") + self.assertTrue(c2.genconfig(category=u("admin")).startswith("$P$5")) + self.assertTrue(c2.genconfig(category=u("staff")).startswith("$H$5")) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().genconfig) + self.assertRaises(KeyError, CryptContext().genconfig, scheme='md5_crypt') + + # bad scheme values + self.assertRaises(KeyError, cc.genconfig, scheme="fake") # XXX: should this be ValueError? 
+ self.assertRaises(TypeError, cc.genconfig, scheme=1, category='staff')
+ self.assertRaises(TypeError, cc.genconfig, scheme=1)
+
+ # bad category values
+ self.assertRaises(TypeError, cc.genconfig, category=1)
+
+
+ def test_42_genhash(self):
+ "test genhash() method"
+
+ #--------------------------------------------------------------
+ # border cases
+ #--------------------------------------------------------------
+
+ # rejects non-string secrets
+ cc = CryptContext(["des_crypt"])
+ hash = cc.encrypt('stub')
+ for secret, kwds in self.nonstring_vectors:
+ self.assertRaises(TypeError, cc.genhash, secret, hash, **kwds)
+
+ # rejects non-string hashes
+ cc = CryptContext(["des_crypt"])
+ for hash, kwds in self.nonstring_vectors:
+ self.assertRaises(TypeError, cc.genhash, 'secret', hash, **kwds)
+
+ # .. but should accept None if default scheme lacks config string
+ cc = CryptContext(["mysql323"])
+ self.assertIsInstance(cc.genhash("stub", None), str)
+
+ # throws error without schemes
+ self.assertRaises(KeyError, CryptContext().genhash, 'secret', 'hash')
+
+ # bad scheme values
+ self.assertRaises(KeyError, cc.genhash, 'secret', hash, scheme="fake") # XXX: should this be ValueError?
+ self.assertRaises(TypeError, cc.genhash, 'secret', hash, scheme=1)
+
+ # bad category values
+ self.assertRaises(TypeError, cc.genhash, 'secret', hash, category=1)
+
+
+ def test_43_encrypt(self):
+ "test encrypt() method"
+ cc = CryptContext(**self.sample_4_dict)
+
+ # hash specific settings
+ self.assertEqual(
+ cc.encrypt("password", scheme="phpass", salt='.'*8),
+ '$H$5........De04R5Egz0aq8Tf.1eVhY/',
+ )
+ self.assertEqual(
+ cc.encrypt("password", scheme="phpass", salt='.'*8, ident="P"),
+ '$P$5........De04R5Egz0aq8Tf.1eVhY/',
+ )
+
+ # NOTE: more thorough job of rounds limits done below.
+
+ # min rounds
+ with self.assertWarningList(PasslibConfigWarning):
+ self.assertEqual(
+ cc.encrypt("password", rounds=1999, salt="nacl"),
+ '$5$rounds=2000$nacl$9/lTZ5nrfPuz8vphznnmHuDGFuvjSNvOEDsGmGfsS97',
+ )
+
+ with self.assertWarningList([]):
+ self.assertEqual(
+ cc.encrypt("password", rounds=2001, salt="nacl"),
+ '$5$rounds=2001$nacl$8PdeoPL4aXQnJ0woHhqgIw/efyfCKC2WHneOpnvF.31'
+ )
+
+ # NOTE: max rounds, etc tested in genconfig()
+
+ # make default > max throws error if attempted
+ self.assertRaises(ValueError, cc.copy,
+ sha256_crypt__default_rounds=4000)
+
+ #--------------------------------------------------------------
+ # border cases
+ #--------------------------------------------------------------
+
+ # rejects non-string secrets
+ cc = CryptContext(["des_crypt"])
+ for secret, kwds in self.nonstring_vectors:
+ self.assertRaises(TypeError, cc.encrypt, secret, **kwds)
+
+ # throws error without schemes
+ self.assertRaises(KeyError, CryptContext().encrypt, 'secret')
+
+ # bad scheme values
+ self.assertRaises(KeyError, cc.encrypt, 'secret', scheme="fake") # XXX: should this be ValueError?
+ self.assertRaises(TypeError, cc.encrypt, 'secret', scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.encrypt, 'secret', category=1) + + + def test_44_identify(self): + "test identify() border cases" + handlers = ["md5_crypt", "des_crypt", "bsdi_crypt"] + cc = CryptContext(handlers, bsdi_crypt__default_rounds=5) + + # check unknown hash + self.assertEqual(cc.identify('$9$232323123$1287319827'), None) + self.assertRaises(ValueError, cc.identify, '$9$232323123$1287319827', required=True) + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for hash, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.identify, hash, **kwds) + + # throws error without schemes + cc = CryptContext() + self.assertIs(cc.identify('hash'), None) + self.assertRaises(KeyError, cc.identify, 'hash', required=True) + + # bad category values + self.assertRaises(TypeError, cc.identify, None, category=1) + + def test_45_verify(self): + "test verify() scheme kwd" + handlers = ["md5_crypt", "des_crypt", "bsdi_crypt"] + cc = CryptContext(handlers, bsdi_crypt__default_rounds=5) + + h = hash.md5_crypt.encrypt("test") + + # check base verify + self.assertTrue(cc.verify("test", h)) + self.assertTrue(not cc.verify("notest", h)) + + # check verify using right alg + self.assertTrue(cc.verify('test', h, scheme='md5_crypt')) + self.assertTrue(not cc.verify('notest', h, scheme='md5_crypt')) + + # check verify using wrong alg + self.assertRaises(ValueError, cc.verify, 'test', h, scheme='bsdi_crypt') + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # unknown hash should throw error + self.assertRaises(ValueError, cc.verify, 'stub', '$6$232323123$1287319827') + + # rejects non-string secrets + cc = CryptContext(["des_crypt"]) + h = refhash = cc.encrypt('stub') + for secret, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.verify, secret, h, **kwds) + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for h, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.verify, 'secret', h, **kwds) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().verify, 'secret', 'hash') + + # bad scheme values + self.assertRaises(KeyError, cc.verify, 'secret', refhash, scheme="fake") # XXX: should this be ValueError? 
+ self.assertRaises(TypeError, cc.verify, 'secret', refhash, scheme=1) + + # bad category values + self.assertRaises(TypeError, cc.verify, 'secret', refhash, category=1) + + def test_46_needs_update(self): + "test needs_update() method" + cc = CryptContext(**self.sample_4_dict) + + # check deprecated scheme + self.assertTrue(cc.needs_update('9XXD4trGYeGJA')) + self.assertFalse(cc.needs_update('$1$J8HC2RCr$HcmM.7NxB2weSvlw2FgzU0')) + + # check min rounds + self.assertTrue(cc.needs_update('$5$rounds=1999$jD81UCoo.zI.UETs$Y7qSTQ6mTiU9qZB4fRr43wRgQq4V.5AAf7F97Pzxey/')) + self.assertFalse(cc.needs_update('$5$rounds=2000$228SSRje04cnNCaQ$YGV4RYu.5sNiBvorQDlO0WWQjyJVGKBcJXz3OtyQ2u8')) + + # check max rounds + self.assertFalse(cc.needs_update('$5$rounds=3000$fS9iazEwTKi7QPW4$VasgBC8FqlOvD7x2HhABaMXCTh9jwHclPA9j5YQdns.')) + self.assertTrue(cc.needs_update('$5$rounds=3001$QlFHHifXvpFX4PLs$/0ekt7lSs/lOikSerQ0M/1porEHxYq7W/2hdFpxA3fA')) + + #-------------------------------------------------------------- + # test _bind_needs_update() framework + #-------------------------------------------------------------- + bind_state = [] + check_state = [] + class dummy(uh.StaticHandler): + name = 'dummy' + _hash_prefix = '@' + + @classmethod + def _bind_needs_update(cls, **settings): + bind_state.append(settings) + return cls._needs_update + + @classmethod + def _needs_update(cls, hash, secret): + check_state.append((hash,secret)) + return secret == "nu" + + def _calc_checksum(self, secret): + from hashlib import md5 + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + return str_to_uascii(md5(secret).hexdigest()) + + # creating context should call bind function w/ settings + ctx = CryptContext([dummy]) + self.assertEqual(bind_state, [{}]) + + # calling needs_update should query callback + hash = refhash = dummy.encrypt("test") + self.assertFalse(ctx.needs_update(hash)) + self.assertEqual(check_state, [(hash,None)]) + del check_state[:] + + # now with a password + self.assertFalse(ctx.needs_update(hash, secret='bob')) + self.assertEqual(check_state, [(hash,'bob')]) + del check_state[:] + + # now when it returns True + self.assertTrue(ctx.needs_update(hash, secret='nu')) + self.assertEqual(check_state, [(hash,'nu')]) + del check_state[:] + + #-------------------------------------------------------------- + # border cases + #-------------------------------------------------------------- + + # rejects non-string hashes + cc = CryptContext(["des_crypt"]) + for hash, kwds in self.nonstring_vectors: + self.assertRaises(TypeError, cc.needs_update, hash, **kwds) + + # throws error without schemes + self.assertRaises(KeyError, CryptContext().needs_update, 'hash') + + # bad scheme values + self.assertRaises(KeyError, cc.needs_update, refhash, scheme="fake") # XXX: should this be ValueError? 
+ self.assertRaises(TypeError, cc.needs_update, refhash, scheme=1)
+
+ # bad category values
+ self.assertRaises(TypeError, cc.needs_update, refhash, category=1)
+
+ def test_47_verify_and_update(self):
+ "test verify_and_update()"
+ cc = CryptContext(**self.sample_4_dict)
+
+ # create some hashes
+ h1 = cc.encrypt("password", scheme="des_crypt")
+ h2 = cc.encrypt("password", scheme="sha256_crypt")
+
+ # check bad password, deprecated hash
+ ok, new_hash = cc.verify_and_update("wrongpass", h1)
+ self.assertFalse(ok)
+ self.assertIs(new_hash, None)
+
+ # check bad password, good hash
+ ok, new_hash = cc.verify_and_update("wrongpass", h2)
+ self.assertFalse(ok)
+ self.assertIs(new_hash, None)
+
+ # check right password, deprecated hash
+ ok, new_hash = cc.verify_and_update("password", h1)
+ self.assertTrue(ok)
+ self.assertEqual(cc.identify(new_hash), "sha256_crypt")
+
+ # check right password, good hash
+ ok, new_hash = cc.verify_and_update("password", h2)
+ self.assertTrue(ok)
+ self.assertIs(new_hash, None)
+
+ #--------------------------------------------------------------
+ # border cases
+ #--------------------------------------------------------------
+
+ # rejects non-string secrets
+ cc = CryptContext(["des_crypt"])
+ hash = refhash = cc.encrypt('stub')
+ for secret, kwds in self.nonstring_vectors:
+ self.assertRaises(TypeError, cc.verify_and_update, secret, hash, **kwds)
+
+ # rejects non-string hashes
+ cc = CryptContext(["des_crypt"])
+ for hash, kwds in self.nonstring_vectors:
+ self.assertRaises(TypeError, cc.verify_and_update, 'secret', hash, **kwds)
+
+ # throws error without schemes
+ self.assertRaises(KeyError, CryptContext().verify_and_update, 'secret', 'hash')
+
+ # bad scheme values
+ self.assertRaises(KeyError, cc.verify_and_update, 'secret', refhash, scheme="fake") # XXX: should this be ValueError?
+ self.assertRaises(TypeError, cc.verify_and_update, 'secret', refhash, scheme=1)
+
+ # bad category values
+ self.assertRaises(TypeError, cc.verify_and_update, 'secret', refhash, category=1)
+
+ #===================================================================
+ # rounds options
+ #===================================================================
+ # NOTE: the following tests check how _CryptRecord handles
+ # the min/max/default/vary_rounds options, via the output of
+ # genconfig(). it's assumed encrypt() takes the same codepath.
+ + def test_50_rounds_limits(self): + "test rounds limits" + cc = CryptContext(schemes=["sha256_crypt"], + all__min_rounds=2000, + all__max_rounds=3000, + all__default_rounds=2500, + ) + + #-------------------------------------------------- + # min_rounds + #-------------------------------------------------- + + # set below handler minimum + with self.assertWarningList([PasslibConfigWarning]*2): + c2 = cc.copy(all__min_rounds=500, all__max_rounds=None, + all__default_rounds=500) + self.assertEqual(c2.genconfig(salt="nacl"), "$5$rounds=1000$nacl$") + + # below policy minimum + with self.assertWarningList(PasslibConfigWarning): + self.assertEqual( + cc.genconfig(rounds=1999, salt="nacl"), + '$5$rounds=2000$nacl$', + ) + + # equal to policy minimum + self.assertEqual( + cc.genconfig(rounds=2000, salt="nacl"), + '$5$rounds=2000$nacl$', + ) + + # above policy minimum + self.assertEqual( + cc.genconfig(rounds=2001, salt="nacl"), + '$5$rounds=2001$nacl$' + ) + + #-------------------------------------------------- + # max rounds + #-------------------------------------------------- + + # set above handler max + with self.assertWarningList([PasslibConfigWarning]*2): + c2 = cc.copy(all__max_rounds=int(1e9)+500, all__min_rounds=None, + all__default_rounds=int(1e9)+500) + + self.assertEqual(c2.genconfig(salt="nacl"), + "$5$rounds=999999999$nacl$") + + # above policy max + with self.assertWarningList(PasslibConfigWarning): + self.assertEqual( + cc.genconfig(rounds=3001, salt="nacl"), + '$5$rounds=3000$nacl$' + ) + + # equal policy max + self.assertEqual( + cc.genconfig(rounds=3000, salt="nacl"), + '$5$rounds=3000$nacl$' + ) + + # below policy max + self.assertEqual( + cc.genconfig(rounds=2999, salt="nacl"), + '$5$rounds=2999$nacl$', + ) + + #-------------------------------------------------- + # default_rounds + #-------------------------------------------------- + + # explicit default rounds + self.assertEqual(cc.genconfig(salt="nacl"), '$5$rounds=2500$nacl$') + + # fallback default rounds - use handler's + df = hash.sha256_crypt.default_rounds + c2 = cc.copy(all__default_rounds=None, all__max_rounds=df<<1) + self.assertEqual(c2.genconfig(salt="nacl"), + '$5$rounds=%d$nacl$' % df) + + # fallback default rounds - use handler's, but clipped to max rounds + c2 = cc.copy(all__default_rounds=None, all__max_rounds=3000) + self.assertEqual(c2.genconfig(salt="nacl"), '$5$rounds=3000$nacl$') + + # TODO: test default falls back to mx / mn if handler has no default. 
+ + # default rounds - out of bounds + self.assertRaises(ValueError, cc.copy, all__default_rounds=1999) + cc.copy(all__default_rounds=2000) + cc.copy(all__default_rounds=3000) + self.assertRaises(ValueError, cc.copy, all__default_rounds=3001) + + #-------------------------------------------------- + # border cases + #-------------------------------------------------- + + # invalid min/max bounds + c2 = CryptContext(schemes=["sha256_crypt"]) + self.assertRaises(ValueError, c2.copy, all__min_rounds=-1) + self.assertRaises(ValueError, c2.copy, all__max_rounds=-1) + self.assertRaises(ValueError, c2.copy, all__min_rounds=2000, + all__max_rounds=1999) + + # test bad values + self.assertRaises(ValueError, CryptContext, all__min_rounds='x') + self.assertRaises(ValueError, CryptContext, all__max_rounds='x') + self.assertRaises(ValueError, CryptContext, all__vary_rounds='x') + self.assertRaises(ValueError, CryptContext, all__default_rounds='x') + + # test bad types rejected + bad = NotImplemented + self.assertRaises(TypeError, CryptContext, "sha256_crypt", all__min_rounds=bad) + self.assertRaises(TypeError, CryptContext, "sha256_crypt", all__max_rounds=bad) + self.assertRaises(TypeError, CryptContext, "sha256_crypt", all__vary_rounds=bad) + self.assertRaises(TypeError, CryptContext, "sha256_crypt", all__default_rounds=bad) + + def test_51_linear_vary_rounds(self): + "test linear vary rounds" + cc = CryptContext(schemes=["sha256_crypt"], + all__min_rounds=1995, + all__max_rounds=2005, + all__default_rounds=2000, + ) + + # test negative + self.assertRaises(ValueError, cc.copy, all__vary_rounds=-1) + self.assertRaises(ValueError, cc.copy, all__vary_rounds="-1%") + self.assertRaises(ValueError, cc.copy, all__vary_rounds="101%") + + # test static + c2 = cc.copy(all__vary_rounds=0) + self.assert_rounds_range(c2, "sha256_crypt", 2000, 2000) + + c2 = cc.copy(all__vary_rounds="0%") + self.assert_rounds_range(c2, "sha256_crypt", 2000, 2000) + + # test absolute + c2 = cc.copy(all__vary_rounds=1) + self.assert_rounds_range(c2, "sha256_crypt", 1999, 2001) + c2 = cc.copy(all__vary_rounds=100) + self.assert_rounds_range(c2, "sha256_crypt", 1995, 2005) + + # test relative + c2 = cc.copy(all__vary_rounds="0.1%") + self.assert_rounds_range(c2, "sha256_crypt", 1998, 2002) + c2 = cc.copy(all__vary_rounds="100%") + self.assert_rounds_range(c2, "sha256_crypt", 1995, 2005) + + def test_52_log2_vary_rounds(self): + "test log2 vary rounds" + cc = CryptContext(schemes=["bcrypt"], + all__min_rounds=15, + all__max_rounds=25, + all__default_rounds=20, + ) + + # test negative + self.assertRaises(ValueError, cc.copy, all__vary_rounds=-1) + self.assertRaises(ValueError, cc.copy, all__vary_rounds="-1%") + self.assertRaises(ValueError, cc.copy, all__vary_rounds="101%") + + # test static + c2 = cc.copy(all__vary_rounds=0) + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + c2 = cc.copy(all__vary_rounds="0%") + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + # test absolute + c2 = cc.copy(all__vary_rounds=1) + self.assert_rounds_range(c2, "bcrypt", 19, 21) + c2 = cc.copy(all__vary_rounds=100) + self.assert_rounds_range(c2, "bcrypt", 15, 25) + + # test relative - should shift over at 50% mark + c2 = cc.copy(all__vary_rounds="1%") + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + c2 = cc.copy(all__vary_rounds="49%") + self.assert_rounds_range(c2, "bcrypt", 20, 20) + + c2 = cc.copy(all__vary_rounds="50%") + self.assert_rounds_range(c2, "bcrypt", 19, 20) + + c2 = cc.copy(all__vary_rounds="100%") + self.assert_rounds_range(c2, 
"bcrypt", 15, 21) + + def assert_rounds_range(self, context, scheme, lower, upper): + "helper to check vary_rounds covers specified range" + # NOTE: this runs enough times the min and max *should* be hit, + # though there's a faint chance it will randomly fail. + handler = context.handler(scheme) + salt = handler.default_salt_chars[0:1] * handler.max_salt_size + seen = set() + for i in irange(300): + h = context.genconfig(scheme, salt=salt) + r = handler.from_string(h).rounds + seen.add(r) + self.assertEqual(min(seen), lower, "vary_rounds had wrong lower limit:") + self.assertEqual(max(seen), upper, "vary_rounds had wrong upper limit:") + + #=================================================================== + # feature tests + #=================================================================== + def test_60_min_verify_time(self): + "test verify() honors min_verify_time" + delta = .05 + if TICK_RESOLUTION >= delta/10: + raise self.skipTest("timer not accurate enough") + min_delay = 2*delta + min_verify_time = 5*delta + max_delay = 8*delta + + class TimedHash(uh.StaticHandler): + "psuedo hash that takes specified amount of time" + name = "timed_hash" + delay = 0 + + @classmethod + def identify(cls, hash): + return True + + def _calc_checksum(self, secret): + quicksleep(self.delay) + return to_unicode(secret + 'x') + + # check mvt issues a warning, and then filter for remainder of test + with self.assertWarningList(["'min_verify_time' is deprecated"]*2): + cc = CryptContext([TimedHash], min_verify_time=min_verify_time, + admin__context__min_verify_time=min_verify_time*2) + warnings.filterwarnings("ignore", "'min_verify_time' is deprecated") + + def timecall(func, *args, **kwds): + start = tick() + result = func(*args, **kwds) + return tick()-start, result + + # verify genhash delay works + TimedHash.delay = min_delay + elapsed, result = timecall(TimedHash.genhash, 'stub', None) + self.assertEqual(result, 'stubx') + self.assertAlmostEqual(elapsed, min_delay, delta=delta) + + # ensure min verify time is honored + + # correct password + elapsed, result = timecall(cc.verify, "stub", "stubx") + self.assertTrue(result) + self.assertAlmostEqual(elapsed, min_delay, delta=delta) + + # incorrect password + elapsed, result = timecall(cc.verify, "blob", "stubx") + self.assertFalse(result) + self.assertAlmostEqual(elapsed, min_verify_time, delta=delta) + + # incorrect password w/ special category setting + elapsed, result = timecall(cc.verify, "blob", "stubx", category="admin") + self.assertFalse(result) + self.assertAlmostEqual(elapsed, min_verify_time*2, delta=delta) + + # ensure taking longer emits a warning. 
+ TimedHash.delay = max_delay + with self.assertWarningList(".*verify exceeded min_verify_time"): + elapsed, result = timecall(cc.verify, "blob", "stubx") + self.assertFalse(result) + self.assertAlmostEqual(elapsed, max_delay, delta=delta) + + # reject values < 0 + self.assertRaises(ValueError, CryptContext, min_verify_time=-1) + + def test_61_autodeprecate(self): + "test deprecated='auto' is handled correctly" + + def getstate(ctx, category=None): + return [ctx._is_deprecated_scheme(scheme, category) for scheme in ctx.schemes()] + + # correctly reports default + ctx = CryptContext("sha256_crypt,md5_crypt,des_crypt", deprecated="auto") + self.assertEqual(getstate(ctx, None), [False, True, True]) + self.assertEqual(getstate(ctx, "admin"), [False, True, True]) + + # correctly reports changed default + ctx.update(default="md5_crypt") + self.assertEqual(getstate(ctx, None), [True, False, True]) + self.assertEqual(getstate(ctx, "admin"), [True, False, True]) + + # category default is handled correctly + ctx.update(admin__context__default="des_crypt") + self.assertEqual(getstate(ctx, None), [True, False, True]) + self.assertEqual(getstate(ctx, "admin"), [True, True, False]) + + # handles 1 scheme + ctx = CryptContext(["sha256_crypt"], deprecated="auto") + self.assertEqual(getstate(ctx, None), [False]) + self.assertEqual(getstate(ctx, "admin"), [False]) + + # disallow auto & other deprecated schemes at same time. + self.assertRaises(ValueError, CryptContext, "sha256_crypt,md5_crypt", + deprecated="auto,md5_crypt") + self.assertRaises(ValueError, CryptContext, "sha256_crypt,md5_crypt", + deprecated="md5_crypt,auto") + + #=================================================================== + # handler deprecation detectors + #=================================================================== + def test_62_bcrypt_update(self): + "test verify_and_update / needs_update corrects bcrypt padding" + # see issue 25. 
+ bcrypt = hash.bcrypt + + PASS1 = "test" + BAD1 = "$2a$04$yjDgE74RJkeqC0/1NheSScrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + GOOD1 = "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + ctx = CryptContext(["bcrypt"], bcrypt__rounds=4) + + self.assertTrue(ctx.needs_update(BAD1)) + self.assertFalse(ctx.needs_update(GOOD1)) + + if bcrypt.has_backend(): + self.assertEqual(ctx.verify_and_update(PASS1,GOOD1), (True,None)) + with self.assertWarningList(["incorrect.*padding bits"]*2): + self.assertEqual(ctx.verify_and_update("x",BAD1), (False,None)) + ok, new_hash = ctx.verify_and_update(PASS1, BAD1) + self.assertTrue(ok) + self.assertTrue(new_hash and new_hash != BAD1) + + def test_63_bsdi_crypt_update(self): + "test verify_and_update / needs_update corrects bsdi even rounds" + even_hash = '_Y/../cG0zkJa6LY6k4c' + odd_hash = '_Z/..TgFg0/ptQtpAgws' + secret = 'test' + ctx = CryptContext(['bsdi_crypt'], bsdi_crypt__min_rounds=5) + + self.assertTrue(ctx.needs_update(even_hash)) + self.assertFalse(ctx.needs_update(odd_hash)) + + self.assertEqual(ctx.verify_and_update(secret, odd_hash), (True,None)) + self.assertEqual(ctx.verify_and_update("x", even_hash), (False,None)) + ok, new_hash = ctx.verify_and_update(secret, even_hash) + self.assertTrue(ok) + self.assertTrue(new_hash and new_hash != even_hash) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# LazyCryptContext +#============================================================================= +class dummy_2(uh.StaticHandler): + name = "dummy_2" + +class LazyCryptContextTest(TestCase): + descriptionPrefix = "LazyCryptContext" + + def setUp(self): + # make sure this isn't registered before OR after + unload_handler_name("dummy_2") + self.addCleanup(unload_handler_name, "dummy_2") + + def test_kwd_constructor(self): + "test plain kwds" + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + cc = LazyCryptContext(iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertEqual(cc.schemes(), ("dummy_2", "des_crypt")) + self.assertTrue(cc._is_deprecated_scheme("des_crypt")) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + + def test_callable_constructor(self): + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + def onload(flag=False): + self.assertTrue(flag) + return dict(schemes=iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + cc = LazyCryptContext(onload=onload, flag=True) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertEqual(cc.schemes(), ("dummy_2", "des_crypt")) + self.assertTrue(cc._is_deprecated_scheme("des_crypt")) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_context_deprecated.py b/passlib/tests/test_context_deprecated.py new file mode 100644 index 00000000..db0c49d9 --- /dev/null +++ b/passlib/tests/test_context_deprecated.py @@ -0,0 +1,752 @@ +"""tests for passlib.context + +this file is a clone of the 1.5 test_context.py, +containing the tests using the legacy CryptPolicy api. 
+it's being preserved here to ensure the old api doesn't break +(until Passlib 1.8, when this and the legacy api will be removed). +""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import hashlib +from logging import getLogger +import os +import time +import warnings +import sys +# site +try: + from pkg_resources import resource_filename +except ImportError: + resource_filename = None +# pkg +from passlib import hash +from passlib.context import CryptContext, CryptPolicy, LazyCryptContext +from passlib.exc import PasslibConfigWarning +from passlib.utils import tick, to_bytes, to_unicode +from passlib.utils.compat import irange, u, bytes +import passlib.utils.handlers as uh +from passlib.tests.utils import TestCase, catch_warnings, set_file +from passlib.registry import (register_crypt_handler_path, + _has_crypt_handler as has_crypt_handler, + _unload_handler_name as unload_handler_name, + get_crypt_handler, + ) +# module +log = getLogger(__name__) + +#============================================================================= +# +#============================================================================= +class CryptPolicyTest(TestCase): + "test CryptPolicy object" + + # TODO: need to test user categories w/in all this + + descriptionPrefix = "CryptPolicy" + + #=================================================================== + # sample crypt policies used for testing + #=================================================================== + + #--------------------------------------------------------------- + # sample 1 - average config file + #--------------------------------------------------------------- + # NOTE: copy of this is stored in file passlib/tests/sample_config_1s.cfg + sample_config_1s = """\ +[passlib] +schemes = des_crypt, md5_crypt, bsdi_crypt, sha512_crypt +default = md5_crypt +all.vary_rounds = 10%% +bsdi_crypt.max_rounds = 30000 +bsdi_crypt.default_rounds = 25000 +sha512_crypt.max_rounds = 50000 +sha512_crypt.min_rounds = 40000 +""" + sample_config_1s_path = os.path.abspath(os.path.join( + os.path.dirname(__file__), "sample_config_1s.cfg")) + if not os.path.exists(sample_config_1s_path) and resource_filename: + # in case we're zipped up in an egg. + sample_config_1s_path = resource_filename("passlib.tests", + "sample_config_1s.cfg") + + # make sure sample_config_1s uses \n linesep - tests rely on this + assert sample_config_1s.startswith("[passlib]\nschemes") + + sample_config_1pd = dict( + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + default = "md5_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__max_rounds = 30000, + bsdi_crypt__default_rounds = 25000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds = 40000, + ) + + sample_config_1pid = { + "schemes": "des_crypt, md5_crypt, bsdi_crypt, sha512_crypt", + "default": "md5_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + "all.vary_rounds": 0.1, + "bsdi_crypt.max_rounds": 30000, + "bsdi_crypt.default_rounds": 25000, + "sha512_crypt.max_rounds": 50000, + "sha512_crypt.min_rounds": 40000, + } + + sample_config_1prd = dict( + schemes = [ hash.des_crypt, hash.md5_crypt, hash.bsdi_crypt, hash.sha512_crypt], + default = "md5_crypt", # NOTE: passlib <= 1.5 was handler obj. 
+ # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__max_rounds = 30000, + bsdi_crypt__default_rounds = 25000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds = 40000, + ) + + #--------------------------------------------------------------- + # sample 2 - partial policy & result of overlay on sample 1 + #--------------------------------------------------------------- + sample_config_2s = """\ +[passlib] +bsdi_crypt.min_rounds = 29000 +bsdi_crypt.max_rounds = 35000 +bsdi_crypt.default_rounds = 31000 +sha512_crypt.min_rounds = 45000 +""" + + sample_config_2pd = dict( + # using this to test full replacement of existing options + bsdi_crypt__min_rounds = 29000, + bsdi_crypt__max_rounds = 35000, + bsdi_crypt__default_rounds = 31000, + # using this to test partial replacement of existing options + sha512_crypt__min_rounds=45000, + ) + + sample_config_12pd = dict( + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + default = "md5_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__min_rounds = 29000, + bsdi_crypt__max_rounds = 35000, + bsdi_crypt__default_rounds = 31000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds=45000, + ) + + #--------------------------------------------------------------- + # sample 3 - just changing default + #--------------------------------------------------------------- + sample_config_3pd = dict( + default="sha512_crypt", + ) + + sample_config_123pd = dict( + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + default = "sha512_crypt", + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + bsdi_crypt__min_rounds = 29000, + bsdi_crypt__max_rounds = 35000, + bsdi_crypt__default_rounds = 31000, + sha512_crypt__max_rounds = 50000, + sha512_crypt__min_rounds=45000, + ) + + #--------------------------------------------------------------- + # sample 4 - category specific + #--------------------------------------------------------------- + sample_config_4s = """ +[passlib] +schemes = sha512_crypt +all.vary_rounds = 10%% +default.sha512_crypt.max_rounds = 20000 +admin.all.vary_rounds = 5%% +admin.sha512_crypt.max_rounds = 40000 +""" + + sample_config_4pd = dict( + schemes = [ "sha512_crypt" ], + # NOTE: not maintaining backwards compat for rendering to "10%" + all__vary_rounds = 0.1, + sha512_crypt__max_rounds = 20000, + # NOTE: not maintaining backwards compat for rendering to "5%" + admin__all__vary_rounds = 0.05, + admin__sha512_crypt__max_rounds = 40000, + ) + + #--------------------------------------------------------------- + # sample 5 - to_string & deprecation testing + #--------------------------------------------------------------- + sample_config_5s = sample_config_1s + """\ +deprecated = des_crypt +admin__context__deprecated = des_crypt, bsdi_crypt +""" + + sample_config_5pd = sample_config_1pd.copy() + sample_config_5pd.update( + deprecated = [ "des_crypt" ], + admin__context__deprecated = [ "des_crypt", "bsdi_crypt" ], + ) + + sample_config_5pid = sample_config_1pid.copy() + sample_config_5pid.update({ + "deprecated": "des_crypt", + "admin.context.deprecated": "des_crypt, bsdi_crypt", + }) + + sample_config_5prd = sample_config_1prd.copy() + sample_config_5prd.update({ + # XXX: should deprecated return the actual handlers in this case? + # would have to modify how policy stores info, for one. 
+ "deprecated": ["des_crypt"], + "admin__context__deprecated": ["des_crypt", "bsdi_crypt"], + }) + + #=================================================================== + # constructors + #=================================================================== + def setUp(self): + TestCase.setUp(self) + warnings.filterwarnings("ignore", + r"The CryptPolicy class has been deprecated") + warnings.filterwarnings("ignore", + r"the method.*hash_needs_update.*is deprecated") + + def test_00_constructor(self): + "test CryptPolicy() constructor" + policy = CryptPolicy(**self.sample_config_1pd) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + policy = CryptPolicy(self.sample_config_1pd) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + self.assertRaises(TypeError, CryptPolicy, {}, {}) + self.assertRaises(TypeError, CryptPolicy, {}, dummy=1) + + # check key with too many separators is rejected + self.assertRaises(TypeError, CryptPolicy, + schemes = [ "des_crypt", "md5_crypt", "bsdi_crypt", "sha512_crypt"], + bad__key__bsdi_crypt__max_rounds = 30000, + ) + + # check nameless handler rejected + class nameless(uh.StaticHandler): + name = None + self.assertRaises(ValueError, CryptPolicy, schemes=[nameless]) + + # check scheme must be name or crypt handler + self.assertRaises(TypeError, CryptPolicy, schemes=[uh.StaticHandler]) + + # check name conflicts are rejected + class dummy_1(uh.StaticHandler): + name = 'dummy_1' + self.assertRaises(KeyError, CryptPolicy, schemes=[dummy_1, dummy_1]) + + # with unknown deprecated value + self.assertRaises(KeyError, CryptPolicy, + schemes=['des_crypt'], + deprecated=['md5_crypt']) + + # with unknown default value + self.assertRaises(KeyError, CryptPolicy, + schemes=['des_crypt'], + default='md5_crypt') + + def test_01_from_path_simple(self): + "test CryptPolicy.from_path() constructor" + # NOTE: this is separate so it can also run under GAE + + # test preset stored in existing file + path = self.sample_config_1s_path + policy = CryptPolicy.from_path(path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test if path missing + self.assertRaises(EnvironmentError, CryptPolicy.from_path, path + 'xxx') + + def test_01_from_path(self): + "test CryptPolicy.from_path() constructor with encodings" + path = self.mktemp() + + # test "\n" linesep + set_file(path, self.sample_config_1s) + policy = CryptPolicy.from_path(path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test "\r\n" linesep + set_file(path, self.sample_config_1s.replace("\n","\r\n")) + policy = CryptPolicy.from_path(path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test with custom encoding + uc2 = to_bytes(self.sample_config_1s, "utf-16", source_encoding="utf-8") + set_file(path, uc2) + policy = CryptPolicy.from_path(path, encoding="utf-16") + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + def test_02_from_string(self): + "test CryptPolicy.from_string() constructor" + # test "\n" linesep + policy = CryptPolicy.from_string(self.sample_config_1s) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test "\r\n" linesep + policy = CryptPolicy.from_string( + self.sample_config_1s.replace("\n","\r\n")) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test with unicode + data = to_unicode(self.sample_config_1s) + policy = CryptPolicy.from_string(data) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test with non-ascii-compatible encoding + uc2 = 
to_bytes(self.sample_config_1s, "utf-16", source_encoding="utf-8") + policy = CryptPolicy.from_string(uc2, encoding="utf-16") + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # test category specific options + policy = CryptPolicy.from_string(self.sample_config_4s) + self.assertEqual(policy.to_dict(), self.sample_config_4pd) + + def test_03_from_source(self): + "test CryptPolicy.from_source() constructor" + # pass it a path + policy = CryptPolicy.from_source(self.sample_config_1s_path) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass it a string + policy = CryptPolicy.from_source(self.sample_config_1s) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass it a dict (NOTE: make a copy to detect in-place modifications) + policy = CryptPolicy.from_source(self.sample_config_1pd.copy()) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass it existing policy + p2 = CryptPolicy.from_source(policy) + self.assertIs(policy, p2) + + # pass it something wrong + self.assertRaises(TypeError, CryptPolicy.from_source, 1) + self.assertRaises(TypeError, CryptPolicy.from_source, []) + + def test_04_from_sources(self): + "test CryptPolicy.from_sources() constructor" + + # pass it empty list + self.assertRaises(ValueError, CryptPolicy.from_sources, []) + + # pass it one-element list + policy = CryptPolicy.from_sources([self.sample_config_1s]) + self.assertEqual(policy.to_dict(), self.sample_config_1pd) + + # pass multiple sources + policy = CryptPolicy.from_sources( + [ + self.sample_config_1s_path, + self.sample_config_2s, + self.sample_config_3pd, + ]) + self.assertEqual(policy.to_dict(), self.sample_config_123pd) + + def test_05_replace(self): + "test CryptPolicy.replace() constructor" + + p1 = CryptPolicy(**self.sample_config_1pd) + + # check overlaying sample 2 + p2 = p1.replace(**self.sample_config_2pd) + self.assertEqual(p2.to_dict(), self.sample_config_12pd) + + # check repeating overlay makes no change + p2b = p2.replace(**self.sample_config_2pd) + self.assertEqual(p2b.to_dict(), self.sample_config_12pd) + + # check overlaying sample 3 + p3 = p2.replace(self.sample_config_3pd) + self.assertEqual(p3.to_dict(), self.sample_config_123pd) + + def test_06_forbidden(self): + "test CryptPolicy() forbidden kwds" + + # salt not allowed to be set + self.assertRaises(KeyError, CryptPolicy, + schemes=["des_crypt"], + des_crypt__salt="xx", + ) + self.assertRaises(KeyError, CryptPolicy, + schemes=["des_crypt"], + all__salt="xx", + ) + + # schemes not allowed for category + self.assertRaises(KeyError, CryptPolicy, + schemes=["des_crypt"], + user__context__schemes=["md5_crypt"], + ) + + #=================================================================== + # reading + #=================================================================== + def test_10_has_schemes(self): + "test has_schemes() method" + + p1 = CryptPolicy(**self.sample_config_1pd) + self.assertTrue(p1.has_schemes()) + + p3 = CryptPolicy(**self.sample_config_3pd) + self.assertTrue(not p3.has_schemes()) + + def test_11_iter_handlers(self): + "test iter_handlers() method" + + p1 = CryptPolicy(**self.sample_config_1pd) + s = self.sample_config_1prd['schemes'] + self.assertEqual(list(p1.iter_handlers()), s) + + p3 = CryptPolicy(**self.sample_config_3pd) + self.assertEqual(list(p3.iter_handlers()), []) + + def test_12_get_handler(self): + "test get_handler() method" + + p1 = CryptPolicy(**self.sample_config_1pd) + + # check by name + self.assertIs(p1.get_handler("bsdi_crypt"), 
hash.bsdi_crypt) + + # check by missing name + self.assertIs(p1.get_handler("sha256_crypt"), None) + self.assertRaises(KeyError, p1.get_handler, "sha256_crypt", required=True) + + # check default + self.assertIs(p1.get_handler(), hash.md5_crypt) + + def test_13_get_options(self): + "test get_options() method" + + p12 = CryptPolicy(**self.sample_config_12pd) + + self.assertEqual(p12.get_options("bsdi_crypt"),dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds = 0.1, + min_rounds = 29000, + max_rounds = 35000, + default_rounds = 31000, + )) + + self.assertEqual(p12.get_options("sha512_crypt"),dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds = 0.1, + min_rounds = 45000, + max_rounds = 50000, + )) + + p4 = CryptPolicy.from_string(self.sample_config_4s) + self.assertEqual(p4.get_options("sha512_crypt"), dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds=0.1, + max_rounds=20000, + )) + + self.assertEqual(p4.get_options("sha512_crypt", "user"), dict( + # NOTE: not maintaining backwards compat for rendering to "10%" + vary_rounds=0.1, + max_rounds=20000, + )) + + self.assertEqual(p4.get_options("sha512_crypt", "admin"), dict( + # NOTE: not maintaining backwards compat for rendering to "5%" + vary_rounds=0.05, + max_rounds=40000, + )) + + def test_14_handler_is_deprecated(self): + "test handler_is_deprecated() method" + pa = CryptPolicy(**self.sample_config_1pd) + pb = CryptPolicy(**self.sample_config_5pd) + + self.assertFalse(pa.handler_is_deprecated("des_crypt")) + self.assertFalse(pa.handler_is_deprecated(hash.bsdi_crypt)) + self.assertFalse(pa.handler_is_deprecated("sha512_crypt")) + + self.assertTrue(pb.handler_is_deprecated("des_crypt")) + self.assertFalse(pb.handler_is_deprecated(hash.bsdi_crypt)) + self.assertFalse(pb.handler_is_deprecated("sha512_crypt")) + + # check categories as well + self.assertTrue(pb.handler_is_deprecated("des_crypt", "user")) + self.assertFalse(pb.handler_is_deprecated("bsdi_crypt", "user")) + self.assertTrue(pb.handler_is_deprecated("des_crypt", "admin")) + self.assertTrue(pb.handler_is_deprecated("bsdi_crypt", "admin")) + + # check deprecation is overridden per category + pc = CryptPolicy( + schemes=["md5_crypt", "des_crypt"], + deprecated=["md5_crypt"], + user__context__deprecated=["des_crypt"], + ) + self.assertTrue(pc.handler_is_deprecated("md5_crypt")) + self.assertFalse(pc.handler_is_deprecated("des_crypt")) + self.assertFalse(pc.handler_is_deprecated("md5_crypt", "user")) + self.assertTrue(pc.handler_is_deprecated("des_crypt", "user")) + + def test_15_min_verify_time(self): + "test get_min_verify_time() method" + # silence deprecation warnings for min verify time + warnings.filterwarnings("ignore", category=DeprecationWarning) + + pa = CryptPolicy() + self.assertEqual(pa.get_min_verify_time(), 0) + self.assertEqual(pa.get_min_verify_time('admin'), 0) + + pb = pa.replace(min_verify_time=.1) + self.assertEqual(pb.get_min_verify_time(), .1) + self.assertEqual(pb.get_min_verify_time('admin'), .1) + + pc = pa.replace(admin__context__min_verify_time=.2) + self.assertEqual(pc.get_min_verify_time(), 0) + self.assertEqual(pc.get_min_verify_time('admin'), .2) + + pd = pb.replace(admin__context__min_verify_time=.2) + self.assertEqual(pd.get_min_verify_time(), .1) + self.assertEqual(pd.get_min_verify_time('admin'), .2) + + #=================================================================== + # serialization + 
#=================================================================== + def test_20_iter_config(self): + "test iter_config() method" + p5 = CryptPolicy(**self.sample_config_5pd) + self.assertEqual(dict(p5.iter_config()), self.sample_config_5pd) + self.assertEqual(dict(p5.iter_config(resolve=True)), self.sample_config_5prd) + self.assertEqual(dict(p5.iter_config(ini=True)), self.sample_config_5pid) + + def test_21_to_dict(self): + "test to_dict() method" + p5 = CryptPolicy(**self.sample_config_5pd) + self.assertEqual(p5.to_dict(), self.sample_config_5pd) + self.assertEqual(p5.to_dict(resolve=True), self.sample_config_5prd) + + def test_22_to_string(self): + "test to_string() method" + pa = CryptPolicy(**self.sample_config_5pd) + s = pa.to_string() # NOTE: can't compare string directly, ordering etc may not match + pb = CryptPolicy.from_string(s) + self.assertEqual(pb.to_dict(), self.sample_config_5pd) + + s = pa.to_string(encoding="latin-1") + self.assertIsInstance(s, bytes) + + #=================================================================== + # + #=================================================================== + +#============================================================================= +# CryptContext +#============================================================================= +class CryptContextTest(TestCase): + "test CryptContext class" + descriptionPrefix = "CryptContext" + + def setUp(self): + TestCase.setUp(self) + warnings.filterwarnings("ignore", + r"CryptContext\(\)\.replace\(\) has been deprecated.*") + warnings.filterwarnings("ignore", + r"The CryptContext ``policy`` keyword has been deprecated.*") + warnings.filterwarnings("ignore", ".*(CryptPolicy|context\.policy).*(has|have) been deprecated.*") + warnings.filterwarnings("ignore", + r"the method.*hash_needs_update.*is deprecated") + + #=================================================================== + # constructor + #=================================================================== + def test_00_constructor(self): + "test constructor" + # create crypt context using handlers + cc = CryptContext([hash.md5_crypt, hash.bsdi_crypt, hash.des_crypt]) + c,b,a = cc.policy.iter_handlers() + self.assertIs(a, hash.des_crypt) + self.assertIs(b, hash.bsdi_crypt) + self.assertIs(c, hash.md5_crypt) + + # create context using names + cc = CryptContext(["md5_crypt", "bsdi_crypt", "des_crypt"]) + c,b,a = cc.policy.iter_handlers() + self.assertIs(a, hash.des_crypt) + self.assertIs(b, hash.bsdi_crypt) + self.assertIs(c, hash.md5_crypt) + + # policy kwd + policy = cc.policy + cc = CryptContext(policy=policy) + self.assertEqual(cc.to_dict(), policy.to_dict()) + + cc = CryptContext(policy=policy, default="bsdi_crypt") + self.assertNotEqual(cc.to_dict(), policy.to_dict()) + self.assertEqual(cc.to_dict(), dict(schemes=["md5_crypt","bsdi_crypt","des_crypt"], + default="bsdi_crypt")) + + self.assertRaises(TypeError, setattr, cc, 'policy', None) + self.assertRaises(TypeError, CryptContext, policy='x') + + def test_01_replace(self): + "test replace()" + + cc = CryptContext(["md5_crypt", "bsdi_crypt", "des_crypt"]) + self.assertIs(cc.policy.get_handler(), hash.md5_crypt) + + cc2 = cc.replace() + self.assertIsNot(cc2, cc) + # NOTE: was not able to maintain backward compatibility with this... + ##self.assertIs(cc2.policy, cc.policy) + + cc3 = cc.replace(default="bsdi_crypt") + self.assertIsNot(cc3, cc) + # NOTE: was not able to maintain backward compatibility with this... 
+ ##self.assertIs(cc3.policy, cc.policy) + self.assertIs(cc3.policy.get_handler(), hash.bsdi_crypt) + + def test_02_no_handlers(self): + "test no handlers" + + # check constructor... + cc = CryptContext() + self.assertRaises(KeyError, cc.identify, 'hash', required=True) + self.assertRaises(KeyError, cc.encrypt, 'secret') + self.assertRaises(KeyError, cc.verify, 'secret', 'hash') + + # check updating policy after the fact... + cc = CryptContext(['md5_crypt']) + p = CryptPolicy(schemes=[]) + cc.policy = p + + self.assertRaises(KeyError, cc.identify, 'hash', required=True) + self.assertRaises(KeyError, cc.encrypt, 'secret') + self.assertRaises(KeyError, cc.verify, 'secret', 'hash') + + #=================================================================== + # policy adaptation + #=================================================================== + sample_policy_1 = dict( + schemes = [ "des_crypt", "md5_crypt", "phpass", "bsdi_crypt", + "sha256_crypt"], + deprecated = [ "des_crypt", ], + default = "sha256_crypt", + bsdi_crypt__max_rounds = 30, + bsdi_crypt__default_rounds = 25, + bsdi_crypt__vary_rounds = 0, + sha256_crypt__max_rounds = 3000, + sha256_crypt__min_rounds = 2000, + sha256_crypt__default_rounds = 3000, + phpass__ident = "H", + phpass__default_rounds = 7, + ) + + def test_12_hash_needs_update(self): + "test hash_needs_update() method" + cc = CryptContext(**self.sample_policy_1) + + # check deprecated scheme + self.assertTrue(cc.hash_needs_update('9XXD4trGYeGJA')) + self.assertFalse(cc.hash_needs_update('$1$J8HC2RCr$HcmM.7NxB2weSvlw2FgzU0')) + + # check min rounds + self.assertTrue(cc.hash_needs_update('$5$rounds=1999$jD81UCoo.zI.UETs$Y7qSTQ6mTiU9qZB4fRr43wRgQq4V.5AAf7F97Pzxey/')) + self.assertFalse(cc.hash_needs_update('$5$rounds=2000$228SSRje04cnNCaQ$YGV4RYu.5sNiBvorQDlO0WWQjyJVGKBcJXz3OtyQ2u8')) + + # check max rounds + self.assertFalse(cc.hash_needs_update('$5$rounds=3000$fS9iazEwTKi7QPW4$VasgBC8FqlOvD7x2HhABaMXCTh9jwHclPA9j5YQdns.')) + self.assertTrue(cc.hash_needs_update('$5$rounds=3001$QlFHHifXvpFX4PLs$/0ekt7lSs/lOikSerQ0M/1porEHxYq7W/2hdFpxA3fA')) + + #=================================================================== + # border cases + #=================================================================== + def test_30_nonstring_hash(self): + "test non-string hash values cause error" + # + # test hash=None or some other non-string causes TypeError + # and that explicit-scheme code path behaves the same. 
+ # + cc = CryptContext(["des_crypt"]) + for hash, kwds in [ + (None, {}), + (None, {"scheme": "des_crypt"}), + (1, {}), + ((), {}), + ]: + + self.assertRaises(TypeError, cc.hash_needs_update, hash, **kwds) + + cc2 = CryptContext(["mysql323"]) + self.assertRaises(TypeError, cc2.hash_needs_update, None) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# LazyCryptContext +#============================================================================= +class dummy_2(uh.StaticHandler): + name = "dummy_2" + +class LazyCryptContextTest(TestCase): + descriptionPrefix = "LazyCryptContext" + + def setUp(self): + TestCase.setUp(self) + + # make sure this isn't registered before OR after + unload_handler_name("dummy_2") + self.addCleanup(unload_handler_name, "dummy_2") + + # silence some warnings + warnings.filterwarnings("ignore", + r"CryptContext\(\)\.replace\(\) has been deprecated") + warnings.filterwarnings("ignore", ".*(CryptPolicy|context\.policy).*(has|have) been deprecated.*") + + def test_kwd_constructor(self): + "test plain kwds" + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + cc = LazyCryptContext(iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertTrue(cc.policy.handler_is_deprecated("des_crypt")) + self.assertEqual(cc.policy.schemes(), ["dummy_2", "des_crypt"]) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + + def test_callable_constructor(self): + "test create_policy() hook, returning CryptPolicy" + self.assertFalse(has_crypt_handler("dummy_2")) + register_crypt_handler_path("dummy_2", "passlib.tests.test_context") + + def create_policy(flag=False): + self.assertTrue(flag) + return CryptPolicy(schemes=iter(["dummy_2", "des_crypt"]), deprecated=["des_crypt"]) + + cc = LazyCryptContext(create_policy=create_policy, flag=True) + + self.assertFalse(has_crypt_handler("dummy_2", True)) + + self.assertTrue(cc.policy.handler_is_deprecated("des_crypt")) + self.assertEqual(cc.policy.schemes(), ["dummy_2", "des_crypt"]) + + self.assertTrue(has_crypt_handler("dummy_2", True)) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_ext_django.py b/passlib/tests/test_ext_django.py new file mode 100644 index 00000000..33a67385 --- /dev/null +++ b/passlib/tests/test_ext_django.py @@ -0,0 +1,976 @@ +"""test passlib.ext.django""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import sys +import warnings +# site +# pkg +from passlib.apps import django10_context, django14_context, django16_context +from passlib.context import CryptContext +import passlib.exc as exc +from passlib.utils.compat import iteritems, unicode, get_method_function, u, PY3 +from passlib.utils import memoized_property +from passlib.registry import get_crypt_handler +# tests +from passlib.tests.utils import TestCase, skipUnless, catch_warnings, TEST_MODE, has_active_backend +from passlib.tests.test_handlers import get_handler_case 
+# local
+
+#=============================================================================
+# configure django settings for testcases
+#=============================================================================
+from passlib.ext.django.utils import DJANGO_VERSION
+
+# disable Django integration tests under py3 when running against
+# Django < 1.5, since those versions don't support py3.
+if PY3 and DJANGO_VERSION < (1,5):
+ DJANGO_VERSION = ()
+
+# convert django version to some cheap flags
+has_django = bool(DJANGO_VERSION)
+has_django0 = has_django and DJANGO_VERSION < (1,0)
+has_django1 = DJANGO_VERSION >= (1,0)
+has_django14 = DJANGO_VERSION >= (1,4)
+
+# import and configure empty django settings
+if has_django:
+ from django.conf import settings, LazySettings
+
+ if not isinstance(settings, LazySettings):
+ # this probably means django globals have been configured already,
+ # which we don't want, since test cases reset and manipulate settings.
+ raise RuntimeError("expected django.conf.settings to be LazySettings: %r" % (settings,))
+
+ # else configure a blank settings instance for the unittests
+ if has_django0:
+ if settings._target is None:
+ from django.conf import UserSettingsHolder, global_settings
+ settings._target = UserSettingsHolder(global_settings)
+ elif not settings.configured:
+ settings.configure()
+
+#=============================================================================
+# support funcs
+#=============================================================================
+
+# flag for update_settings() to remove specified key entirely
+UNSET = object()
+
+def update_settings(**kwds):
+ """helper to update django settings from kwds"""
+ for k,v in iteritems(kwds):
+ if v is UNSET:
+ if hasattr(settings, k):
+ if has_django0:
+ delattr(settings._target, k)
+ else:
+ delattr(settings, k)
+ else:
+ setattr(settings, k, v)
+
+if has_django:
+ from django.contrib.auth.models import User
+
+ class FakeUser(User):
+ "mock user object for use in testing"
+ # NOTE: this mainly just overrides .save() to test commit behavior.
+ + @memoized_property + def saved_passwords(self): + return [] + + def pop_saved_passwords(self): + try: + return self.saved_passwords[:] + finally: + del self.saved_passwords[:] + + def save(self, update_fields=None): + # NOTE: ignoring update_fields for test purposes + self.saved_passwords.append(self.password) + +def create_mock_setter(): + state = [] + def setter(password): + state.append(password) + def popstate(): + try: + return state[:] + finally: + del state[:] + setter.popstate = popstate + return setter + +#============================================================================= +# work up stock django config +#============================================================================= +sample_hashes = {} # override sample hashes used in test cases +if DJANGO_VERSION >= (1,6): + stock_config = django16_context.to_dict() + stock_config.update( + deprecated="auto", + django_pbkdf2_sha1__default_rounds=12000, + django_pbkdf2_sha256__default_rounds=12000, + ) + sample_hashes.update( + django_pbkdf2_sha256=("not a password", "pbkdf2_sha256$12000$rpUPFQOVetrY$cEcWG4DjjDpLrDyXnduM+XJUz25U63RcM3//xaFnBnw="), + ) +elif DJANGO_VERSION >= (1,4): + stock_config = django14_context.to_dict() + stock_config.update( + deprecated="auto", + django_pbkdf2_sha1__default_rounds=10000, + django_pbkdf2_sha256__default_rounds=10000, + ) +elif DJANGO_VERSION >= (1,0): + stock_config = django10_context.to_dict() +else: + # 0.9.6 config + stock_config = dict( + schemes=["django_salted_sha1", "django_salted_md5", "hex_md5"], + deprecated=["hex_md5"] + ) + +#============================================================================= +# test utils +#============================================================================= +class _ExtensionSupport(object): + "support funcs for loading/unloading extension" + #=================================================================== + # support funcs + #=================================================================== + @classmethod + def _iter_patch_candidates(cls): + """helper to scan for monkeypatches. + + returns tuple containing: + * object (module or class) + * attribute of object + * value of attribute + * whether it should or should not be patched + """ + # XXX: this and assert_unpatched() could probably be refactored to use + # the PatchManager class to do the heavy lifting. 
+ from django.contrib.auth import models + user_attrs = ["check_password", "set_password"] + model_attrs = ["check_password"] + objs = [(models, model_attrs), (models.User, user_attrs)] + if has_django14: + from django.contrib.auth import hashers + model_attrs.append("make_password") + objs.append((hashers, ["check_password", "make_password", + "get_hasher", "identify_hasher"])) + if has_django0: + user_attrs.extend(["has_usable_password", "set_unusable_password"]) + for obj, patched in objs: + for attr in dir(obj): + if attr.startswith("_"): + continue + value = obj.__dict__.get(attr, UNSET) # can't use getattr() due to GAE + if value is UNSET and attr not in patched: + continue + value = get_method_function(value) + source = getattr(value, "__module__", None) + if source: + yield obj, attr, source, (attr in patched) + + #=================================================================== + # verify current patch state + #=================================================================== + def assert_unpatched(self): + "test that django is in unpatched state" + # make sure we aren't currently patched + mod = sys.modules.get("passlib.ext.django.models") + self.assertFalse(mod and mod._patched, "patch should not be enabled") + + # make sure no objects have been replaced, by checking __module__ + for obj, attr, source, patched in self._iter_patch_candidates(): + if patched: + self.assertTrue(source.startswith("django.contrib.auth."), + "obj=%r attr=%r was not reverted: %r" % + (obj, attr, source)) + else: + self.assertFalse(source.startswith("passlib."), + "obj=%r attr=%r should not have been patched: %r" % + (obj, attr, source)) + + def assert_patched(self, context=None): + "helper to ensure django HAS been patched, and is using specified config" + # make sure we're currently patched + mod = sys.modules.get("passlib.ext.django.models") + self.assertTrue(mod and mod._patched, "patch should have been enabled") + + # make sure only the expected objects have been patched + for obj, attr, source, patched in self._iter_patch_candidates(): + if patched: + self.assertTrue(source == "passlib.ext.django.models", + "obj=%r attr=%r should have been patched: %r" % + (obj, attr, source)) + else: + self.assertFalse(source.startswith("passlib."), + "obj=%r attr=%r should not have been patched: %r" % + (obj, attr, source)) + + # check context matches + if context is not None: + context = CryptContext._norm_source(context) + self.assertEqual(mod.password_context.to_dict(resolve=True), + context.to_dict(resolve=True)) + + #=================================================================== + # load / unload the extension (and verify it worked) + #=================================================================== + _config_keys = ["PASSLIB_CONFIG", "PASSLIB_CONTEXT", "PASSLIB_GET_CATEGORY"] + + def load_extension(self, check=True, **kwds): + "helper to load extension with specified config & patch django" + self.unload_extension() + if check: + config = kwds.get("PASSLIB_CONFIG") or kwds.get("PASSLIB_CONTEXT") + for key in self._config_keys: + kwds.setdefault(key, UNSET) + update_settings(**kwds) + import passlib.ext.django.models + if check: + self.assert_patched(context=config) + + def unload_extension(self): + "helper to remove patches and unload extension" + # remove patches and unload module + mod = sys.modules.get("passlib.ext.django.models") + if mod: + mod._remove_patch() + del sys.modules["passlib.ext.django.models"] + # wipe config from django settings + update_settings(**dict((key, UNSET) for key 
in self._config_keys)) + # check everything's gone + self.assert_unpatched() + + #=================================================================== + # eoc + #=================================================================== + +# XXX: rename to ExtensionFixture? +class _ExtensionTest(TestCase, _ExtensionSupport): + + def setUp(self): + super(_ExtensionTest, self).setUp() + + self.require_TEST_MODE("default") + + if not has_django: + raise self.skipTest("Django not installed") + + # reset to baseline, and verify it worked + self.unload_extension() + + # and do the same when the test exits + self.addCleanup(self.unload_extension) + +#============================================================================= +# extension tests +#============================================================================= +class DjangoBehaviorTest(_ExtensionTest): + "tests model to verify it matches django's behavior" + descriptionPrefix = "verify django behavior" + patched = False + config = stock_config + + # NOTE: if this test fails, it means we're not accounting for + # some part of django's hashing logic, or that this is + # running against an untested version of django with a new + # hashing policy. + + @property + def context(self): + return CryptContext._norm_source(self.config) + + def assert_unusable_password(self, user): + """check that user object is set to 'unusable password' constant""" + if DJANGO_VERSION >= (1,6): + # 1.6 on adds a random(?) suffix + self.assertTrue(user.password.startswith("!")) + else: + self.assertEqual(user.password, "!") + if has_django1 or self.patched: + self.assertFalse(user.has_usable_password()) + self.assertEqual(user.pop_saved_passwords(), []) + + def assert_valid_password(self, user, hash=UNSET, saved=None): + """check that user object has a usuable password hash. + + :param hash: optionally check it has this exact hash + :param saved: check that mock commit history + for user.password matches this list + """ + if hash is UNSET: + self.assertNotEqual(user.password, "!") + self.assertNotEqual(user.password, None) + else: + self.assertEqual(user.password, hash) + if has_django1 or self.patched: + self.assertTrue(user.has_usable_password()) + self.assertEqual(user.pop_saved_passwords(), + [] if saved is None else [saved]) + + def test_config(self): + """test hashing interface + + this function is run against both the actual django code, to + verify the assumptions of the unittests are correct; + and run against the passlib extension, to verify it matches + those assumptions. + """ + patched, config = self.patched, self.config + # this tests the following methods: + # User.set_password() + # User.check_password() + # make_password() -- 1.4 only + # check_password() + # identify_hasher() + # User.has_usable_password() + # User.set_unusable_password() + # XXX: this take a while to run. what could be trimmed? 
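+        # NOTE: the per-scheme loop at the bottom of this method repeats the
+        # check_password() / identify_hasher() checks once for every scheme
+        # in the active context, using a hash from sample_hashes when one is
+        # present, or one generated by that scheme's HandlerCase otherwise.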
+ + # TODO: get_hasher() + + #======================================================= + # setup helpers & imports + #======================================================= + ctx = self.context + setter = create_mock_setter() + PASS1 = "toomanysecrets" + WRONG1 = "letmein" + + has_hashers = False + has_identify_hasher = False + if has_django14: + from passlib.ext.django.utils import hasher_to_passlib_name, passlib_to_hasher_name + from django.contrib.auth.hashers import check_password, make_password, is_password_usable + if patched or DJANGO_VERSION > (1,5): + # identify_hasher() + # django 1.4 -- not present + # django 1.5 -- present (added in django ticket 18184) + # passlib integration -- present even under 1.4 + from django.contrib.auth.hashers import identify_hasher + has_identify_hasher = True + hash_hashers = True + else: + from django.contrib.auth.models import check_password + + #======================================================= + # make sure extension is configured correctly + #======================================================= + if patched: + # contexts should match + from passlib.ext.django.models import password_context + self.assertEqual(password_context.to_dict(resolve=True), + ctx.to_dict(resolve=True)) + + # should have patched both places + if has_django14: + from django.contrib.auth.models import check_password as check_password2 + self.assertIs(check_password2, check_password) + + #======================================================= + # default algorithm + #======================================================= + # User.set_password() should use default alg + user = FakeUser() + user.set_password(PASS1) + self.assertTrue(ctx.handler().verify(PASS1, user.password)) + self.assert_valid_password(user) + + # User.check_password() - n/a + + # make_password() should use default alg + if has_django14: + hash = make_password(PASS1) + self.assertTrue(ctx.handler().verify(PASS1, hash)) + + # check_password() - n/a + + #======================================================= + # empty password behavior + #======================================================= + if (1,4) <= DJANGO_VERSION < (1,6): + # NOTE: django 1.4-1.5 treat empty password as invalid + + # User.set_password() should set unusable flag + user = FakeUser() + user.set_password('') + self.assert_unusable_password(user) + + # User.check_password() should never return True + user = FakeUser() + user.password = hash = ctx.encrypt("") + self.assertFalse(user.check_password("")) + self.assert_valid_password(user, hash) + + # make_password() should reject empty passwords + self.assertEqual(make_password(""), "!") + + # check_password() should never return True + self.assertFalse(check_password("", hash)) + + else: + # User.set_password() should use default alg + user = FakeUser() + user.set_password('') + hash = user.password + self.assertTrue(ctx.handler().verify('', hash)) + self.assert_valid_password(user, hash) + + # User.check_password() should return True + self.assertTrue(user.check_password("")) + self.assert_valid_password(user, hash) + + # no make_password() + + # check_password() should return True + self.assertTrue(check_password("", hash)) + + #======================================================= + # 'unusable flag' behavior + #======================================================= + if has_django1 or patched: + + # sanity check via user.set_unusable_password() + user = FakeUser() + user.set_unusable_password() + self.assert_unusable_password(user) + + # ensure User.set_password() sets 
unusable flag + user = FakeUser() + user.set_password(None) + if DJANGO_VERSION < (1,2): + # would set password to hash of "None" + self.assert_valid_password(user) + else: + self.assert_unusable_password(user) + + # User.check_password() should always fail + if DJANGO_VERSION < (1,2): + self.assertTrue(user.check_password(None)) + self.assertTrue(user.check_password('None')) + self.assertFalse(user.check_password('')) + self.assertFalse(user.check_password(PASS1)) + self.assertFalse(user.check_password(WRONG1)) + else: + self.assertFalse(user.check_password(None)) + self.assertFalse(user.check_password('None')) + self.assertFalse(user.check_password('')) + self.assertFalse(user.check_password(PASS1)) + self.assertFalse(user.check_password(WRONG1)) + self.assert_unusable_password(user) + + # make_password() should also set flag + if has_django14: + if DJANGO_VERSION >= (1,6): + self.assertTrue(make_password(None).startswith("!")) + else: + self.assertEqual(make_password(None), "!") + + # check_password() should return False (didn't handle disabled under 1.3) + if has_django14 or patched: + self.assertFalse(check_password(PASS1, '!')) + + # identify_hasher() and is_password_usable() should reject it + if has_django14: + self.assertFalse(is_password_usable(user.password)) + if has_identify_hasher: + self.assertRaises(ValueError, identify_hasher, user.password) + + #======================================================= + # hash=None + #======================================================= + # User.set_password() - n/a + + # User.check_password() - returns False + user = FakeUser() + user.password = None + if has_django14 or patched: + self.assertFalse(user.check_password(PASS1)) + else: + self.assertRaises(TypeError, user.check_password, PASS1) + if has_django1 or patched: + if DJANGO_VERSION < (1,2): + self.assertTrue(user.has_usable_password()) + else: + self.assertFalse(user.has_usable_password()) + + # make_password() - n/a + + # check_password() - error + if has_django14 or patched: + self.assertFalse(check_password(PASS1, None)) + else: + self.assertRaises(AttributeError, check_password, PASS1, None) + + # identify_hasher() - error + if has_identify_hasher: + self.assertRaises(TypeError, identify_hasher, None) + + #======================================================= + # empty & invalid hash values + # NOTE: django 1.5 behavior change due to django ticket 18453 + # NOTE: passlib integration tries to match current django version + #======================================================= + for hash in ("", # empty hash + "$789$foo", # empty identifier + ): + # User.set_password() - n/a + + # User.check_password() + # empty + # ----- + # django 1.3 and earlier -- blank hash returns False + # django 1.4 -- blank threw error (fixed in 1.5) + # django 1.5 -- blank hash returns False + # + # invalid + # ------- + # django 1.4 and earlier -- invalid hash threw error (fixed in 1.5) + # django 1.5 -- invalid hash returns False + user = FakeUser() + user.password = hash + if DJANGO_VERSION >= (1,5) or (not hash and DJANGO_VERSION < (1,4)): + # returns False for hash + self.assertFalse(user.check_password(PASS1)) + else: + # throws error for hash + self.assertRaises(ValueError, user.check_password, PASS1) + + # verify hash wasn't changed/upgraded during check_password() call + self.assertEqual(user.password, hash) + self.assertEqual(user.pop_saved_passwords(), []) + + # User.has_usable_password() + # passlib shim for django 0.x -- invalid/empty usable, to match 1.0-1.4 + # django 1.0-1.4 -- 
invalid/empty usable (fixed in 1.5) + # django 1.5 -- invalid/empty no longer usable + if has_django1 or self.patched: + if DJANGO_VERSION < (1,5): + self.assertTrue(user.has_usable_password()) + else: + self.assertFalse(user.has_usable_password()) + + # make_password() - n/a + + # check_password() + # django 1.4 and earlier -- invalid/empty hash threw error (fixed in 1.5) + # django 1.5 -- invalid/empty hash now returns False + if DJANGO_VERSION < (1,5): + self.assertRaises(ValueError, check_password, PASS1, hash) + else: + self.assertFalse(check_password(PASS1, hash)) + + # identify_hasher() - throws error + if has_identify_hasher: + self.assertRaises(ValueError, identify_hasher, hash) + + #======================================================= + # run through all the schemes in the context, + # testing various bits of per-scheme behavior. + #======================================================= + for scheme in ctx.schemes(): + #------------------------------------------------------- + # setup constants & imports, pick a sample secret/hash combo + #------------------------------------------------------- + handler = ctx.handler(scheme) + deprecated = ctx._is_deprecated_scheme(scheme) + assert not deprecated or scheme != ctx.default_scheme() + try: + testcase = get_handler_case(scheme) + except exc.MissingBackendError: + assert scheme == "bcrypt" + continue + assert testcase.handler is handler + if testcase.is_disabled_handler: + continue + if not has_active_backend(handler): + assert scheme == "django_bcrypt" + continue + try: + secret, hash = sample_hashes[scheme] + except KeyError: + while True: + secret, hash = testcase('setUp').get_sample_hash() + if secret: # don't select blank passwords, especially under django 1.4/1.5 + break + other = 'dontletmein' + + # User.set_password() - n/a + + #------------------------------------------------------- + # User.check_password()+migration against known hash + #------------------------------------------------------- + user = FakeUser() + user.password = hash + + # check against invalid password + if has_django1 or patched: + self.assertFalse(user.check_password(None)) + else: + self.assertRaises(TypeError, user.check_password, None) + ##self.assertFalse(user.check_password('')) + self.assertFalse(user.check_password(other)) + self.assert_valid_password(user, hash) + + # check against valid password + if has_django0 and isinstance(secret, unicode): + secret = secret.encode("utf-8") + self.assertTrue(user.check_password(secret)) + + # check if it upgraded the hash + # NOTE: needs_update kept separate in case we need to test rounds. 
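+            # (a successful check_password() against a hash from a deprecated
+            # scheme should transparently re-hash the password with the
+            # context's default scheme and persist it via user.save(); the
+            # asserts below cover both the re-hash and the single mock save.)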
+ needs_update = deprecated + if needs_update: + self.assertNotEqual(user.password, hash) + self.assertFalse(handler.identify(user.password)) + self.assertTrue(ctx.handler().verify(secret, user.password)) + self.assert_valid_password(user, saved=user.password) + else: + self.assert_valid_password(user, hash) + + # don't need to check rest for most deployments + if TEST_MODE(max="default"): + continue + + #------------------------------------------------------- + # make_password() correctly selects algorithm + #------------------------------------------------------- + if has_django14: + hash2 = make_password(secret, hasher=passlib_to_hasher_name(scheme)) + self.assertTrue(handler.verify(secret, hash2)) + + #------------------------------------------------------- + # check_password()+setter against known hash + #------------------------------------------------------- + if has_django14 or patched: + # should call setter only if it needs_update + self.assertTrue(check_password(secret, hash, setter=setter)) + self.assertEqual(setter.popstate(), [secret] if needs_update else []) + + # should not call setter + self.assertFalse(check_password(other, hash, setter=setter)) + self.assertEqual(setter.popstate(), []) + + ### check preferred kwd is ignored (django 1.4 feature we don't support) + ##self.assertTrue(check_password(secret, hash, setter=setter, preferred='fooey')) + ##self.assertEqual(setter.popstate(), [secret]) + + elif patched or scheme != "hex_md5": + # django 1.3 never called check_password() for hex_md5 + self.assertTrue(check_password(secret, hash)) + self.assertFalse(check_password(other, hash)) + + # TODO: get_hasher() + + #------------------------------------------------------- + # identify_hasher() recognizes known hash + #------------------------------------------------------- + if has_identify_hasher: + self.assertTrue(is_password_usable(hash)) + name = hasher_to_passlib_name(identify_hasher(hash).algorithm) + self.assertEqual(name, scheme) + +class ExtensionBehaviorTest(DjangoBehaviorTest): + "test model to verify passlib.ext.django conforms to it" + descriptionPrefix = "verify extension behavior" + patched = True + config = dict( + schemes="sha256_crypt,md5_crypt,des_crypt", + deprecated="des_crypt", + ) + + def setUp(self): + super(ExtensionBehaviorTest, self).setUp() + self.load_extension(PASSLIB_CONFIG=self.config) + +class DjangoExtensionTest(_ExtensionTest): + """test the ``passlib.ext.django`` plugin""" + descriptionPrefix = "passlib.ext.django plugin" + + #=================================================================== + # monkeypatch testing + #=================================================================== + def test_00_patch_control(self): + "test set_django_password_context patch/unpatch" + + # check config="disabled" + self.load_extension(PASSLIB_CONFIG="disabled", check=False) + self.assert_unpatched() + + # check legacy config=None + with self.assertWarningList("PASSLIB_CONFIG=None is deprecated"): + self.load_extension(PASSLIB_CONFIG=None, check=False) + self.assert_unpatched() + + # try stock django 1.0 context + self.load_extension(PASSLIB_CONFIG="django-1.0", check=False) + self.assert_patched(context=django10_context) + + # try to remove patch + self.unload_extension() + + # patch to use stock django 1.4 context + self.load_extension(PASSLIB_CONFIG="django-1.4", check=False) + self.assert_patched(context=django14_context) + + # try to remove patch again + self.unload_extension() + + def test_01_overwrite_detection(self): + "test detection of foreign 
monkeypatching" + # NOTE: this sets things up, and spot checks two methods, + # this should be enough to verify patch manager is working. + # TODO: test unpatch behavior honors flag. + + # configure plugin to use sample context + config = "[passlib]\nschemes=des_crypt\n" + self.load_extension(PASSLIB_CONFIG=config) + + # setup helpers + import django.contrib.auth.models as models + from passlib.ext.django.models import _manager + def dummy(): + pass + + # mess with User.set_password, make sure it's detected + orig = models.User.set_password + models.User.set_password = dummy + with self.assertWarningList("another library has patched.*User\.set_password"): + _manager.check_all() + models.User.set_password = orig + + # mess with models.check_password, make sure it's detected + orig = models.check_password + models.check_password = dummy + with self.assertWarningList("another library has patched.*models:check_password"): + _manager.check_all() + models.check_password = orig + + def test_02_handler_wrapper(self): + "test Hasher-compatible handler wrappers" + if not has_django14: + raise self.skipTest("Django >= 1.4 not installed") + from passlib.ext.django.utils import get_passlib_hasher + from django.contrib.auth import hashers + + # should return native django hasher if available + hasher = get_passlib_hasher("hex_md5") + self.assertIsInstance(hasher, hashers.UnsaltedMD5PasswordHasher) + + hasher = get_passlib_hasher("django_bcrypt") + self.assertIsInstance(hasher, hashers.BCryptPasswordHasher) + + # otherwise should return wrapper + from passlib.hash import sha256_crypt + hasher = get_passlib_hasher("sha256_crypt") + self.assertEqual(hasher.algorithm, "passlib_sha256_crypt") + + # and wrapper should return correct hash + encoded = hasher.encode("stub") + self.assertTrue(sha256_crypt.verify("stub", encoded)) + self.assertTrue(hasher.verify("stub", encoded)) + self.assertFalse(hasher.verify("xxxx", encoded)) + + # test wrapper accepts options + encoded = hasher.encode("stub", "abcd"*4, iterations=1234) + self.assertEqual(encoded, "$5$rounds=1234$abcdabcdabcdabcd$" + "v2RWkZQzctPdejyRqmmTDQpZN6wTh7.RUy9zF2LftT6") + self.assertEqual(hasher.safe_summary(encoded), + {'algorithm': 'sha256_crypt', + 'salt': u('abcdab**********'), + 'iterations': 1234, + 'hash': u('v2RWkZ*************************************'), + }) + + #=================================================================== + # PASSLIB_CONFIG settings + #=================================================================== + def test_11_config_disabled(self): + "test PASSLIB_CONFIG='disabled'" + # test config=None (deprecated) + with self.assertWarningList("PASSLIB_CONFIG=None is deprecated"): + self.load_extension(PASSLIB_CONFIG=None, check=False) + self.assert_unpatched() + + # test disabled config + self.load_extension(PASSLIB_CONFIG="disabled", check=False) + self.assert_unpatched() + + def test_12_config_presets(self): + "test PASSLIB_CONFIG=''" + # test django presets + self.load_extension(PASSLIB_CONTEXT="django-default", check=False) + if DJANGO_VERSION >= (1,6): + ctx = django16_context + elif DJANGO_VERSION >= (1,4): + ctx = django14_context + else: + ctx = django10_context + self.assert_patched(ctx) + + self.load_extension(PASSLIB_CONFIG="django-1.0", check=False) + self.assert_patched(django10_context) + + self.load_extension(PASSLIB_CONFIG="django-1.4", check=False) + self.assert_patched(django14_context) + + def test_13_config_defaults(self): + "test PASSLIB_CONFIG default behavior" + # check implicit default + from 
passlib.ext.django.utils import PASSLIB_DEFAULT + default = CryptContext.from_string(PASSLIB_DEFAULT) + self.load_extension() + self.assert_patched(PASSLIB_DEFAULT) + + # check default preset + self.load_extension(PASSLIB_CONTEXT="passlib-default", check=False) + self.assert_patched(PASSLIB_DEFAULT) + + # check explicit string + self.load_extension(PASSLIB_CONTEXT=PASSLIB_DEFAULT, check=False) + self.assert_patched(PASSLIB_DEFAULT) + + def test_14_config_invalid(self): + "test PASSLIB_CONFIG type checks" + update_settings(PASSLIB_CONTEXT=123, PASSLIB_CONFIG=UNSET) + self.assertRaises(TypeError, __import__, 'passlib.ext.django.models') + + self.unload_extension() + update_settings(PASSLIB_CONFIG="missing-preset", PASSLIB_CONTEXT=UNSET) + self.assertRaises(ValueError, __import__, 'passlib.ext.django.models') + + #=================================================================== + # PASSLIB_GET_CATEGORY setting + #=================================================================== + def test_21_category_setting(self): + "test PASSLIB_GET_CATEGORY parameter" + # define config where rounds can be used to detect category + config = dict( + schemes = ["sha256_crypt"], + sha256_crypt__default_rounds = 1000, + staff__sha256_crypt__default_rounds = 2000, + superuser__sha256_crypt__default_rounds = 3000, + ) + from passlib.hash import sha256_crypt + + def run(**kwds): + "helper to take in user opts, return rounds used in password" + user = FakeUser(**kwds) + user.set_password("stub") + return sha256_crypt.from_string(user.password).rounds + + # test default get_category + self.load_extension(PASSLIB_CONFIG=config) + self.assertEqual(run(), 1000) + self.assertEqual(run(is_staff=True), 2000) + self.assertEqual(run(is_superuser=True), 3000) + + # test patch uses explicit get_category function + def get_category(user): + return user.first_name or None + self.load_extension(PASSLIB_CONTEXT=config, + PASSLIB_GET_CATEGORY=get_category) + self.assertEqual(run(), 1000) + self.assertEqual(run(first_name='other'), 1000) + self.assertEqual(run(first_name='staff'), 2000) + self.assertEqual(run(first_name='superuser'), 3000) + + # test patch can disable get_category entirely + def get_category(user): + return None + self.load_extension(PASSLIB_CONTEXT=config, + PASSLIB_GET_CATEGORY=get_category) + self.assertEqual(run(), 1000) + self.assertEqual(run(first_name='other'), 1000) + self.assertEqual(run(first_name='staff', is_staff=True), 1000) + self.assertEqual(run(first_name='superuser', is_superuser=True), 1000) + + # test bad value + self.assertRaises(TypeError, self.load_extension, PASSLIB_CONTEXT=config, + PASSLIB_GET_CATEGORY='x') + + #=================================================================== + # eoc + #=================================================================== + +from passlib.context import CryptContext +class ContextWithHook(CryptContext): + """subclass which invokes update_hook(self) before major actions""" + + @staticmethod + def update_hook(self): + pass + + def encrypt(self, *args, **kwds): + self.update_hook(self) + return super(ContextWithHook, self).encrypt(*args, **kwds) + + def verify(self, *args, **kwds): + self.update_hook(self) + return super(ContextWithHook, self).verify(*args, **kwds) + +# hack up the some of the real django tests to run w/ extension loaded, +# to ensure we mimic their behavior. 
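+# NOTE: HashersTest below reuses django's own TestUtilsHashPass suite, but
+# swaps in the passlib extension during setUp(); any place where the
+# extension's make_password()/check_password()/identify_hasher() behavior
+# drifts from stock django then surfaces as an ordinary test failure.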
+if has_django14: + from passlib.tests.utils import patchAttr + if DJANGO_VERSION >= (1,6): + from django.contrib.auth.tests import test_hashers as _thmod + else: + from django.contrib.auth.tests import hashers as _thmod + + class HashersTest(_thmod.TestUtilsHashPass, _ExtensionSupport): + """run django's hasher unittests against passlib's extension + and workalike implementations""" + def setUp(self): + # NOTE: omitted orig setup, want to install our extension, + # and load hashers through it instead. + self.load_extension(PASSLIB_CONTEXT=stock_config, check=False) + from passlib.ext.django.models import password_context + + # update test module to use our versions of some hasher funcs + from django.contrib.auth import hashers + for attr in ["make_password", + "check_password", + "identify_hasher", + "get_hasher"]: + patchAttr(self, _thmod, attr, getattr(hashers, attr)) + + # django 1.5 tests expect empty django_des_crypt salt field + if DJANGO_VERSION > (1,4): + from passlib.hash import django_des_crypt + patchAttr(self, django_des_crypt, "use_duplicate_salt", False) + + # hack: need password_context to keep up to date with hasher.iterations + if DJANGO_VERSION >= (1,6): + def update_hook(self): + rounds = _thmod.get_hasher("pbkdf2_sha256").iterations + self.update( + django_pbkdf2_sha256__min_rounds=rounds, + django_pbkdf2_sha256__default_rounds=rounds, + django_pbkdf2_sha256__max_rounds=rounds, + ) + patchAttr(self, password_context, "__class__", ContextWithHook) + patchAttr(self, password_context, "update_hook", update_hook) + + # omitting this test, since it depends on updated to django hasher settings + test_pbkdf2_upgrade_new_hasher = lambda self: self.skipTest("omitted by passlib") + + def tearDown(self): + self.unload_extension() + super(HashersTest, self).tearDown() + + HashersTest = skipUnless(TEST_MODE("default"), + "requires >= 'default' test mode")(HashersTest) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_handlers.py b/passlib/tests/test_handlers.py new file mode 100644 index 00000000..d300a848 --- /dev/null +++ b/passlib/tests/test_handlers.py @@ -0,0 +1,2217 @@ +"""passlib.tests.test_handlers - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import hashlib +import logging; log = logging.getLogger(__name__) +import os +import sys +import warnings +# site +# pkg +from passlib import hash +from passlib.utils import repeat_string +from passlib.utils.compat import irange, PY3, u, get_method_function +from passlib.tests.utils import TestCase, HandlerCase, skipUnless, \ + TEST_MODE, b, catch_warnings, UserHandlerMixin, randintgauss, EncodingHandlerMixin +# module + +#============================================================================= +# constants & support +#============================================================================= + +# some common unicode passwords which used as test cases +UPASS_WAV = u('\u0399\u03c9\u03b1\u03bd\u03bd\u03b7\u03c2') +UPASS_USD = u("\u20AC\u00A5$") +UPASS_TABLE = u("t\u00e1\u0411\u2113\u0259") + +PASS_TABLE_UTF8 = b('t\xc3\xa1\xd0\x91\xe2\x84\x93\xc9\x99') # utf-8 + +def get_handler_case(scheme): + "return HandlerCase instance for scheme, used by other tests" + from 
passlib.registry import get_crypt_handler + handler = get_crypt_handler(scheme) + if hasattr(handler, "backends") and not hasattr(handler, "wrapped") and handler.name != "django_bcrypt_sha256": + backend = handler.get_backend() + name = "%s_%s_test" % (scheme, backend) + else: + name = "%s_test" % scheme + try: + return globals()[name] + except KeyError: + pass + for suffix in ("handlers_django", "handlers_bcrypt"): + modname = "passlib.tests.test_" + suffix + __import__(modname) + mod = sys.modules[modname] + try: + return getattr(mod, name) + except AttributeError: + pass + raise KeyError("test case %r not found" % name) + +#============================================================================= +# apr md5 crypt +#============================================================================= +class apr_md5_crypt_test(HandlerCase): + handler = hash.apr_md5_crypt + + known_correct_hashes = [ + # + # http://httpd.apache.org/docs/2.2/misc/password_encryptions.html + # + ('myPassword', '$apr1$r31.....$HqJZimcKQFAMYayBlzkrA/'), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$apr1$bzYrOHUx$a1FcpXuQDJV3vPY20CS6N1'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash ----\/ + '$apr1$r31.....$HqJZimcKQFAMYayBlzkrA!' + ] + +#============================================================================= +# bigcrypt +#============================================================================= +class bigcrypt_test(HandlerCase): + handler = hash.bigcrypt + + # TODO: find an authoritative source of test vectors + known_correct_hashes = [ + + # + # various docs & messages on the web. + # + ("passphrase", "qiyh4XPJGsOZ2MEAyLkfWqeQ"), + ("This is very long passwd", "f8.SVpL2fvwjkAnxn8/rgTkwvrif6bjYB5c"), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, 'SEChBAyMbMNhgGLyP7kD1HZU'), + ] + + known_unidentified_hashes = [ + # one char short (10 % 11) + "qiyh4XPJGsOZ2MEAyLkfWqe" + + # one char too many (1 % 11) + "f8.SVpL2fvwjkAnxn8/rgTkwvrif6bjYB5cd" + ] + + # omit des_crypt from known_other since it's a valid bigcrypt hash too. + known_other_hashes = [row for row in HandlerCase.known_other_hashes + if row[0] != "des_crypt"] + + def test_90_internal(self): + # check that _norm_checksum() also validates checksum size. 
+ # (current code uses regex in parser) + self.assertRaises(ValueError, hash.bigcrypt, use_defaults=True, + checksum=u('yh4XPJGsOZ')) + +#============================================================================= +# bsdi crypt +#============================================================================= +class _bsdi_crypt_test(HandlerCase): + "test BSDiCrypt algorithm" + handler = hash.bsdi_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '_J9..CCCCXBrJUJV154M'), + ('U*U***U', '_J9..CCCCXUhOBTXzaiE'), + ('U*U***U*', '_J9..CCCC4gQ.mB/PffM'), + ('*U*U*U*U', '_J9..XXXXvlzQGqpPPdk'), + ('*U*U*U*U*', '_J9..XXXXsqM/YSSP..Y'), + ('*U*U*U*U*U*U*U*U', '_J9..XXXXVL7qJCnku0I'), + ('*U*U*U*U*U*U*U*U*', '_J9..XXXXAj8cFbP5scI'), + ('ab1234567', '_J9..SDizh.vll5VED9g'), + ('cr1234567', '_J9..SDizRjWQ/zePPHc'), + ('zxyDPWgydbQjgq', '_J9..SDizxmRI1GjnQuE'), + ('726 even', '_K9..SaltNrQgIYUAeoY'), + ('', '_J9..SDSD5YGyRCr4W4c'), + + # + # custom + # + (" ", "_K1..crsmZxOLzfJH8iw"), + ("my", '_KR/.crsmykRplHbAvwA'), # <-- to detect old 12-bit rounds bug + ("my socra", "_K1..crsmf/9NzZr1fLM"), + ("my socrates", '_K1..crsmOv1rbde9A9o'), + ("my socrates note", "_K1..crsm/2qeAhdISMA"), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '_7C/.ABw0WIKy0ILVqo2'), + ] + known_unidentified_hashes = [ + # bad char in otherwise correctly formatted hash + # \/ + "_K1.!crsmZxOLzfJH8iw" + ] + + platform_crypt_support = [ + ("freebsd|openbsd|netbsd|darwin", True), + ("linux|solaris", False), + ] + + def setUp(self): + super(_bsdi_crypt_test, self).setUp() + warnings.filterwarnings("ignore", "bsdi_crypt rounds should be odd.*") + +bsdi_crypt_os_crypt_test, bsdi_crypt_builtin_test = \ + _bsdi_crypt_test.create_backend_cases(["os_crypt","builtin"]) + +#============================================================================= +# cisco pix +#============================================================================= +class cisco_pix_test(UserHandlerMixin, HandlerCase): + handler = hash.cisco_pix + secret_size = 16 + requires_user = False + + known_correct_hashes = [ + # + # http://www.perlmonks.org/index.pl?node_id=797623 + # + ("cisco", "2KFQnbNIdI.2KYOU"), + + # + # http://www.hsc.fr/ressources/breves/pix_crack.html.en + # + ("hsc", "YtT8/k6Np8F1yz2c"), + + # + # www.freerainbowtables.com/phpBB3/viewtopic.php?f=2&t=1441 + # + ("", "8Ry2YjIyt7RRXU24"), + (("cisco", "john"), "hN7LzeyYjw12FSIU"), + (("cisco", "jack"), "7DrfeZ7cyOj/PslD"), + + # + # http://comments.gmane.org/gmane.comp.security.openwall.john.user/2529 + # + (("ripper", "alex"), "h3mJrcH0901pqX/m"), + (("cisco", "cisco"), "3USUcOPFUiMCO4Jk"), + (("cisco", "cisco1"), "3USUcOPFUiMCO4Jk"), + (("CscFw-ITC!", "admcom"), "lZt7HSIXw3.QP7.R"), + ("cangetin", "TynyB./ftknE77QP"), + (("cangetin", "rramsey"), "jgBZqYtsWfGcUKDi"), + + # + # http://openwall.info/wiki/john/sample-hashes + # + (("phonehome", "rharris"), "zyIIMSYjiPm0L7a6"), + + # + # from JTR 1.7.9 + # + ("test1", "TRPEas6f/aa6JSPL"), + ("test2", "OMT6mXmAvGyzrCtp"), + ("test3", "gTC7RIy1XJzagmLm"), + ("test4", "oWC1WRwqlBlbpf/O"), + ("password", "NuLKvvWGg.x9HEKO"), + ("0123456789abcdef", ".7nfVBEIEu4KbF/1"), + + # + # custom + # + (("cisco1", "cisco1"), "jmINXNH6p1BxUppp"), + + # ensures utf-8 used for unicode + (UPASS_TABLE, 'CaiIvkLMu2TOHXGT'), + ] + +#============================================================================= +# cisco type 7 +#============================================================================= +class cisco_type7_test(HandlerCase): + 
handler = hash.cisco_type7 + salt_bits = 4 + salt_type = int + + known_correct_hashes = [ + # + # http://mccltd.net/blog/?p=1034 + # + ("secure ", "04480E051A33490E"), + + # + # http://insecure.org/sploits/cisco.passwords.html + # + ("Its time to go to lunch!", + "153B1F1F443E22292D73212D5300194315591954465A0D0B59"), + + # + # http://blog.ioshints.info/2007/11/type-7-decryption-in-cisco-ios.html + # + ("t35t:pa55w0rd", "08351F1B1D431516475E1B54382F"), + + # + # http://www.m00nie.com/2011/09/cisco-type-7-password-decryption-and-encryption-with-perl/ + # + ("hiImTesting:)", "020E0D7206320A325847071E5F5E"), + + # + # http://packetlife.net/forums/thread/54/ + # + ("cisco123", "060506324F41584B56"), + ("cisco123", "1511021F07257A767B"), + + # + # source ? + # + ('Supe&8ZUbeRp4SS', "06351A3149085123301517391C501918"), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '0958EDC8A9F495F6F8A5FD'), + ] + + known_unidentified_hashes = [ + # salt with hex value + "0A480E051A33490E", + + # salt value > 52. this may in fact be valid, but we reject it for now + # (see docs for more). + '99400E4812', + ] + + def test_90_decode(self): + "test cisco_type7.decode()" + from passlib.utils import to_unicode, to_bytes + + handler = self.handler + for secret, hash in self.known_correct_hashes: + usecret = to_unicode(secret) + bsecret = to_bytes(secret) + self.assertEqual(handler.decode(hash), usecret) + self.assertEqual(handler.decode(hash, None), bsecret) + + self.assertRaises(UnicodeDecodeError, handler.decode, + '0958EDC8A9F495F6F8A5FD', 'ascii') + + def test_91_salt(self): + "test salt value border cases" + handler = self.handler + self.assertRaises(TypeError, handler, salt=None) + handler(salt=None, use_defaults=True) + self.assertRaises(TypeError, handler, salt='abc') + self.assertRaises(ValueError, handler, salt=-10) + with self.assertWarningList("salt/offset must be.*"): + h = handler(salt=100, relaxed=True) + self.assertEqual(h.salt, 52) + +#============================================================================= +# crypt16 +#============================================================================= +class crypt16_test(HandlerCase): + handler = hash.crypt16 + secret_size = 16 + + # TODO: find an authortative source of test vectors + known_correct_hashes = [ + # + # from messages around the web, including + # http://seclists.org/bugtraq/1999/Mar/76 + # + ("passphrase", "qi8H8R7OM4xMUNMPuRAZxlY."), + ("printf", "aaCjFz4Sh8Eg2QSqAReePlq6"), + ("printf", "AA/xje2RyeiSU0iBY3PDwjYo"), + ("LOLOAQICI82QB4IP", "/.FcK3mad6JwYt8LVmDqz9Lc"), + ("LOLOAQICI", "/.FcK3mad6JwYSaRHJoTPzY2"), + ("LOLOAQIC", "/.FcK3mad6JwYelhbtlysKy6"), + ("L", "/.CIu/PzYCkl6elhbtlysKy6"), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, 'YeDc9tKkkmDvwP7buzpwhoqQ'), + ] + +#============================================================================= +# des crypt +#============================================================================= +class _des_crypt_test(HandlerCase): + "test des-crypt algorithm" + handler = hash.des_crypt + secret_size = 8 + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', 'CCNf8Sbh3HDfQ'), + ('U*U***U', 'CCX.K.MFy4Ois'), + ('U*U***U*', 'CC4rMpbg9AMZ.'), + ('*U*U*U*U', 'XXxzOu6maQKqQ'), + ('', 'SDbsugeBiC58A'), + + # + # custom + # + ('', 'OgAwTx2l6NADI'), + (' ', '/Hk.VPuwQTXbc'), + ('test', 'N1tQbOFcM5fpg'), + ('Compl3X AlphaNu3meric', 'um.Wguz3eVCx2'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', 'sNYqfOyauIyic'), + ('AlOtBsOl', 
'cEpWz5IUCShqM'), + + # ensures utf-8 used for unicode + (u('hell\u00D6'), 'saykDgk3BPZ9E'), + ] + known_unidentified_hashes = [ + # bad char in otherwise correctly formatted hash + #\/ + '!gAwTx2l6NADI', + + # wrong size + 'OgAwTx2l6NAD', + 'OgAwTx2l6NADIj', + ] + + platform_crypt_support = [ + ("freebsd|openbsd|netbsd|linux|solaris|darwin", True), + ] + +des_crypt_os_crypt_test, des_crypt_builtin_test = \ + _des_crypt_test.create_backend_cases(["os_crypt","builtin"]) + +#============================================================================= +# fshp +#============================================================================= +class fshp_test(HandlerCase): + "test fshp algorithm" + handler = hash.fshp + + known_correct_hashes = [ + # + # test vectors from FSHP reference implementation + # https://github.com/bdd/fshp-is-not-secure-anymore/blob/master/python/test.py + # + ('test', '{FSHP0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M='), + + ('test', + '{FSHP1|8|4096}MTIzNDU2NzjTdHcmoXwNc0f' + 'f9+ArUHoN0CvlbPZpxFi1C6RDM/MHSA==' + ), + + ('OrpheanBeholderScryDoubt', + '{FSHP1|8|4096}GVSUFDAjdh0vBosn1GUhz' + 'GLHP7BmkbCZVH/3TQqGIjADXpc+6NCg3g==' + ), + ('ExecuteOrder66', + '{FSHP3|16|8192}0aY7rZQ+/PR+Rd5/I9ss' + 'RM7cjguyT8ibypNaSp/U1uziNO3BVlg5qPU' + 'ng+zHUDQC3ao/JbzOnIBUtAeWHEy7a2vZeZ' + '7jAwyJJa2EqOsq4Io=' + ), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '{FSHP1|16|16384}9v6/l3Lu/d9by5nznpOS' + 'cqQo8eKu/b/CKli3RCkgYg4nRTgZu5y659YV8cCZ68UL'), + ] + + known_unidentified_hashes = [ + # incorrect header + '{FSHX0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + 'FSHP0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + ] + + known_malformed_hashes = [ + # bad base64 padding + '{FSHP0|0|1}qUqP5cyxm6YcTAhz05Hph5gvu9M', + + # wrong salt size + '{FSHP0|1|1}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + + # bad rounds + '{FSHP0|0|A}qUqP5cyxm6YcTAhz05Hph5gvu9M=', + ] + + def test_90_variant(self): + "test variant keyword" + handler = self.handler + kwds = dict(salt=b('a'), rounds=1) + + # accepts ints + handler(variant=1, **kwds) + + # accepts bytes or unicode + handler(variant=u('1'), **kwds) + handler(variant=b('1'), **kwds) + + # aliases + handler(variant=u('sha256'), **kwds) + handler(variant=b('sha256'), **kwds) + + # rejects None + self.assertRaises(TypeError, handler, variant=None, **kwds) + + # rejects other types + self.assertRaises(TypeError, handler, variant=complex(1,1), **kwds) + + # invalid variant + self.assertRaises(ValueError, handler, variant='9', **kwds) + self.assertRaises(ValueError, handler, variant=9, **kwds) + +#============================================================================= +# hex digests +#============================================================================= +class hex_md4_test(HandlerCase): + handler = hash.hex_md4 + known_correct_hashes = [ + ("password", '8a9d093f14f8701df17732b2bb182c74'), + (UPASS_TABLE, '876078368c47817ce5f9115f3a42cf74'), + ] + +class hex_md5_test(HandlerCase): + handler = hash.hex_md5 + known_correct_hashes = [ + ("password", '5f4dcc3b5aa765d61d8327deb882cf99'), + (UPASS_TABLE, '05473f8a19f66815e737b33264a0d0b0'), + ] + +class hex_sha1_test(HandlerCase): + handler = hash.hex_sha1 + known_correct_hashes = [ + ("password", '5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8'), + (UPASS_TABLE, 'e059b2628e3a3e2de095679de9822c1d1466e0f0'), + ] + +class hex_sha256_test(HandlerCase): + handler = hash.hex_sha256 + known_correct_hashes = [ + ("password", '5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8'), + (UPASS_TABLE, 
'6ed729e19bf24d3d20f564375820819932029df05547116cfc2cc868a27b4493'), + ] + +class hex_sha512_test(HandlerCase): + handler = hash.hex_sha512 + known_correct_hashes = [ + ("password", 'b109f3bbbc244eb82441917ed06d618b9008dd09b3befd1b5e07394c' + '706a8bb980b1d7785e5976ec049b46df5f1326af5a2ea6d103fd07c95385ffab0cac' + 'bc86'), + (UPASS_TABLE, 'd91bb0a23d66dca07a1781fd63ae6a05f6919ee5fc368049f350c9f' + '293b078a18165d66097cf0d89fdfbeed1ad6e7dba2344e57348cd6d51308c843a06f' + '29caf'), + ] + +#============================================================================= +# htdigest hash +#============================================================================= +class htdigest_test(UserHandlerMixin, HandlerCase): + handler = hash.htdigest + + known_correct_hashes = [ + # secret, user, realm + + # from RFC 2617 + (("Circle Of Life", "Mufasa", "testrealm@host.com"), + '939e7578ed9e3c518a452acee763bce9'), + + # custom + ((UPASS_TABLE, UPASS_USD, UPASS_WAV), + '4dabed2727d583178777fab468dd1f17'), + ] + + known_unidentified_hashes = [ + # bad char \/ - currently rejecting upper hex chars, may change + '939e7578edAe3c518a452acee763bce9', + + # bad char \/ + '939e7578edxe3c518a452acee763bce9', + ] + + def test_80_user(self): + raise self.skipTest("test case doesn't support 'realm' keyword") + + def populate_context(self, secret, kwds): + "insert username into kwds" + if isinstance(secret, tuple): + secret, user, realm = secret + else: + user, realm = "user", "realm" + kwds.setdefault("user", user) + kwds.setdefault("realm", realm) + return secret + +#============================================================================= +# ldap hashes +#============================================================================= +class ldap_md5_test(HandlerCase): + handler = hash.ldap_md5 + known_correct_hashes = [ + ("helloworld", '{MD5}/F4DjTilcDIIVEHn/nAQsA=='), + (UPASS_TABLE, '{MD5}BUc/ihn2aBXnN7MyZKDQsA=='), + ] + +class ldap_sha1_test(HandlerCase): + handler = hash.ldap_sha1 + known_correct_hashes = [ + ("helloworld", '{SHA}at+xg6SiyUovktq1redipHiJpaE='), + (UPASS_TABLE, '{SHA}4FmyYo46Pi3glWed6YIsHRRm4PA='), + ] + +class ldap_salted_md5_test(HandlerCase): + handler = hash.ldap_salted_md5 + known_correct_hashes = [ + ("testing1234", '{SMD5}UjFY34os/pnZQ3oQOzjqGu4yeXE='), + (UPASS_TABLE, '{SMD5}Z0ioJ58LlzUeRxm3K6JPGAvBGIM='), + + # alternate salt sizes (8, 15, 16) + ('test', '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4cw'), + ('test', '{SMD5}XRlncfRzvGi0FDzgR98tUgBg7B3jXOs9p9S615qTkg=='), + ('test', '{SMD5}FbAkzOMOxRbMp6Nn4hnZuel9j9Gas7a2lvI+x5hT6j0='), + ] + + known_malformed_hashes = [ + # salt too small (3) + '{SMD5}IGVhwK+anvspmfDt2t0vgGjt/Q==', + + # incorrect base64 encoding + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4c', + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4cw' + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P4cw=', + '{SMD5}LnuZPJhiaY95/4lmV=pg548xBsD4P4cw', + '{SMD5}LnuZPJhiaY95/4lmVFpg548xBsD4P===', + ] + +class ldap_salted_sha1_test(HandlerCase): + handler = hash.ldap_salted_sha1 + known_correct_hashes = [ + ("testing123", '{SSHA}0c0blFTXXNuAMHECS4uxrj3ZieMoWImr'), + ("secret", "{SSHA}0H+zTv8o4MR4H43n03eCsvw1luG8LdB7"), + (UPASS_TABLE, '{SSHA}3yCSD1nLZXznra4N8XzZgAL+s1sQYsx5'), + + # alternate salt sizes (8, 15, 16) + ('test', '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOckw=='), + ('test', '{SSHA}/ZMF5KymNM+uEOjW+9STKlfCFj51bg3BmBNCiPHeW2ttbU0='), + ('test', '{SSHA}Pfx6Vf48AT9x3FVv8znbo8WQkEVSipHSWovxXmvNWUvp/d/7'), + ] + + known_malformed_hashes = [ + # salt too small (3) + 
'{SSHA}ZQK3Yvtvl6wtIRoISgMGPkcWU7Nfq5U=', + + # incorrect base64 encoding + '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOck', + '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOckw=', + '{SSHA}P90+qijSp8MJ1tN25j5o1Pf=UvlqjXHOGeOckw==', + '{SSHA}P90+qijSp8MJ1tN25j5o1PflUvlqjXHOGeOck===', + ] + +class ldap_plaintext_test(HandlerCase): + # TODO: integrate EncodingHandlerMixin + handler = hash.ldap_plaintext + known_correct_hashes = [ + ("password", 'password'), + (UPASS_TABLE, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + (PASS_TABLE_UTF8, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + ] + known_unidentified_hashes = [ + "{FOO}bar", + + # NOTE: this hash currently rejects the empty string. + "", + ] + + known_other_hashes = [ + ("ldap_md5", "{MD5}/F4DjTilcDIIVEHn/nAQsA==") + ] + + def get_fuzz_password(self): + # NOTE: this hash currently rejects the empty string. + while True: + pwd = super(ldap_plaintext_test, self).get_fuzz_password() + if pwd: + return pwd + +class _ldap_md5_crypt_test(HandlerCase): + # NOTE: since the ldap_{crypt} handlers are all wrappers, don't need + # separate test; this is just to test the codebase end-to-end + handler = hash.ldap_md5_crypt + + known_correct_hashes = [ + # + # custom + # + ('', '{CRYPT}$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'), + (' ', '{CRYPT}$1$m/5ee7ol$bZn0kIBFipq39e.KDXX8I0'), + ('test', '{CRYPT}$1$ec6XvcoW$ghEtNK2U1MC5l.Dwgi3020'), + ('Compl3X AlphaNu3meric', '{CRYPT}$1$nX1e7EeI$ljQn72ZUgt6Wxd9hfvHdV0'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '{CRYPT}$1$jQS7o98J$V6iTcr71CGgwW2laf17pi1'), + ('test', '{CRYPT}$1$SuMrG47N$ymvzYjr7QcEQjaK5m1PGx1'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '{CRYPT}$1$d6/Ky1lU$/xpf8m7ftmWLF.TjHCqel0'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + '{CRYPT}$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o!', + ] + +ldap_md5_crypt_os_crypt_test, ldap_md5_crypt_builtin_test = \ + _ldap_md5_crypt_test.create_backend_cases(["os_crypt","builtin"]) + +class _ldap_sha1_crypt_test(HandlerCase): + # NOTE: this isn't for testing the hash (see ldap_md5_crypt note) + # but as a self-test of the os_crypt patching code in HandlerCase. + handler = hash.ldap_sha1_crypt + + known_correct_hashes = [ + ('password', '{CRYPT}$sha1$10$c.mcTzCw$gF8UeYst9yXX7WNZKc5Fjkq0.au7'), + (UPASS_TABLE, '{CRYPT}$sha1$10$rnqXlOsF$aGJf.cdRPewJAXo1Rn1BkbaYh0fP'), + ] + + def populate_settings(self, kwds): + kwds.setdefault("rounds", 10) + super(_ldap_sha1_crypt_test, self).populate_settings(kwds) + + def test_77_fuzz_input(self): + raise self.skipTest("unneeded") + +ldap_sha1_crypt_os_crypt_test, = _ldap_sha1_crypt_test.create_backend_cases(["os_crypt"]) + +#============================================================================= +# ldap_pbkdf2_{digest} +#============================================================================= +# NOTE: since these are all wrappers for the pbkdf2_{digest} hasehs, +# they don't extensive separate testing. 
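+# NOTE: the test below only round-trips one known (password, hash) pair per
+# wrapper via verify(); roughly speaking, ldap_pbkdf2_sha1 and friends store
+# the same derived key as the pbkdf2_{digest} handlers, just behind an
+# LDAP-style "{PBKDF2...}" identifier.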
+ +class ldap_pbkdf2_test(TestCase): + + def test_wrappers(self): + "test ldap pbkdf2 wrappers" + + self.assertTrue( + hash.ldap_pbkdf2_sha1.verify( + "password", + '{PBKDF2}1212$OB.dtnSEXZK8U5cgxU/GYQ$y5LKPOplRmok7CZp/aqVDVg8zGI', + ) + ) + + self.assertTrue( + hash.ldap_pbkdf2_sha256.verify( + "password", + '{PBKDF2-SHA256}1212$4vjV83LKPjQzk31VI4E0Vw$hsYF68OiOUPdDZ1Fg' + '.fJPeq1h/gXXY7acBp9/6c.tmQ' + ) + ) + + self.assertTrue( + hash.ldap_pbkdf2_sha512.verify( + "password", + '{PBKDF2-SHA512}1212$RHY0Fr3IDMSVO/RSZyb5ow$eNLfBK.eVozomMr.1gYa1' + '7k9B7KIK25NOEshvhrSX.esqY3s.FvWZViXz4KoLlQI.BzY/YTNJOiKc5gBYFYGww' + ) + ) + +#============================================================================= +# lanman +#============================================================================= +class lmhash_test(EncodingHandlerMixin, HandlerCase): + handler = hash.lmhash + secret_size = 14 + secret_case_insensitive = True + + known_correct_hashes = [ + # + # http://msdn.microsoft.com/en-us/library/cc245828(v=prot.10).aspx + # + ("OLDPASSWORD", "c9b81d939d6fd80cd408e6b105741864"), + ("NEWPASSWORD", '09eeab5aa415d6e4d408e6b105741864'), + ("welcome", "c23413a8a1e7665faad3b435b51404ee"), + + # + # custom + # + ('', 'aad3b435b51404eeaad3b435b51404ee'), + ('zzZZZzz', 'a5e6066de61c3e35aad3b435b51404ee'), + ('passphrase', '855c3697d9979e78ac404c4ba2c66533'), + ('Yokohama', '5ecd9236d21095ce7584248b8d2c9f9e'), + + # ensures cp437 used for unicode + (u('ENCYCLOP\xC6DIA'), 'fed6416bffc9750d48462b9d7aaac065'), + (u('encyclop\xE6dia'), 'fed6416bffc9750d48462b9d7aaac065'), + + # test various encoding values + ((u("\xC6"), None), '25d8ab4a0659c97aaad3b435b51404ee'), + ((u("\xC6"), "cp437"), '25d8ab4a0659c97aaad3b435b51404ee'), + ((u("\xC6"), "latin-1"), '184eecbbe9991b44aad3b435b51404ee'), + ((u("\xC6"), "utf-8"), '00dd240fcfab20b8aad3b435b51404ee'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '855c3697d9979e78ac404c4ba2c6653X', + ] + + def test_90_raw(self): + "test lmhash.raw() method" + from binascii import unhexlify + from passlib.utils.compat import str_to_bascii + lmhash = self.handler + for secret, hash in self.known_correct_hashes: + kwds = {} + secret = self.populate_context(secret, kwds) + data = unhexlify(str_to_bascii(hash)) + self.assertEqual(lmhash.raw(secret, **kwds), data) + self.assertRaises(TypeError, lmhash.raw, 1) + +#============================================================================= +# md5 crypt +#============================================================================= +class _md5_crypt_test(HandlerCase): + handler = hash.md5_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '$1$dXc3I7Rw$ctlgjDdWJLMT.qwHsWhXR1'), + ('U*U***U', '$1$dXc3I7Rw$94JPyQc/eAgQ3MFMCoMF.0'), + ('U*U***U*', '$1$dXc3I7Rw$is1mVIAEtAhIzSdfn5JOO0'), + ('*U*U*U*U', '$1$eQT9Hwbt$XtuElNJD.eW5MN5UCWyTQ0'), + ('', '$1$Eu.GHtia$CFkL/nE1BYTlEPiVx1VWX0'), + + # + # custom + # + + # NOTE: would need to patch HandlerCase to coerce hashes + # to native str for this first one to work under py3. 
+## ('', b('$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.')), + ('', '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'), + (' ', '$1$m/5ee7ol$bZn0kIBFipq39e.KDXX8I0'), + ('test', '$1$ec6XvcoW$ghEtNK2U1MC5l.Dwgi3020'), + ('Compl3X AlphaNu3meric', '$1$nX1e7EeI$ljQn72ZUgt6Wxd9hfvHdV0'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$1$jQS7o98J$V6iTcr71CGgwW2laf17pi1'), + ('test', '$1$SuMrG47N$ymvzYjr7QcEQjaK5m1PGx1'), + (b('test'), '$1$SuMrG47N$ymvzYjr7QcEQjaK5m1PGx1'), + (u('s'), '$1$ssssssss$YgmLTApYTv12qgTwBoj8i/'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$1$d6/Ky1lU$/xpf8m7ftmWLF.TjHCqel0'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash \/ + '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o!', + + # too many fields + '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.$', + ] + + platform_crypt_support = [ + ("freebsd|openbsd|netbsd|linux|solaris", True), + ("darwin", False), + ] + +md5_crypt_os_crypt_test, md5_crypt_builtin_test = \ + _md5_crypt_test.create_backend_cases(["os_crypt","builtin"]) + +#============================================================================= +# msdcc 1 & 2 +#============================================================================= +class msdcc_test(UserHandlerMixin, HandlerCase): + handler = hash.msdcc + user_case_insensitive = True + + known_correct_hashes = [ + + # + # http://www.jedge.com/wordpress/windows-password-cache/ + # + (("Asdf999", "sevans"), "b1176c2587478785ec1037e5abc916d0"), + + # + # http://infosecisland.com/blogview/12156-Cachedump-for-Meterpreter-in-Action.html + # + (("ASDqwe123", "jdoe"), "592cdfbc3f1ef77ae95c75f851e37166"), + + # + # http://comments.gmane.org/gmane.comp.security.openwall.john.user/1917 + # + (("test1", "test1"), "64cd29e36a8431a2b111378564a10631"), + (("test2", "test2"), "ab60bdb4493822b175486810ac2abe63"), + (("test3", "test3"), "14dd041848e12fc48c0aa7a416a4a00c"), + (("test4", "test4"), "b945d24866af4b01a6d89b9d932a153c"), + + # + # http://ciscoit.wordpress.com/2011/04/13/metasploit-hashdump-vs-cachedump/ + # + (("1234qwer!@#$", "Administrator"), "7b69d06ef494621e3f47b9802fe7776d"), + + # + # http://www.securiteam.com/tools/5JP0I2KFPA.html + # + (("password", "user"), "2d9f0b052932ad18b87f315641921cda"), + + # + # from JTR 1.7.9 + # + (("", "root"), "176a4c2bd45ac73687676c2f09045353"), + (("test1", "TEST1"), "64cd29e36a8431a2b111378564a10631"), + (("okolada", "nineteen_characters"), "290efa10307e36a79b3eebf2a6b29455"), + ((u("\u00FC"), u("\u00FC")), "48f84e6f73d6d5305f6558a33fa2c9bb"), + ((u("\u00FC\u00FC"), u("\u00FC\u00FC")), "593246a8335cf0261799bda2a2a9c623"), + ((u("\u20AC\u20AC"), "user"), "9121790702dda0fa5d353014c334c2ce"), + + # + # custom + # + + # ensures utf-8 used for unicode + ((UPASS_TABLE, 'bob'), 'fcb82eb4212865c7ac3503156ca3f349'), + ] + + known_alternate_hashes = [ + # check uppercase accepted. 
+ ("B1176C2587478785EC1037E5ABC916D0", ("Asdf999", "sevans"), + "b1176c2587478785ec1037e5abc916d0"), + ] + +class msdcc2_test(UserHandlerMixin, HandlerCase): + handler = hash.msdcc2 + user_case_insensitive = True + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + (("test1", "test1"), "607bbe89611e37446e736f7856515bf8"), + (("qerwt", "Joe"), "e09b38f84ab0be586b730baf61781e30"), + (("12345", "Joe"), "6432f517a900b3fc34ffe57f0f346e16"), + (("", "bin"), "c0cbe0313a861062e29f92ede58f9b36"), + (("w00t", "nineteen_characters"), "87136ae0a18b2dafe4a41d555425b2ed"), + (("w00t", "eighteencharacters"), "fc5df74eca97afd7cd5abb0032496223"), + (("longpassword", "twentyXXX_characters"), "cfc6a1e33eb36c3d4f84e4c2606623d2"), + (("longpassword", "twentyoneX_characters"), "99ff74cea552799da8769d30b2684bee"), + (("longpassword", "twentytwoXX_characters"), "0a721bdc92f27d7fb23b87a445ec562f"), + (("test2", "TEST2"), "c6758e5be7fc943d00b97972a8a97620"), + (("test3", "test3"), "360e51304a2d383ea33467ab0b639cc4"), + (("test4", "test4"), "6f79ee93518306f071c47185998566ae"), + ((u("\u00FC"), "joe"), "bdb80f2c4656a8b8591bd27d39064a54"), + ((u("\u20AC\u20AC"), "joe"), "1e1e20f482ff748038e47d801d0d1bda"), + ((u("\u00FC\u00FC"), "admin"), "0839e4a07c00f18a8c65cf5b985b9e73"), + + # + # custom + # + + # custom unicode test + ((UPASS_TABLE, 'bob'), 'cad511dc9edefcf69201da72efb6bb55'), + ] + +#============================================================================= +# mssql 2000 & 2005 +#============================================================================= +class mssql2000_test(HandlerCase): + handler = hash.mssql2000 + secret_case_insensitive = "verify-only" + # FIXME: fix UT framework - this hash is sensitive to password case, but verify() is not + + known_correct_hashes = [ + # + # http://hkashfi.blogspot.com/2007/08/breaking-sql-server-2005-hashes.html + # + ('Test', '0x010034767D5C0CFA5FDCA28C4A56085E65E882E71CB0ED2503412FD54D6119FFF04129A1D72E7C3194F7284A7F3A'), + ('TEST', '0x010034767D5C2FD54D6119FFF04129A1D72E7C3194F7284A7F3A2FD54D6119FFF04129A1D72E7C3194F7284A7F3A'), + + # + # http://www.sqlmag.com/forums/aft/68438 + # + ('x', '0x010086489146C46DD7318D2514D1AC706457CBF6CD3DF8407F071DB4BBC213939D484BF7A766E974F03C96524794'), + + # + # http://stackoverflow.com/questions/173329/how-to-decrypt-a-password-from-sql-server + # + ('AAAA', '0x0100CF465B7B12625EF019E157120D58DD46569AC7BF4118455D12625EF019E157120D58DD46569AC7BF4118455D'), + + # + # http://msmvps.com/blogs/gladchenko/archive/2005/04/06/41083.aspx + # + ('123', '0x01002D60BA07FE612C8DE537DF3BFCFA49CD9968324481C1A8A8FE612C8DE537DF3BFCFA49CD9968324481C1A8A8'), + + # + # http://www.simple-talk.com/sql/t-sql-programming/temporarily-changing-an-unknown-password-of-the-sa-account-/ + # + ('12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + + # + # XXX: sample is incomplete, password unknown + # https://anthonystechblog.wordpress.com/2011/04/20/password-encryption-in-sql-server-how-to-tell-if-a-user-is-using-a-weak-password/ + # (????, '0x0100813F782D66EF15E40B1A3FDF7AB88B322F51401A87D8D3E3A8483C4351A3D96FC38499E6CDD2B6F?????????'), + # + + # + # from JTR 1.7.9 + # + ('foo', '0x0100A607BA7C54A24D17B565C59F1743776A10250F581D482DA8B6D6261460D3F53B279CC6913CE747006A2E3254'), + ('bar', '0x01000508513EADDF6DB7DDD270CCA288BF097F2FF69CC2DB74FBB9644D6901764F999BAB9ECB80DE578D92E3F80D'), + ('canard', '0x01008408C523CF06DCB237835D701C165E68F9460580132E28ED8BC558D22CEDF8801F4503468A80F9C52A12C0A3'), + 
('lapin', '0x0100BF088517935FC9183FE39FDEC77539FD5CB52BA5F5761881E5B9638641A79DBF0F1501647EC941F3355440A2'), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_USD, '0x0100624C0961B28E39FEE13FD0C35F57B4523F0DA1861C11D5A5B28E39FEE13FD0C35F57B4523F0DA1861C11D5A5'), + (UPASS_TABLE, '0x010083104228FAD559BE52477F2131E538BE9734E5C4B0ADEFD7F6D784B03C98585DC634FE2B8CA3A6DFFEC729B4'), + + ] + + known_correct_configs = [ + ('0x010034767D5C00000000000000000000000000000000000000000000000000000000000000000000000000000000', + 'Test', '0x010034767D5C0CFA5FDCA28C4A56085E65E882E71CB0ED2503412FD54D6119FFF04129A1D72E7C3194F7284A7F3A'), + ] + + known_alternate_hashes = [ + # lower case hex + ('0x01005b20054332752e1bc2e7c5df0f9ebfe486e9bee063e8d3b332752e1bc2e7c5df0f9ebfe486e9bee063e8d3b3', + '12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + ] + + known_unidentified_hashes = [ + # malformed start + '0X01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + + # wrong magic value + '0x02005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + + # wrong size + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3', + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3AF', + + # mssql2005 + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + ] + + known_malformed_hashes = [ + # non-hex char -----\/ + b('0x01005B200543327G2E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + u('0x01005B200543327G2E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + ] + +class mssql2005_test(HandlerCase): + handler = hash.mssql2005 + + known_correct_hashes = [ + # + # http://hkashfi.blogspot.com/2007/08/breaking-sql-server-2005-hashes.html + # + ('TEST', '0x010034767D5C2FD54D6119FFF04129A1D72E7C3194F7284A7F3A'), + + # + # http://www.openwall.com/lists/john-users/2009/07/14/2 + # + ('toto', '0x01004086CEB6BF932BC4151A1AF1F13CD17301D70816A8886908'), + + # + # http://msmvps.com/blogs/gladchenko/archive/2005/04/06/41083.aspx + # + ('123', '0x01004A335DCEDB366D99F564D460B1965B146D6184E4E1025195'), + ('123', '0x0100E11D573F359629B344990DCD3D53DE82CF8AD6BBA7B638B6'), + + # + # XXX: password unknown + # http://www.simple-talk.com/sql/t-sql-programming/temporarily-changing-an-unknown-password-of-the-sa-account-/ + # (???, '0x01004086CEB6301EEC0A994E49E30DA235880057410264030797'), + # + + # + # http://therelentlessfrontend.com/2010/03/26/encrypting-and-decrypting-passwords-in-sql-server/ + # + ('AAAA', '0x010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F30'), + + # + # from JTR 1.7.9 + # + ("toto", "0x01004086CEB6BF932BC4151A1AF1F13CD17301D70816A8886908"), + ("titi", "0x01004086CEB60ED526885801C23B366965586A43D3DEAC6DD3FD"), + ("foo", "0x0100A607BA7C54A24D17B565C59F1743776A10250F581D482DA8"), + ("bar", "0x01000508513EADDF6DB7DDD270CCA288BF097F2FF69CC2DB74FB"), + ("canard", "0x01008408C523CF06DCB237835D701C165E68F9460580132E28ED"), + ("lapin", "0x0100BF088517935FC9183FE39FDEC77539FD5CB52BA5F5761881"), + + # + # adapted from mssql2000.known_correct_hashes (above) + # + ('Test', '0x010034767D5C0CFA5FDCA28C4A56085E65E882E71CB0ED250341'), + ('Test', '0x0100993BF2315F36CC441485B35C4D84687DC02C78B0E680411F'), + ('x', '0x010086489146C46DD7318D2514D1AC706457CBF6CD3DF8407F07'), + ('AAAA', '0x0100CF465B7B12625EF019E157120D58DD46569AC7BF4118455D'), + 
('123', '0x01002D60BA07FE612C8DE537DF3BFCFA49CD9968324481C1A8A8'), + ('12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_USD, '0x0100624C0961B28E39FEE13FD0C35F57B4523F0DA1861C11D5A5'), + (UPASS_TABLE, '0x010083104228FAD559BE52477F2131E538BE9734E5C4B0ADEFD7'), + ] + + known_correct_configs = [ + ('0x010034767D5C0000000000000000000000000000000000000000', + 'Test', '0x010034767D5C0CFA5FDCA28C4A56085E65E882E71CB0ED250341'), + ] + + known_alternate_hashes = [ + # lower case hex + ('0x01005b20054332752e1bc2e7c5df0f9ebfe486e9bee063e8d3b3', + '12345', '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3'), + ] + + known_unidentified_hashes = [ + # malformed start + '0X010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F30', + + # wrong magic value + '0x020036D726AE86834E97F20B198ACD219D60B446AC5E48C54F30', + + # wrong size + '0x010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F', + '0x010036D726AE86834E97F20B198ACD219D60B446AC5E48C54F3012', + + # mssql2000 + '0x01005B20054332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B332752E1BC2E7C5DF0F9EBFE486E9BEE063E8D3B3', + ] + + known_malformed_hashes = [ + # non-hex char --\/ + '0x010036D726AE86G34E97F20B198ACD219D60B446AC5E48C54F30', + ] + +#============================================================================= +# mysql 323 & 41 +#============================================================================= +class mysql323_test(HandlerCase): + handler = hash.mysql323 + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('drew', '697a7de87c5390b2'), + ('password', "5d2e19393cc5ef67"), + + # + # custom + # + ('mypass', '6f8c114b58f2ce9e'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '4ef327ca5491c8d7'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '6z8c114b58f2ce9e', + ] + + def test_90_whitespace(self): + "check whitespace is ignored per spec" + h = self.do_encrypt("mypass") + h2 = self.do_encrypt("my pass") + self.assertEqual(h, h2) + + def accept_fuzz_pair(self, secret, other): + # override to handle whitespace + return secret.replace(" ","") != other.replace(" ","") + +class mysql41_test(HandlerCase): + handler = hash.mysql41 + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('verysecretpassword', '*2C905879F74F28F8570989947D06A8429FB943E6'), + ('12345678123456781234567812345678', '*F9F1470004E888963FB466A5452C9CBD9DF6239C'), + ("' OR 1 /*'", '*97CF7A3ACBE0CA58D5391AC8377B5D9AC11D46D9'), + + # + # custom + # + ('mypass', '*6C8989366EAF75BB670AD8EA7A7FC1176A95CEF4'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '*E7AFE21A9CFA2FC9D15D942AE8FB5C240FE5837B'), + ] + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '*6Z8989366EAF75BB670AD8EA7A7FC1176A95CEF4', + ] + +#============================================================================= +# NTHASH +#============================================================================= +class nthash_test(HandlerCase): + handler = hash.nthash + + known_correct_hashes = [ + # + # http://msdn.microsoft.com/en-us/library/cc245828(v=prot.10).aspx + # + ("OLDPASSWORD", u("6677b2c394311355b54f25eec5bfacf5")), + ("NEWPASSWORD", u("256781a62031289d3c2c98c14f1efc8c")), + + # + # from JTR 1.7.9 + # + + # ascii + ('', '31d6cfe0d16ae931b73c59d7e0c089c0'), + ('tigger', 'b7e0ea9fbffcf6dd83086e905089effd'), + + # utf-8 + (b('\xC3\xBC'), '8bd6e4fb88e01009818749c5443ea712'), + (b('\xC3\xBC\xC3\xBC'), 'cc1260adb6985ca749f150c7e0b22063'), + (b('\xE2\x82\xAC'), 
'030926b781938db4365d46adc7cfbcb8'), + (b('\xE2\x82\xAC\xE2\x82\xAC'),'682467b963bb4e61943e170a04f7db46'), + + # + # custom + # + ('passphrase', '7f8fe03093cc84b267b109625f6bbf4b'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash + '7f8fe03093cc84b267b109625f6bbfxb', + ] + +class bsd_nthash_test(HandlerCase): + handler = hash.bsd_nthash + + known_correct_hashes = [ + ('passphrase', '$3$$7f8fe03093cc84b267b109625f6bbf4b'), + (b('\xC3\xBC'), '$3$$8bd6e4fb88e01009818749c5443ea712'), + ] + + known_unidentified_hashes = [ + # bad char in otherwise correct hash --\/ + '$3$$7f8fe03093cc84b267b109625f6bbfxb', + ] + +#============================================================================= +# oracle 10 & 11 +#============================================================================= +class oracle10_test(UserHandlerMixin, HandlerCase): + handler = hash.oracle10 + secret_case_insensitive = True + user_case_insensitive = True + + # TODO: get more test vectors (especially ones which properly test unicode) + known_correct_hashes = [ + # ((secret,user),hash) + + # + # http://www.petefinnigan.com/default/default_password_list.htm + # + (('tiger', 'scott'), 'F894844C34402B67'), + ((u('ttTiGGeR'), u('ScO')), '7AA1A84E31ED7771'), + (("d_syspw", "SYSTEM"), '1B9F1F9A5CB9EB31'), + (("strat_passwd", "strat_user"), 'AEBEDBB4EFB5225B'), + + # + # http://openwall.info/wiki/john/sample-hashes + # + (('#95LWEIGHTS', 'USER'), '000EA4D72A142E29'), + (('CIAO2010', 'ALFREDO'), 'EB026A76F0650F7B'), + + # + # from JTR 1.7.9 + # + (('GLOUGlou', 'Bob'), 'CDC6B483874B875B'), + (('GLOUGLOUTER', 'bOB'), 'EF1F9139DB2D5279'), + (('LONG_MOT_DE_PASSE_OUI', 'BOB'), 'EC8147ABB3373D53'), + + # + # custom + # + ((UPASS_TABLE, 'System'), 'B915A853F297B281'), + ] + + known_unidentified_hashes = [ + # bad char in hash --\ + 'F894844C34402B6Z', + ] + +class oracle11_test(HandlerCase): + handler = hash.oracle11 + # TODO: find more test vectors (especially ones which properly test unicode) + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ("abc123", "S:5FDAB69F543563582BA57894FE1C1361FB8ED57B903603F2C52ED1B4D642"), + ("SyStEm123!@#", "S:450F957ECBE075D2FA009BA822A9E28709FBC3DA82B44D284DDABEC14C42"), + ("oracle", "S:3437FF72BD69E3FB4D10C750B92B8FB90B155E26227B9AB62D94F54E5951"), + ("11g", "S:61CE616647A4F7980AFD7C7245261AF25E0AFE9C9763FCF0D54DA667D4E6"), + ("11g", "S:B9E7556F53500C8C78A58F50F24439D79962DE68117654B6700CE7CC71CF"), + + # + # source? 
+ # + ("SHAlala", "S:2BFCFDF5895014EE9BB2B9BA067B01E0389BB5711B7B5F82B7235E9E182C"), + + # + # custom + # + (UPASS_TABLE, 'S:51586343E429A6DF024B8F242F2E9F8507B1096FACD422E29142AA4974B0'), + ] + +#============================================================================= +# pbkdf2 hashes +#============================================================================= +class atlassian_pbkdf2_sha1_test(HandlerCase): + handler = hash.atlassian_pbkdf2_sha1 + + known_correct_hashes = [ + # + # generated using Jira + # + ("admin", '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy60IPksHChwoTAVYFrhsgoq8/p'), + (UPASS_WAV, + "{PKCS5S2}cE9Yq6Am5tQGdHSHhky2XLeOnURwzaLBG2sur7FHKpvy2u0qDn6GcVGRjlmJoIUy"), + ] + + known_malformed_hashes = [ + # bad char ---\/ + '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy!0IPksHChwoTAVYFrhsgoq8/p' + + # bad size, missing padding + '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy60IPksHChwoTAVYFrhsgoq8/' + + # bad size, with correct padding + '{PKCS5S2}c4xaeTQM0lUieMS3V5voiexyX9XhqC2dBd5ecVy60IPksHChwoTAVYFrhsgoq8/=' + ] + +class pbkdf2_sha1_test(HandlerCase): + handler = hash.pbkdf2_sha1 + known_correct_hashes = [ + ("password", '$pbkdf2$1212$OB.dtnSEXZK8U5cgxU/GYQ$y5LKPOplRmok7CZp/aqVDVg8zGI'), + (UPASS_WAV, + '$pbkdf2$1212$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc'), + ] + + known_malformed_hashes = [ + # zero padded rounds field + '$pbkdf2$01212$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc', + + # empty rounds field + '$pbkdf2$$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc', + + # too many field + '$pbkdf2$1212$THDqatpidANpadlLeTeOEg$HV3oi1k5C5LQCgG1BMOL.BX4YZc$', + ] + +class pbkdf2_sha256_test(HandlerCase): + handler = hash.pbkdf2_sha256 + known_correct_hashes = [ + ("password", + '$pbkdf2-sha256$1212$4vjV83LKPjQzk31VI4E0Vw$hsYF68OiOUPdDZ1Fg.fJPeq1h/gXXY7acBp9/6c.tmQ' + ), + (UPASS_WAV, + '$pbkdf2-sha256$1212$3SABFJGDtyhrQMVt1uABPw$WyaUoqCLgvz97s523nF4iuOqZNbp5Nt8do/cuaa7AiI' + ), + ] + +class pbkdf2_sha512_test(HandlerCase): + handler = hash.pbkdf2_sha512 + known_correct_hashes = [ + ("password", + '$pbkdf2-sha512$1212$RHY0Fr3IDMSVO/RSZyb5ow$eNLfBK.eVozomMr.1gYa1' + '7k9B7KIK25NOEshvhrSX.esqY3s.FvWZViXz4KoLlQI.BzY/YTNJOiKc5gBYFYGww' + ), + (UPASS_WAV, + '$pbkdf2-sha512$1212$KkbvoKGsAIcF8IslDR6skQ$8be/PRmd88Ps8fmPowCJt' + 'tH9G3vgxpG.Krjt3KT.NP6cKJ0V4Prarqf.HBwz0dCkJ6xgWnSj2ynXSV7MlvMa8Q' + ), + ] + +class cta_pbkdf2_sha1_test(HandlerCase): + handler = hash.cta_pbkdf2_sha1 + known_correct_hashes = [ + # + # test vectors from original implementation + # + (u("hashy the \N{SNOWMAN}"), '$p5k2$1000$ZxK4ZBJCfQg=$jJZVscWtO--p1-xIZl6jhO2LKR0='), + + # + # custom + # + ("password", "$p5k2$1$$h1TDLGSw9ST8UMAPeIE13i0t12c="), + (UPASS_WAV, + "$p5k2$4321$OTg3NjU0MzIx$jINJrSvZ3LXeIbUdrJkRpN62_WQ="), + ] + +class dlitz_pbkdf2_sha1_test(HandlerCase): + handler = hash.dlitz_pbkdf2_sha1 + known_correct_hashes = [ + # + # test vectors from original implementation + # + ('cloadm', '$p5k2$$exec$r1EWMCMk7Rlv3L/RNcFXviDefYa0hlql'), + ('gnu', '$p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g'), + ('dcl', '$p5k2$d$tUsch7fU$nqDkaxMDOFBeJsTSfABsyn.PYUXilHwL'), + ('spam', '$p5k2$3e8$H0NX9mT/$wk/sE8vv6OMKuMaqazCJYDSUhWY9YB2J'), + (UPASS_WAV, + '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ'), + ] + +class grub_pbkdf2_sha512_test(HandlerCase): + handler = hash.grub_pbkdf2_sha512 + known_correct_hashes = [ + # + # test vectors generated from cmd line tool + # + + # salt=32 bytes + (UPASS_WAV, + 'grub.pbkdf2.sha512.10000.BCAC1CEC5E4341C8C511C529' + 
'7FA877BE91C2817B32A35A3ECF5CA6B8B257F751.6968526A' + '2A5B1AEEE0A29A9E057336B48D388FFB3F600233237223C21' + '04DE1752CEC35B0DD1ED49563398A282C0F471099C2803FBA' + '47C7919CABC43192C68F60'), + + # salt=64 bytes + ('toomanysecrets', + 'grub.pbkdf2.sha512.10000.9B436BB6978682363D5C449B' + 'BEAB322676946C632208BC1294D51F47174A9A3B04A7E4785' + '986CD4EA7470FAB8FE9F6BD522D1FC6C51109A8596FB7AD48' + '7C4493.0FE5EF169AFFCB67D86E2581B1E251D88C777B98BA' + '2D3256ECC9F765D84956FC5CA5C4B6FD711AA285F0A04DCF4' + '634083F9A20F4B6F339A52FBD6BED618E527B'), + + ] + +#============================================================================= +# PHPass Portable Crypt +#============================================================================= +class phpass_test(HandlerCase): + handler = hash.phpass + + known_correct_hashes = [ + # + # from official 0.3 implementation + # http://www.openwall.com/phpass/ + # + ('test12345', '$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r.L0'), # from the source + + # + # from JTR 1.7.9 + # + ('test1', '$H$9aaaaaSXBjgypwqm.JsMssPLiS8YQ00'), + ('123456', '$H$9PE8jEklgZhgLmZl5.HYJAzfGCQtzi1'), + ('123456', '$H$9pdx7dbOW3Nnt32sikrjAxYFjX8XoK1'), + ('thisisalongertestPW', '$P$912345678LIjjb6PhecupozNBmDndU0'), + ('JohnRipper', '$P$612345678si5M0DDyPpmRCmcltU/YW/'), + ('JohnRipper', '$H$712345678WhEyvy1YWzT4647jzeOmo0'), + ('JohnRipper', '$P$B12345678L6Lpt4BxNotVIMILOa9u81'), + + # + # custom + # + ('', '$P$7JaFQsPzJSuenezefD/3jHgt5hVfNH0'), + ('compL3X!', '$P$FiS0N5L672xzQx1rt1vgdJQRYKnQM9/'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$P$7SMy8VxnfsIy2Sxm7fJxDSdil.h7TW.'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + # ---\/ + '$P$9IQRaTwmfeRo7ud9Fh4E2PdI0S3r!L0', + ] + +#============================================================================= +# plaintext +#============================================================================= +class plaintext_test(HandlerCase): + # TODO: integrate EncodingHandlerMixin + handler = hash.plaintext + accepts_all_hashes = True + + known_correct_hashes = [ + ('',''), + ('password', 'password'), + + # ensure unicode uses utf-8 + (UPASS_TABLE, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + (PASS_TABLE_UTF8, UPASS_TABLE if PY3 else PASS_TABLE_UTF8), + ] + +#============================================================================= +# postgres_md5 +#============================================================================= +class postgres_md5_test(UserHandlerMixin, HandlerCase): + handler = hash.postgres_md5 + known_correct_hashes = [ + # ((secret,user),hash) + + # + # generated using postgres 8.1 + # + (('mypass', 'postgres'), 'md55fba2ea04fd36069d2574ea71c8efe9d'), + (('mypass', 'root'), 'md540c31989b20437833f697e485811254b'), + (("testpassword",'testuser'), 'md5d4fc5129cc2c25465a5370113ae9835f'), + + # + # custom + # + + # verify unicode->utf8 + ((UPASS_TABLE, 'postgres'), 'md5cb9f11283265811ce076db86d18a22d2'), + ] + known_unidentified_hashes = [ + # bad 'z' char in otherwise correct hash + 'md54zc31989b20437833f697e485811254b', + ] + +#============================================================================= +# scram hash +#============================================================================= +class scram_test(HandlerCase): + handler = hash.scram + + # TODO: need a bunch more reference vectors from some real + # SCRAM transactions. 
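+ # A rough sketch of the layout, inferred from the vectors below rather
+ # than taken from the handler itself: each hash packs one digest per
+ # algorithm as
+ #   $scram$<rounds>$<salt>$<alg>=<digest>,<alg>=<digest>,...
+ # so a vector can be spot-checked with something like
+ #   hash.scram.verify("pencil", h)             # default quick check
+ #   hash.scram.verify("pencil", h, full=True)  # cross-check every digest
+ # (the full=True path is exercised in test_96_full_verify further down).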
+ known_correct_hashes = [ + # + # taken from example in SCRAM specification (rfc 5802) + # + ('pencil', '$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30'), + + # + # custom + # + + # same as 5802 example hash, but with sha-256 & sha-512 added. + ('pencil', '$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY,' + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ'), + + # test unicode passwords & saslprep (all the passwords below + # should normalize to the same value: 'IX \xE0') + (u('IX \xE0'), '$scram$6400$0BojBCBE6P2/N4bQ$' + 'sha-1=YniLes.b8WFMvBhtSACZyyvxeCc'), + (u('\u2168\u3000a\u0300'), '$scram$6400$0BojBCBE6P2/N4bQ$' + 'sha-1=YniLes.b8WFMvBhtSACZyyvxeCc'), + (u('\u00ADIX \xE0'), '$scram$6400$0BojBCBE6P2/N4bQ$' + 'sha-1=YniLes.b8WFMvBhtSACZyyvxeCc'), + ] + + known_malformed_hashes = [ + # zero-padding in rounds + '$scram$04096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', + + # non-digit in rounds + '$scram$409A$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', + + # bad char in salt ---\/ + '$scram$4096$QSXCR.Q6sek8bf9-$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', + + # bad char in digest ---\/ + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX3-', + + # missing sections + '$scram$4096$QSXCR.Q6sek8bf92', + '$scram$4096$QSXCR.Q6sek8bf92$', + + # too many sections + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30$', + + # missing separator + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY', + + # too many chars in alg name + '$scram$4096$QSXCR.Q6sek8bf92$sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'shaxxx-190=HZbuOlKbWl.eR8AfIposuKbhX30', + + # missing sha-1 alg + '$scram$4096$QSXCR.Q6sek8bf92$sha-256=HZbuOlKbWl.eR8AfIposuKbhX30', + + # non-iana name + '$scram$4096$QSXCR.Q6sek8bf92$sha1=HZbuOlKbWl.eR8AfIposuKbhX30', + ] + + def setUp(self): + super(scram_test, self).setUp() + + # some platforms lack stringprep (e.g. Jython, IronPython) + self.require_stringprep() + + # silence norm_hash_name() warning + warnings.filterwarnings("ignore", r"norm_hash_name\(\): unknown hash") + + def test_90_algs(self): + "test parsing of 'algs' setting" + defaults = dict(salt=b('A')*10, rounds=1000) + def parse(algs, **kwds): + for k in defaults: + kwds.setdefault(k, defaults[k]) + return self.handler(algs=algs, **kwds).algs + + # None -> default list + self.assertEqual(parse(None, use_defaults=True), hash.scram.default_algs) + self.assertRaises(TypeError, parse, None) + + # strings should be parsed + self.assertEqual(parse("sha1"), ["sha-1"]) + self.assertEqual(parse("sha1, sha256, md5"), ["md5","sha-1","sha-256"]) + + # lists should be normalized + self.assertEqual(parse(["sha-1","sha256"]), ["sha-1","sha-256"]) + + # sha-1 required + self.assertRaises(ValueError, parse, ["sha-256"]) + self.assertRaises(ValueError, parse, algs=[], use_defaults=True) + + # alg names must be < 10 chars + self.assertRaises(ValueError, parse, ["sha-1","shaxxx-190"]) + + # alg & checksum mutually exclusive. 
+ self.assertRaises(RuntimeError, parse, ['sha-1'], + checksum={"sha-1": b("\x00"*20)}) + + def test_90_checksums(self): + "test internal parsing of 'checksum' keyword" + # check non-bytes checksum values are rejected + self.assertRaises(TypeError, self.handler, use_defaults=True, + checksum={'sha-1': u('X')*20}) + + # check sha-1 is required + self.assertRaises(ValueError, self.handler, use_defaults=True, + checksum={'sha-256': b('X')*32}) + + # XXX: anything else that's not tested by the other code already? + + def test_91_extract_digest_info(self): + "test scram.extract_digest_info()" + edi = self.handler.extract_digest_info + + # return appropriate value or throw KeyError + h = "$scram$10$AAAAAA$sha-1=AQ,bbb=Ag,ccc=Aw" + s = b('\x00')*4 + self.assertEqual(edi(h,"SHA1"), (s,10, b('\x01'))) + self.assertEqual(edi(h,"bbb"), (s,10, b('\x02'))) + self.assertEqual(edi(h,"ccc"), (s,10, b('\x03'))) + self.assertRaises(KeyError, edi, h, "ddd") + + # config strings should cause value error. + c = "$scram$10$....$sha-1,bbb,ccc" + self.assertRaises(ValueError, edi, c, "sha-1") + self.assertRaises(ValueError, edi, c, "bbb") + self.assertRaises(ValueError, edi, c, "ddd") + + def test_92_extract_digest_algs(self): + "test scram.extract_digest_algs()" + eda = self.handler.extract_digest_algs + + self.assertEqual(eda('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30'), ["sha-1"]) + + self.assertEqual(eda('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30', format="hashlib"), + ["sha1"]) + + self.assertEqual(eda('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY,' + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ'), + ["sha-1","sha-256","sha-512"]) + + def test_93_derive_digest(self): + "test scram.derive_digest()" + # NOTE: this just does a light test, since derive_digest + # is used by encrypt / verify, and is tested pretty well via those. + + hash = self.handler.derive_digest + + # check various encodings of password work. + s1 = b('\x01\x02\x03') + d1 = b('\xb2\xfb\xab\x82[tNuPnI\x8aZZ\x19\x87\xcen\xe9\xd3') + self.assertEqual(hash(u("\u2168"), s1, 1000, 'sha-1'), d1) + self.assertEqual(hash(b("\xe2\x85\xa8"), s1, 1000, 'SHA-1'), d1) + self.assertEqual(hash(u("IX"), s1, 1000, 'sha1'), d1) + self.assertEqual(hash(b("IX"), s1, 1000, 'SHA1'), d1) + + # check algs + self.assertEqual(hash("IX", s1, 1000, 'md5'), + b('3\x19\x18\xc0\x1c/\xa8\xbf\xe4\xa3\xc2\x8eM\xe8od')) + self.assertRaises(ValueError, hash, "IX", s1, 1000, 'sha-666') + + # check rounds + self.assertRaises(ValueError, hash, "IX", s1, 0, 'sha-1') + + # bad types + self.assertRaises(TypeError, hash, "IX", u('\x01'), 1000, 'md5') + + def test_94_saslprep(self): + "test encrypt/verify use saslprep" + # NOTE: this just does a light test that saslprep() is being + # called in various places, relying in saslpreps()'s tests + # to verify full normalization behavior. 
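+ # (aside, stated here as background rather than taken from the code:
+ # RFC 4013 saslprep applies NFKC and drops "mapped to nothing" chars,
+ # so u"\u2168" (ROMAN NUMERAL NINE) normalizes to u"IX" and the soft
+ # hyphen u"\u00AD" vanishes -- which is why the visually different
+ # inputs below are expected to verify against the same hash.)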
+ + # encrypt unnormalized + h = self.do_encrypt(u("I\u00ADX")) + self.assertTrue(self.do_verify(u("IX"), h)) + self.assertTrue(self.do_verify(u("\u2168"), h)) + + # encrypt normalized + h = self.do_encrypt(u("\xF3")) + self.assertTrue(self.do_verify(u("o\u0301"), h)) + self.assertTrue(self.do_verify(u("\u200Do\u0301"), h)) + + # throws error if forbidden char provided + self.assertRaises(ValueError, self.do_encrypt, u("\uFDD0")) + self.assertRaises(ValueError, self.do_verify, u("\uFDD0"), h) + + def test_95_context_algs(self): + "test handling of 'algs' in context object" + handler = self.handler + from passlib.context import CryptContext + c1 = CryptContext(["scram"], scram__algs="sha1,md5") + + h = c1.encrypt("dummy") + self.assertEqual(handler.extract_digest_algs(h), ["md5", "sha-1"]) + self.assertFalse(c1.needs_update(h)) + + c2 = c1.copy(scram__algs="sha1") + self.assertFalse(c2.needs_update(h)) + + c2 = c1.copy(scram__algs="sha1,sha256") + self.assertTrue(c2.needs_update(h)) + + def test_96_full_verify(self): + "test verify(full=True) flag" + def vpart(s, h): + return self.handler.verify(s, h) + def vfull(s, h): + return self.handler.verify(s, h, full=True) + + # reference + h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVY,' + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertTrue(vfull('pencil', h)) + self.assertFalse(vfull('tape', h)) + + # catch truncated digests. + h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhV,' # -1 char + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertRaises(ValueError, vfull, 'pencil', h) + + # catch padded digests. + h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' + 'sha-256=qXUXrlcvnaxxWG00DdRgVioR2gnUpuX5r.3EZ1rdhVYa,' # +1 char + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertRaises(ValueError, vfull, 'pencil', h) + + # catch hash containing digests belonging to diff passwords. + # proper behavior for quick-verify (the default) is undefined, + # but full-verify should throw error. 
+ h = ('$scram$4096$QSXCR.Q6sek8bf92$' + 'sha-1=HZbuOlKbWl.eR8AfIposuKbhX30,' # 'pencil' + 'sha-256=R7RJDWIbeKRTFwhE9oxh04kab0CllrQ3kCcpZUcligc,' # 'tape' + 'sha-512=lzgniLFcvglRLS0gt.C4gy.NurS3OIOVRAU1zZOV4P.qFiVFO2/' # 'pencil' + 'edGQSu/kD1LwdX0SNV/KsPdHSwEl5qRTuZQ') + self.assertTrue(vpart('tape', h)) + self.assertFalse(vpart('pencil', h)) + self.assertRaises(ValueError, vfull, 'pencil', h) + self.assertRaises(ValueError, vfull, 'tape', h) + +#============================================================================= +# (netbsd's) sha1 crypt +#============================================================================= +class _sha1_crypt_test(HandlerCase): + handler = hash.sha1_crypt + + known_correct_hashes = [ + # + # custom + # + ("password", "$sha1$19703$iVdJqfSE$v4qYKl1zqYThwpjJAoKX6UvlHq/a"), + ("password", "$sha1$21773$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH"), + (UPASS_TABLE, '$sha1$40000$uJ3Sp7LE$.VEmLO5xntyRFYihC7ggd3297T/D'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + '$sha1$21773$u!7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH', + + # zero padded rounds + '$sha1$01773$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH', + + # too many fields + '$sha1$21773$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH$', + + # empty rounds field + '$sha1$$uV7PTeux$I9oHnvwPZHMO0Nq6/WgyGV/tDJIH$', + ] + + platform_crypt_support = [ + ("netbsd", True), + ("freebsd|openbsd|linux|solaris|darwin", False), + ] + +sha1_crypt_os_crypt_test, sha1_crypt_builtin_test = \ + _sha1_crypt_test.create_backend_cases(["os_crypt","builtin"]) + +#============================================================================= +# roundup +#============================================================================= + +# NOTE: all roundup hashes use PrefixWrapper, +# so there's nothing natively to test. +# so we just have a few quick cases... 
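+# (illustrative sketch only, not copied from the handlers: a PrefixWrapper
+# alias amounts to roughly
+#     from passlib.utils.handlers import PrefixWrapper
+#     ldap_hex_md5 = PrefixWrapper("ldap_hex_md5", "hex_md5", prefix=u"{MD5}")
+# i.e. it strips the "{MD5}" prefix and delegates to the wrapped hex_md5
+# handler, so a couple of verify() round-trips per wrapper suffices.)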
+from passlib.handlers import roundup + +class RoundupTest(TestCase): + + def _test_pair(self, h, secret, hash): + self.assertTrue(h.verify(secret, hash)) + self.assertFalse(h.verify('x'+secret, hash)) + + def test_pairs(self): + self._test_pair( + hash.ldap_hex_sha1, + "sekrit", + '{SHA}8d42e738c7adee551324955458b5e2c0b49ee655') + + self._test_pair( + hash.ldap_hex_md5, + "sekrit", + '{MD5}ccbc53f4464604e714f69dd11138d8b5') + + self._test_pair( + hash.ldap_des_crypt, + "sekrit", + '{CRYPT}nFia0rj2TT59A') + + self._test_pair( + hash.roundup_plaintext, + "sekrit", + '{plaintext}sekrit') + + self._test_pair( + hash.ldap_pbkdf2_sha1, + "sekrit", + '{PBKDF2}5000$7BvbBq.EZzz/O0HuwX3iP.nAG3s$g3oPnFFaga2BJaX5PoPRljl4XIE') + +#============================================================================= +# sha256-crypt +#============================================================================= +class _sha256_crypt_test(HandlerCase): + handler = hash.sha256_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '$5$LKO/Ute40T3FNF95$U0prpBQd4PloSGU0pnpM4z9wKn4vZ1.jsrzQfPqxph9'), + ('U*U***U', '$5$LKO/Ute40T3FNF95$fdgfoJEBoMajNxCv3Ru9LyQ0xZgv0OBMQoq80LQ/Qd.'), + ('U*U***U*', '$5$LKO/Ute40T3FNF95$8Ry82xGnnPI/6HtFYnvPBTYgOL23sdMXn8C29aO.x/A'), + ('*U*U*U*U', '$5$9mx1HkCz7G1xho50$O7V7YgleJKLUhcfk9pgzdh3RapEaWqMtEp9UUBAKIPA'), + ('', '$5$kc7lRD1fpYg0g.IP$d7CMTcEqJyTXyeq8hTdu/jB/I6DGkoo62NXbHIR7S43'), + + # + # custom tests + # + ('', '$5$rounds=10428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3'), + (' ', '$5$rounds=10376$I5lNtXtRmf.OoMd8$Ko3AI1VvTANdyKhBPavaRjJzNpSatKU6QVN9uwS9MH.'), + ('test', '$5$rounds=11858$WH1ABM5sKhxbkgCK$aTQsjPkz0rBsH3lQlJxw9HDTDXPKBxC0LlVeV69P.t1'), + ('Compl3X AlphaNu3meric', '$5$rounds=10350$o.pwkySLCzwTdmQX$nCMVsnF3TXWcBPOympBUUSQi6LGGloZoOsVJMGJ09UB'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$5$rounds=11944$9dhlu07dQMRWvTId$LyUI5VWkGFwASlzntk1RLurxX54LUhgAcJZIt0pYGT7'), + (u('with unic\u00D6de'), '$5$rounds=1000$IbG0EuGQXw5EkMdP$LQ5AfPf13KufFsKtmazqnzSGZ4pxtUNw3woQ.ELRDF4'), + ] + + if TEST_MODE("full"): + # builtin alg was changed in 1.6, and had possibility of fencepost + # errors near rounds that are multiples of 42. these hashes test rounds + # 1004..1012 (42*24=1008 +/- 4) to ensure no mistakes were made. + # (also relying on fuzz testing against os_crypt backend). 
+ known_correct_hashes.extend([ + ("secret", '$5$rounds=1004$nacl$oiWPbm.kQ7.jTCZoOtdv7/tO5mWv/vxw5yTqlBagVR7'), + ("secret", '$5$rounds=1005$nacl$6Mo/TmGDrXxg.bMK9isRzyWH3a..6HnSVVsJMEX7ud/'), + ("secret", '$5$rounds=1006$nacl$I46VwuAiUBwmVkfPFakCtjVxYYaOJscsuIeuZLbfKID'), + ("secret", '$5$rounds=1007$nacl$9fY4j1AV3N/dV/YMUn1enRHKH.7nEL4xf1wWB6wfDD4'), + ("secret", '$5$rounds=1008$nacl$CiFWCfn8ODmWs0I1xAdXFo09tM8jr075CyP64bu3by9'), + ("secret", '$5$rounds=1009$nacl$QtpFX.CJHgVQ9oAjVYStxAeiU38OmFILWm684c6FyED'), + ("secret", '$5$rounds=1010$nacl$ktAwXuT5WbjBW/0ZU1eNMpqIWY1Sm4twfRE1zbZyo.B'), + ("secret", '$5$rounds=1011$nacl$QJWLBEhO9qQHyMx4IJojSN9sS41P1Yuz9REddxdO721'), + ("secret", '$5$rounds=1012$nacl$mmf/k2PkbBF4VCtERgky3bEVavmLZKFwAcvxD1p3kV2'), + ]) + + known_malformed_hashes = [ + # bad char in otherwise correct hash + '$5$rounds=10428$uy/:jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMeZGsGx2aBvxTvDFI613c3', + + # zero-padded rounds + '$5$rounds=010428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3', + + # extra "$" + '$5$rounds=10428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3$', + ] + + known_correct_configs = [ + # config, secret, result + + # + # taken from official specification at http://www.akkadia.org/drepper/SHA-crypt.txt + # + ( "$5$saltstring", "Hello world!", + "$5$saltstring$5B8vYYiY.CVt1RlTTf8KbXBH3hsxY/GNooZaBBGWEc5" ), + ( "$5$rounds=10000$saltstringsaltstring", "Hello world!", + "$5$rounds=10000$saltstringsaltst$3xv.VbSHBb41AL9AvLeujZkZRBAwqFMz2." + "opqey6IcA" ), + ( "$5$rounds=5000$toolongsaltstring", "This is just a test", + "$5$rounds=5000$toolongsaltstrin$Un/5jzAHMgOGZ5.mWJpuVolil07guHPvOW8" + "mGRcvxa5" ), + ( "$5$rounds=1400$anotherlongsaltstring", + "a very much longer text to encrypt. This one even stretches over more" + "than one line.", + "$5$rounds=1400$anotherlongsalts$Rx.j8H.h8HjEDGomFU8bDkXm3XIUnzyxf12" + "oP84Bnq1" ), + ( "$5$rounds=77777$short", + "we have a short salt string but not a short password", + "$5$rounds=77777$short$JiO1O3ZpDAxGJeaDIuqCoEFysAe1mZNJRs3pw0KQRd/" ), + ( "$5$rounds=123456$asaltof16chars..", "a short string", + "$5$rounds=123456$asaltof16chars..$gP3VQ/6X7UUEW3HkBn2w1/Ptq2jxPyzV/" + "cZKmF/wJvD" ), + ( "$5$rounds=10$roundstoolow", "the minimum number is still observed", + "$5$rounds=1000$roundstoolow$yfvwcWrQ8l/K0DAWyuPMDNHpIVlTQebY9l/gL97" + "2bIC" ), + ] + + filter_config_warnings = True # rounds too low, salt too small + + platform_crypt_support = [ + ("freebsd(9|1\d)|linux", True), + ("freebsd8", None), # added in freebsd 8.3 + ("freebsd|openbsd|netbsd|darwin", False), + # solaris - depends on policy + ] + +sha256_crypt_os_crypt_test, sha256_crypt_builtin_test = \ + _sha256_crypt_test.create_backend_cases(["os_crypt","builtin"]) + +#============================================================================= +# test sha512-crypt +#============================================================================= +class _sha512_crypt_test(HandlerCase): + handler = hash.sha512_crypt + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', "$6$LKO/Ute40T3FNF95$6S/6T2YuOIHY0N3XpLKABJ3soYcXD9mB7uVbtEZDj/LNscVhZoZ9DEH.sBciDrMsHOWOoASbNLTypH/5X26gN0"), + ('U*U***U', "$6$LKO/Ute40T3FNF95$wK80cNqkiAUzFuVGxW6eFe8J.fSVI65MD5yEm8EjYMaJuDrhwe5XXpHDJpwF/kY.afsUs1LlgQAaOapVNbggZ1"), + ('U*U***U*', "$6$LKO/Ute40T3FNF95$YS81pp1uhOHTgKLhSMtQCr2cDiUiN03Ud3gyD4ameviK1Zqz.w3oXsMgO6LrqmIEcG3hiqaUqHi/WEE2zrZqa/"), + ('*U*U*U*U', 
"$6$OmBOuxFYBZCYAadG$WCckkSZok9xhp4U1shIZEV7CCVwQUwMVea7L3A77th6SaE9jOPupEMJB.z0vIWCDiN9WLh2m9Oszrj5G.gt330"), + ('', "$6$ojWH1AiTee9x1peC$QVEnTvRVlPRhcLQCk/HnHaZmlGAAjCfrAN0FtOsOnUk5K5Bn/9eLHHiRzrTzaIKjW9NTLNIBUCtNVOowWS2mN."), + + # + # custom tests + # + ('', '$6$rounds=11021$KsvQipYPWpr93wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1'), + (' ', '$6$rounds=11104$ED9SA4qGmd57Fq2m$q/.PqACDM/JpAHKmr86nkPzzuR5.YpYa8ZJJvI8Zd89ZPUYTJExsFEIuTYbM7gAGcQtTkCEhBKmp1S1QZwaXx0'), + ('test', '$6$rounds=11531$G/gkPn17kHYo0gTF$Kq.uZBHlSBXyzsOJXtxJruOOH4yc0Is13uY7yK0PvAvXxbvc1w8DO1RzREMhKsc82K/Jh8OquV8FZUlreYPJk1'), + ('Compl3X AlphaNu3meric', '$6$rounds=10787$wakX8nGKEzgJ4Scy$X78uqaX1wYXcSCtS4BVYw2trWkvpa8p7lkAtS9O/6045fK4UB2/Jia0Uy/KzCpODlfVxVNZzCCoV9s2hoLfDs/'), + ('4lpHa N|_|M3r1K W/ Cur5Es: #$%(*)(*%#', '$6$rounds=11065$5KXQoE1bztkY5IZr$Jf6krQSUKKOlKca4hSW07MSerFFzVIZt/N3rOTsUgKqp7cUdHrwV8MoIVNCk9q9WL3ZRMsdbwNXpVk0gVxKtz1'), + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$6$rounds=40000$PEZTJDiyzV28M3.m$GTlnzfzGB44DGd1XqlmC4erAJKCP.rhvLvrYxiT38htrNzVGBnplFOHjejUGVrCfusGWxLQCc3pFO0A/1jYYr0'), + ] + + known_malformed_hashes = [ + # zero-padded rounds + '$6$rounds=011021$KsvQipYPWpr93wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1', + # bad char in otherwise correct hash + '$6$rounds=11021$KsvQipYPWpr9:wWP$v7xjI4X6vyVptJjB1Y02vZC5SaSijBkGmq1uJhPr3cvqvvkd42Xvo48yLVPFt8dvhCsnlUgpX.//Cxn91H4qy1', + ] + + known_correct_configs = [ + # config, secret, result + + # + # taken from official specification at http://www.akkadia.org/drepper/SHA-crypt.txt + # + ("$6$saltstring", "Hello world!", + "$6$saltstring$svn8UoSVapNtMuq1ukKS4tPQd8iKwSMHWjl/O817G3uBnIFNjnQJu" + "esI68u4OTLiBFdcbYEdFCoEOfaS35inz1" ), + + ( "$6$rounds=10000$saltstringsaltstring", "Hello world!", + "$6$rounds=10000$saltstringsaltst$OW1/O6BYHV6BcXZu8QVeXbDWra3Oeqh0sb" + "HbbMCVNSnCM/UrjmM0Dp8vOuZeHBy/YTBmSK6H9qs/y3RnOaw5v." ), + + ( "$6$rounds=5000$toolongsaltstring", "This is just a test", + "$6$rounds=5000$toolongsaltstrin$lQ8jolhgVRVhY4b5pZKaysCLi0QBxGoNeKQ" + "zQ3glMhwllF7oGDZxUhx1yxdYcz/e1JSbq3y6JMxxl8audkUEm0" ), + + ( "$6$rounds=1400$anotherlongsaltstring", + "a very much longer text to encrypt. This one even stretches over more" + "than one line.", + "$6$rounds=1400$anotherlongsalts$POfYwTEok97VWcjxIiSOjiykti.o/pQs.wP" + "vMxQ6Fm7I6IoYN3CmLs66x9t0oSwbtEW7o7UmJEiDwGqd8p4ur1" ), + + ( "$6$rounds=77777$short", + "we have a short salt string but not a short password", + "$6$rounds=77777$short$WuQyW2YR.hBNpjjRhpYD/ifIw05xdfeEyQoMxIXbkvr0g" + "ge1a1x3yRULJ5CCaUeOxFmtlcGZelFl5CxtgfiAc0" ), + + ( "$6$rounds=123456$asaltof16chars..", "a short string", + "$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywWvt0RLE8uZ4oPwc" + "elCjmw2kSYu.Ec6ycULevoBK25fs2xXgMNrCzIMVcgEJAstJeonj1" ), + + ( "$6$rounds=10$roundstoolow", "the minimum number is still observed", + "$6$rounds=1000$roundstoolow$kUMsbe306n21p9R.FRkW3IGn.S9NPN0x50YhH1x" + "hLsPuWGsUSklZt58jaTfF4ZEQpyUNGc0dqbpBYYBaHHrsX." 
), + ] + + filter_config_warnings = True # rounds too low, salt too small + + platform_crypt_support = _sha256_crypt_test.platform_crypt_support + +sha512_crypt_os_crypt_test, sha512_crypt_builtin_test = \ + _sha512_crypt_test.create_backend_cases(["os_crypt","builtin"]) + +#============================================================================= +# sun md5 crypt +#============================================================================= +class sun_md5_crypt_test(HandlerCase): + handler = hash.sun_md5_crypt + + # TODO: this scheme needs some real test vectors, especially due to + # the "bare salt" issue which plagued the official parser. + known_correct_hashes = [ + # + # http://forums.halcyoninc.com/showthread.php?t=258 + # + ("Gpcs3_adm", "$md5$zrdhpMlZ$$wBvMOEqbSjU.hu5T2VEP01"), + + # + # http://www.c0t0d0s0.org/archives/4453-Less-known-Solaris-features-On-passwords-Part-2-Using-stronger-password-hashing.html + # + ("aa12345678", "$md5$vyy8.OVF$$FY4TWzuauRl4.VQNobqMY."), + + # + # http://www.cuddletech.com/blog/pivot/entry.php?id=778 + # + ("this", "$md5$3UqYqndY$$6P.aaWOoucxxq.l00SS9k0"), + + # + # http://compgroups.net/comp.unix.solaris/password-file-in-linux-and-solaris-8-9 + # + ("passwd", "$md5$RPgLF6IJ$WTvAlUJ7MqH5xak2FMEwS/"), + + # + # source: http://solaris-training.com/301_HTML/docs/deepdiv.pdf page 27 + # FIXME: password unknown + # "$md5,rounds=8000$kS9FT1JC$$mnUrRO618lLah5iazwJ9m1" + + # + # source: http://www.visualexams.com/310-303.htm + # XXX: this has 9 salt chars unlike all other hashes. is that valid? + # FIXME: password unknown + # "$md5,rounds=2006$2amXesSj5$$kCF48vfPsHDjlKNXeEw7V." + # + + # + # custom + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, '$md5,rounds=5000$10VYDzAA$$1arAVtMA3trgE1qJ2V0Ez1'), + ] + + known_correct_configs = [ + # (config, secret, hash) + + #--------------------------- + # test salt string handling + # + # these tests attempt to verify that passlib is handling + # the "bare salt" issue (see sun md5 crypt docs) + # in a sane manner + #--------------------------- + + # config with "$" suffix, hash strings with "$$" suffix, + # should all be treated the same, with one "$" added to salt digest. + ("$md5$3UqYqndY$", + "this", "$md5$3UqYqndY$$6P.aaWOoucxxq.l00SS9k0"), + ("$md5$3UqYqndY$$......................", + "this", "$md5$3UqYqndY$$6P.aaWOoucxxq.l00SS9k0"), + + # config with no suffix, hash strings with "$" suffix, + # should all be treated the same, and no suffix added to salt digest. + # NOTE: this is just a guess re: config w/ no suffix, + # but otherwise there's no sane way to encode bare_salt=False + # within config string. + ("$md5$3UqYqndY", + "this", "$md5$3UqYqndY$HIZVnfJNGCPbDZ9nIRSgP1"), + ("$md5$3UqYqndY$......................", + "this", "$md5$3UqYqndY$HIZVnfJNGCPbDZ9nIRSgP1"), + ] + + known_malformed_hashes = [ + # unexpected end of hash + "$md5,rounds=5000", + + # bad rounds + "$md5,rounds=500A$xxxx", + "$md5,rounds=0500$xxxx", + "$md5,rounds=0$xxxx", + + # bad char in otherwise correct hash + "$md5$RPgL!6IJ$WTvAlUJ7MqH5xak2FMEwS/", + + # digest too short + "$md5$RPgLa6IJ$WTvAlUJ7MqH5xak2FMEwS", + + # digest too long + "$md5$RPgLa6IJ$WTvAlUJ7MqH5xak2FMEwS/.", + + # 2+ "$" at end of salt in config + # NOTE: not sure what correct behavior is, so forbidding format for now. + "$md5$3UqYqndY$$", + + # 3+ "$" at end of salt in hash + # NOTE: not sure what correct behavior is, so forbidding format for now. 
+ "$md5$RPgLa6IJ$$$WTvAlUJ7MqH5xak2FMEwS/", + + ] + + platform_crypt_support = [ + ("solaris", True), + ("freebsd|openbsd|netbsd|linux|darwin", False), + ] + + def do_verify(self, secret, hash): + # override to fake error for "$..." hash strings listed in known_config. + # these have to be hash strings, in order to test bare salt issue. + if isinstance(hash, str) and hash.endswith("$......................"): + raise ValueError("pretending '$.' hash is config string") + return self.handler.verify(secret, hash) + +#============================================================================= +# unix disabled / fallback +#============================================================================= +class unix_disabled_test(HandlerCase): + handler = hash.unix_disabled +# accepts_all_hashes = True # TODO: turn this off. + is_disabled_handler = True + + known_correct_hashes = [ + # everything should hash to "!" (or "*" on BSD), + # and nothing should verify against either string + ("password", "!"), + (UPASS_TABLE, "*"), + ] + + known_unidentified_hashes = [ + # should never identify anything crypt() could return... + "$1$xxx", + "abc", + "./az", + "{SHA}xxx", + ] + + def test_76_hash_border(self): + # so empty strings pass + self.accepts_all_hashes = True + super(unix_disabled_test, self).test_76_hash_border() + + def test_90_special(self): + "test marker option & special behavior" + handler = self.handler + + # preserve hash if provided + self.assertEqual(handler.genhash("stub", "!asd"), "!asd") + + # use marker if no hash + self.assertEqual(handler.genhash("stub", None), handler.default_marker) + + # custom marker + self.assertEqual(handler.genhash("stub", None, marker="*xxx"), "*xxx") + + # reject invalid marker + self.assertRaises(ValueError, handler.genhash, 'stub', None, marker='abc') + +class unix_fallback_test(HandlerCase): + handler = hash.unix_fallback + accepts_all_hashes = True + is_disabled_handler = True + + known_correct_hashes = [ + # *everything* should hash to "!", and nothing should verify + ("password", "!"), + (UPASS_TABLE, "!"), + ] + + # silence annoying deprecation warning + def setUp(self): + super(unix_fallback_test, self).setUp() + warnings.filterwarnings("ignore", "'unix_fallback' is deprecated") + + def test_90_wildcard(self): + "test enable_wildcard flag" + h = self.handler + self.assertTrue(h.verify('password','', enable_wildcard=True)) + self.assertFalse(h.verify('password','')) + for c in ("!*x"): + self.assertFalse(h.verify('password',c, enable_wildcard=True)) + self.assertFalse(h.verify('password',c)) + + def test_91_preserves_existing(self): + "test preserves existing disabled hash" + handler = self.handler + + # use marker if no hash + self.assertEqual(handler.genhash("stub", None), "!") + + # use hash if provided and valid + self.assertEqual(handler.genhash("stub", "!asd"), "!asd") + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_handlers_bcrypt.py b/passlib/tests/test_handlers_bcrypt.py new file mode 100644 index 00000000..b12759d1 --- /dev/null +++ b/passlib/tests/test_handlers_bcrypt.py @@ -0,0 +1,472 @@ +"""passlib.tests.test_handlers_bcrypt - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import 
hashlib +import logging; log = logging.getLogger(__name__) +import os +import sys +import warnings +# site +# pkg +from passlib import hash +from passlib.utils import repeat_string +from passlib.utils.compat import irange, PY3, u, get_method_function +from passlib.tests.utils import TestCase, HandlerCase, skipUnless, \ + TEST_MODE, b, catch_warnings, UserHandlerMixin, randintgauss, EncodingHandlerMixin +from passlib.tests.test_handlers import UPASS_WAV, UPASS_USD, UPASS_TABLE +# module + +#============================================================================= +# bcrypt +#============================================================================= +class _bcrypt_test(HandlerCase): + "base for BCrypt test cases" + handler = hash.bcrypt + secret_size = 72 + reduce_default_rounds = True + fuzz_salts_need_bcrypt_repair = True + + known_correct_hashes = [ + # + # from JTR 1.7.9 + # + ('U*U*U*U*', '$2a$05$c92SVSfjeiCD6F2nAD6y0uBpJDjdRkt0EgeC4/31Rf2LUZbDRDE.O'), + ('U*U***U', '$2a$05$WY62Xk2TXZ7EvVDQ5fmjNu7b0GEzSzUXUh2cllxJwhtOeMtWV3Ujq'), + ('U*U***U*', '$2a$05$Fa0iKV3E2SYVUlMknirWU.CFYGvJ67UwVKI1E2FP6XeLiZGcH3MJi'), + ('*U*U*U*U', '$2a$05$.WRrXibc1zPgIdRXYfv.4uu6TD1KWf0VnHzq/0imhUhuxSxCyeBs2'), + ('', '$2a$05$Otz9agnajgrAe0.kFVF9V.tzaStZ2s1s4ZWi/LY4sw2k/MTVFj/IO'), + + # + # test vectors from http://www.openwall.com/crypt v1.2 + # note that this omits any hashes that depend on crypt_blowfish's + # various CVE-2011-2483 workarounds (hash 2a and \xff\xff in password, + # and any 2x hashes); and only contain hashes which are correct + # under both crypt_blowfish 1.2 AND OpenBSD. + # + ('U*U', '$2a$05$CCCCCCCCCCCCCCCCCCCCC.E5YPO9kmyuRGyh0XouQYb4YMJKvyOeW'), + ('U*U*', '$2a$05$CCCCCCCCCCCCCCCCCCCCC.VGOzA784oUp/Z0DY336zx7pLYAy0lwK'), + ('U*U*U', '$2a$05$XXXXXXXXXXXXXXXXXXXXXOAcXxm9kjPGEMsLznoKqmqw7tc8WCx4a'), + ('', '$2a$05$CCCCCCCCCCCCCCCCCCCCC.7uG0VCzI2bS7j6ymqJi9CdcdxiRTWNy'), + ('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789chars after 72 are ignored', + '$2a$05$abcdefghijklmnopqrstuu5s2v8.iXieOjg/.AySBTTZIIVFJeBui'), + (b('\xa3'), + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.Sa7shbm4.OzKpvFnX1pQLmQW96oUlCq'), + (b('\xff\xa3345'), + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.nRht2l/HRhr6zmCp9vYUvvsqynflf9e'), + (b('\xa3ab'), + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.6IflQkJytoRVc1yuaNtHfiuq.FRlSIS'), + (b('\xaa')*72 + b('chars after 72 are ignored as usual'), + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.swQOIzjOiJ9GHEPuhEkvqrUyvWhEMx6'), + (b('\xaa\x55'*36), + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.R9xrDjiycxMbQE2bp.vgqlYpW5wx2yy'), + (b('\x55\xaa\xff'*24), + '$2a$05$/OK.fbVrR/bpIqNJ5ianF.9tQZzcJfm3uj2NvJ/n5xkhpqLrMpWCe'), + + # keeping one of their 2y tests, because we are supporting that. 
+ (b('\xa3'), + '$2y$05$/OK.fbVrR/bpIqNJ5ianF.Sa7shbm4.OzKpvFnX1pQLmQW96oUlCq'), + + # + # from py-bcrypt tests + # + ('', '$2a$06$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s.'), + ('a', '$2a$10$k87L/MF28Q673VKh8/cPi.SUl7MU/rWuSiIDDFayrKk/1tBsSQu4u'), + ('abc', '$2a$10$WvvTPHKwdBJ3uk0Z37EMR.hLA2W6N9AEBhEgrAOljy2Ae5MtaSIUi'), + ('abcdefghijklmnopqrstuvwxyz', + '$2a$10$fVH8e28OQRj9tqiDXs1e1uxpsjN0c7II7YPKXua2NAKYvM6iQk7dq'), + ('~!@#$%^&*() ~!@#$%^&*()PNBFRD', + '$2a$10$LgfYWkbzEvQ4JakH7rOvHe0y8pHKF9OaFgwUZ2q7W2FFZmZzJYlfS'), + + # + # custom test vectors + # + + # ensures utf-8 used for unicode + (UPASS_TABLE, + '$2a$05$Z17AXnnlpzddNUvnC6cZNOSwMA/8oNiKnHTHTwLlBijfucQQlHjaG'), + ] + + if TEST_MODE("full"): + # + # add some extra tests related to 2/2a + # + CONFIG_2 = '$2$05$' + '.'*22 + CONFIG_A = '$2a$05$' + '.'*22 + known_correct_hashes.extend([ + ("", CONFIG_2 + 'J2ihDv8vVf7QZ9BsaRrKyqs2tkn55Yq'), + ("", CONFIG_A + 'J2ihDv8vVf7QZ9BsaRrKyqs2tkn55Yq'), + ("abc", CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc", CONFIG_A + 'ev6gDwpVye3oMCUpLY85aTpfBNHD0Ga'), + ("abc"*23, CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*23, CONFIG_A + '2kIdfSj/4/R/Q6n847VTvc68BXiRYZC'), + ("abc"*24, CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*24, CONFIG_A + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*24+'x', CONFIG_2 + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ("abc"*24+'x', CONFIG_A + 'XuQjdH.wPVNUZ/bOfstdW/FqB8QSjte'), + ]) + + known_correct_configs = [ + ('$2a$04$uM6csdM8R9SXTex/gbTaye', UPASS_TABLE, + '$2a$04$uM6csdM8R9SXTex/gbTayezuvzFEufYGd2uB6of7qScLjQ4GwcD4G'), + ] + + known_unidentified_hashes = [ + # invalid minor version + "$2b$12$EXRkfkdmXnagzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + "$2`$12$EXRkfkdmXnagzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + # \/ + "$2a$12$EXRkfkdmXn!gzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + + # unsupported (but recognized) minor version + "$2x$12$EXRkfkdmXnagzds2SSitu.MW9.gAVqa9eLS1//RYtYCmB1eLHg.9q", + + # rounds not zero-padded (py-bcrypt rejects this, therefore so do we) + '$2a$6$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s.' + + # NOTE: salts with padding bits set are technically malformed, + # but we can reliably correct & issue a warning for that. + ] + + platform_crypt_support = [ + ("freedbsd|openbsd|netbsd", True), + ("darwin", False), + # linux - may be present via addon, e.g. debian's libpam-unix2 + # solaris - depends on policy + ] + + #=================================================================== + # override some methods + #=================================================================== + def setUp(self): + # ensure builtin is enabled for duration of test. + if TEST_MODE("full") and self.backend == "builtin": + key = "PASSLIB_BUILTIN_BCRYPT" + orig = os.environ.get(key) + if orig: + self.addCleanup(os.environ.__setitem__, key, orig) + else: + self.addCleanup(os.environ.__delitem__, key) + os.environ[key] = "enabled" + super(_bcrypt_test, self).setUp() + + def populate_settings(self, kwds): + # builtin is still just way too slow. 
+ if self.backend == "builtin": + kwds.setdefault("rounds", 4) + super(_bcrypt_test, self).populate_settings(kwds) + + #=================================================================== + # fuzz testing + #=================================================================== + def os_supports_ident(self, hash): + "check if OS crypt is expected to support given ident" + if hash is None: + return True + # most OSes won't support 2x/2y + # XXX: definitely not the BSDs, but what about the linux variants? + from passlib.handlers.bcrypt import IDENT_2X, IDENT_2Y + if hash.startswith(IDENT_2X) or hash.startswith(IDENT_2Y): + return False + return True + + def fuzz_verifier_bcrypt(self): + # test against bcrypt, if available + from passlib.handlers.bcrypt import IDENT_2, IDENT_2A, IDENT_2X, IDENT_2Y + from passlib.utils import to_native_str, to_bytes + try: + import bcrypt + except ImportError: + return + if not hasattr(bcrypt, "_ffi"): + return + def check_bcrypt(secret, hash): + "bcrypt" + secret = to_bytes(secret, self.fuzz_password_encoding) + #if hash.startswith(IDENT_2Y): + # hash = IDENT_2A + hash[4:] + if hash.startswith(IDENT_2): + # bcryptor doesn't support $2$ hashes; but we can fake it + # using the $2a$ algorithm, by repeating the password until + # it's 72 chars in length. + hash = IDENT_2A + hash[3:] + if secret: + secret = repeat_string(secret, 72) + hash = to_bytes(hash) + try: + return bcrypt.hashpw(secret, hash) == hash + except ValueError: + raise ValueError("bcrypt rejected hash: %r" % (hash,)) + return check_bcrypt + + def fuzz_verifier_pybcrypt(self): + # test against py-bcrypt, if available + from passlib.handlers.bcrypt import IDENT_2, IDENT_2A, IDENT_2X, IDENT_2Y + from passlib.utils import to_native_str + try: + import bcrypt + except ImportError: + return + if hasattr(bcrypt, "_ffi"): + return + def check_pybcrypt(secret, hash): + "pybcrypt" + secret = to_native_str(secret, self.fuzz_password_encoding) + if hash.startswith(IDENT_2Y): + hash = IDENT_2A + hash[4:] + try: + return bcrypt.hashpw(secret, hash) == hash + except ValueError: + raise ValueError("py-bcrypt rejected hash: %r" % (hash,)) + return check_pybcrypt + + def fuzz_verifier_bcryptor(self): + # test against bcryptor, if available + from passlib.handlers.bcrypt import IDENT_2, IDENT_2A, IDENT_2Y + from passlib.utils import to_native_str + try: + from bcryptor.engine import Engine + except ImportError: + return + def check_bcryptor(secret, hash): + "bcryptor" + secret = to_native_str(secret, self.fuzz_password_encoding) + if hash.startswith(IDENT_2Y): + hash = IDENT_2A + hash[4:] + elif hash.startswith(IDENT_2): + # bcryptor doesn't support $2$ hashes; but we can fake it + # using the $2a$ algorithm, by repeating the password until + # it's 72 chars in length. + hash = IDENT_2A + hash[3:] + if secret: + secret = repeat_string(secret, 72) + return Engine(False).hash_key(secret, hash) == hash + return check_bcryptor + + def get_fuzz_settings(self): + secret, other, kwds = super(_bcrypt_test,self).get_fuzz_settings() + from passlib.handlers.bcrypt import IDENT_2, IDENT_2X + from passlib.utils import to_bytes + ident = kwds.get('ident') + if ident == IDENT_2X: + # 2x is just recognized, not supported. don't test with it. + del kwds['ident'] + elif ident == IDENT_2 and other and repeat_string(to_bytes(other), len(to_bytes(secret))) == to_bytes(secret): + # avoid false failure due to flaw in 0-revision bcrypt: + # repeated strings like 'abc' and 'abcabc' hash identically. 
+ other = self.get_fuzz_password() + return secret, other, kwds + + def fuzz_setting_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. + return randintgauss(5, 8, 6, 1) + + #=================================================================== + # custom tests + #=================================================================== + known_incorrect_padding = [ + # password, bad hash, good hash + + # 2 bits of salt padding set +# ("loppux", # \/ +# "$2a$12$oaQbBqq8JnSM1NHRPQGXORm4GCUMqp7meTnkft4zgSnrbhoKdDV0C", +# "$2a$12$oaQbBqq8JnSM1NHRPQGXOOm4GCUMqp7meTnkft4zgSnrbhoKdDV0C"), + ("test", # \/ + '$2a$04$oaQbBqq8JnSM1NHRPQGXORY4Vw3bdHKLIXTecPDRAcJ98cz1ilveO', + '$2a$04$oaQbBqq8JnSM1NHRPQGXOOY4Vw3bdHKLIXTecPDRAcJ98cz1ilveO'), + + # all 4 bits of salt padding set +# ("Passlib11", # \/ +# "$2a$12$M8mKpW9a2vZ7PYhq/8eJVcUtKxpo6j0zAezu0G/HAMYgMkhPu4fLK", +# "$2a$12$M8mKpW9a2vZ7PYhq/8eJVOUtKxpo6j0zAezu0G/HAMYgMkhPu4fLK"), + ("test", # \/ + "$2a$04$yjDgE74RJkeqC0/1NheSScrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS", + "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS"), + + # bad checksum padding + ("test", # \/ + "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIV", + "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS"), + ] + + def test_90_bcrypt_padding(self): + "test passlib correctly handles bcrypt padding bits" + self.require_TEST_MODE("full") + # + # prevents reccurrence of issue 25 (https://code.google.com/p/passlib/issues/detail?id=25) + # were some unused bits were incorrectly set in bcrypt salt strings. + # (fixed since 1.5.3) + # + bcrypt = self.handler + corr_desc = ".*incorrectly set padding bits" + + # + # test encrypt() / genconfig() don't generate invalid salts anymore + # + def check_padding(hash): + assert hash.startswith("$2a$") and len(hash) >= 28 + self.assertTrue(hash[28] in '.Oeu', + "unused bits incorrectly set in hash: %r" % (hash,)) + for i in irange(6): + check_padding(bcrypt.genconfig()) + for i in irange(3): + check_padding(bcrypt.encrypt("bob", rounds=bcrypt.min_rounds)) + + # + # test genconfig() corrects invalid salts & issues warning. + # + with self.assertWarningList(["salt too large", corr_desc]): + hash = bcrypt.genconfig(salt="."*21 + "A.", rounds=5, relaxed=True) + self.assertEqual(hash, "$2a$05$" + "." 
* 22) + + # + # make sure genhash() corrects input + # + samples = self.known_incorrect_padding + for pwd, bad, good in samples: + with self.assertWarningList([corr_desc]): + self.assertEqual(bcrypt.genhash(pwd, bad), good) + with self.assertWarningList([]): + self.assertEqual(bcrypt.genhash(pwd, good), good) + + # + # and that verify() works good & bad + # + with self.assertWarningList([corr_desc]): + self.assertTrue(bcrypt.verify(pwd, bad)) + with self.assertWarningList([]): + self.assertTrue(bcrypt.verify(pwd, good)) + + # + # test normhash cleans things up correctly + # + for pwd, bad, good in samples: + with self.assertWarningList([corr_desc]): + self.assertEqual(bcrypt.normhash(bad), good) + with self.assertWarningList([]): + self.assertEqual(bcrypt.normhash(good), good) + self.assertEqual(bcrypt.normhash("$md5$abc"), "$md5$abc") + +hash.bcrypt._no_backends_msg() # call this for coverage purposes + +# create test cases for specific backends +bcrypt_bcrypt_test, bcrypt_pybcrypt_test, bcrypt_bcryptor_test, bcrypt_os_crypt_test, bcrypt_builtin_test = \ + _bcrypt_test.create_backend_cases(["bcrypt", "pybcrypt", "bcryptor", "os_crypt", "builtin"]) + +#============================================================================= +# bcrypt +#============================================================================= +class _bcrypt_sha256_test(HandlerCase): + "base for BCrypt-SHA256 test cases" + handler = hash.bcrypt_sha256 + reduce_default_rounds = True + forbidden_characters = None + fuzz_salts_need_bcrypt_repair = True + fallback_os_crypt_handler = hash.bcrypt + + known_correct_hashes = [ + # + # custom test vectors + # + + # empty + ("", + '$bcrypt-sha256$2a,5$E/e/2AOhqM5W/KJTFQzLce$F6dYSxOdAEoJZO2eoHUZWZljW/e0TXO'), + + # ascii + ("password", + '$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu'), + + # unicode / utf8 + (UPASS_TABLE, + '$bcrypt-sha256$2a,5$.US1fQ4TQS.ZTz/uJ5Kyn.$QNdPDOTKKT5/sovNz1iWg26quOU4Pje'), + (UPASS_TABLE.encode("utf-8"), + '$bcrypt-sha256$2a,5$.US1fQ4TQS.ZTz/uJ5Kyn.$QNdPDOTKKT5/sovNz1iWg26quOU4Pje'), + + # test >72 chars is hashed correctly -- under bcrypt these hash the same. 
+ # NOTE: test_60_secret_size() handles this already, this is just for overkill :) + (repeat_string("abc123",72), + '$bcrypt-sha256$2a,5$X1g1nh3g0v4h6970O68cxe$r/hyEtqJ0teqPEmfTLoZ83ciAI1Q74.'), + (repeat_string("abc123",72)+"qwr", + '$bcrypt-sha256$2a,5$X1g1nh3g0v4h6970O68cxe$021KLEif6epjot5yoxk0m8I0929ohEa'), + (repeat_string("abc123",72)+"xyz", + '$bcrypt-sha256$2a,5$X1g1nh3g0v4h6970O68cxe$7.1kgpHduMGEjvM3fX6e/QCvfn6OKja'), + ] + + known_correct_configs =[ + ('$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe', + "password", '$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu'), + ] + + known_malformed_hashes = [ + # bad char in otherwise correct hash + # \/ + '$bcrypt-sha256$2a,5$5Hg1DKF!PE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unrecognized bcrypt variant + '$bcrypt-sha256$2c,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # unsupported bcrypt variant + '$bcrypt-sha256$2x,5$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # rounds zero-padded + '$bcrypt-sha256$2a,05$5Hg1DKFqPE8C2aflZ5vVoe$12BjNE0p7axMg55.Y/mHsYiVuFBDQyu', + + # config string w/ $ added + '$bcrypt-sha256$2a,5$5Hg1DKFqPE8C2aflZ5vVoe$', + ] + + #=================================================================== + # override some methods -- cloned from bcrypt + #=================================================================== + def setUp(self): + # ensure builtin is enabled for duration of test. + if TEST_MODE("full") and self.backend == "builtin": + key = "PASSLIB_BUILTIN_BCRYPT" + orig = os.environ.get(key) + if orig: + self.addCleanup(os.environ.__setitem__, key, orig) + else: + self.addCleanup(os.environ.__delitem__, key) + os.environ[key] = "enabled" + super(_bcrypt_sha256_test, self).setUp() + + def populate_settings(self, kwds): + # builtin is still just way too slow. + if self.backend == "builtin": + kwds.setdefault("rounds", 4) + super(_bcrypt_sha256_test, self).populate_settings(kwds) + + #=================================================================== + # override ident tests for now + #=================================================================== + def test_30_HasManyIdents(self): + raise self.skipTest("multiple idents not supported") + + def test_30_HasOneIdent(self): + # forbidding ident keyword, we only support "2a" for now + handler = self.handler + handler(use_defaults=True) + self.assertRaises(ValueError, handler, ident="$2y$", use_defaults=True) + + #=================================================================== + # fuzz testing -- cloned from bcrypt + #=================================================================== + def fuzz_setting_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. 
+ return randintgauss(5, 8, 6, 1) + +# create test cases for specific backends +bcrypt_sha256_bcrypt_test, bcrypt_sha256_pybcrypt_test, bcrypt_sha256_bcryptor_test, bcrypt_sha256_os_crypt_test, bcrypt_sha256_builtin_test = \ + _bcrypt_sha256_test.create_backend_cases(["bcrypt", "pybcrypt", "bcryptor", "os_crypt", "builtin"]) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_handlers_django.py b/passlib/tests/test_handlers_django.py new file mode 100644 index 00000000..2d516ae2 --- /dev/null +++ b/passlib/tests/test_handlers_django.py @@ -0,0 +1,366 @@ +"""passlib.tests.test_handlers_django - tests for passlib hash algorithms""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import hashlib +import logging; log = logging.getLogger(__name__) +import os +import warnings +# site +# pkg +from passlib import hash +from passlib.utils import repeat_string +from passlib.utils.compat import irange, PY3, u, get_method_function +from passlib.tests.utils import TestCase, HandlerCase, skipUnless, \ + TEST_MODE, b, catch_warnings, UserHandlerMixin, randintgauss, EncodingHandlerMixin +from passlib.tests.test_handlers import UPASS_WAV, UPASS_USD, UPASS_TABLE +# module + +#============================================================================= +# django +#============================================================================= + +# standard string django uses +UPASS_LETMEIN = u('l\xe8tmein') + +def vstr(version): + return ".".join(str(e) for e in version) + +class _DjangoHelper(object): + # NOTE: not testing against Django < 1.0 since it doesn't support + # most of these hash formats. + + # flag that hash wasn't added until specified version + min_django_version = () + + def fuzz_verifier_django(self): + from passlib.tests.test_ext_django import DJANGO_VERSION + # check_password() not added until 1.0 + min_django_version = max(self.min_django_version, (1,0)) + if DJANGO_VERSION < min_django_version: + return None + from django.contrib.auth.models import check_password + def verify_django(secret, hash): + "django/check_password" + if (1,4) <= DJANGO_VERSION < (1,6) and not secret: + return "skip" + if self.handler.name == "django_bcrypt" and hash.startswith("bcrypt$$2y$"): + hash = hash.replace("$$2y$", "$$2a$") + if DJANGO_VERSION >= (1,5) and self.django_has_encoding_glitch and isinstance(secret, bytes): + # e.g. unsalted_md5 on 1.5 and higher try to combine + # salt + password before encoding to bytes, leading to ascii error. + # this works around that issue. 
+ secret = secret.decode("utf-8") + return check_password(secret, hash) + return verify_django + + def test_90_django_reference(self): + "run known correct hashes through Django's check_password()" + from passlib.tests.test_ext_django import DJANGO_VERSION + # check_password() not added until 1.0 + min_django_version = max(self.min_django_version, (1,0)) + if DJANGO_VERSION < min_django_version: + raise self.skipTest("Django >= %s not installed" % vstr(min_django_version)) + from django.contrib.auth.models import check_password + assert self.known_correct_hashes + for secret, hash in self.iter_known_hashes(): + if (1,4) <= DJANGO_VERSION < (1,6) and not secret: + # django 1.4-1.5 rejects empty passwords + self.assertFalse(check_password(secret, hash), + "empty string should not have verified") + continue + self.assertTrue(check_password(secret, hash), + "secret=%r hash=%r failed to verify" % + (secret, hash)) + self.assertFalse(check_password('x' + secret, hash), + "mangled secret=%r hash=%r incorrect verified" % + (secret, hash)) + + django_has_encoding_glitch = False + + def test_91_django_generation(self): + "test against output of Django's make_password()" + from passlib.tests.test_ext_django import DJANGO_VERSION + # make_password() not added until 1.4 + min_django_version = max(self.min_django_version, (1,4)) + if DJANGO_VERSION < min_django_version: + raise self.skipTest("Django >= %s not installed" % vstr(min_django_version)) + from passlib.utils import tick + from django.contrib.auth.hashers import make_password + name = self.handler.django_name # set for all the django_* handlers + end = tick() + self.max_fuzz_time/2 + while tick() < end: + secret, other = self.get_fuzz_password_pair() + if not secret: # django 1.4 rejects empty passwords. + continue + if DJANGO_VERSION >= (1,5) and self.django_has_encoding_glitch and isinstance(secret, bytes): + # e.g. unsalted_md5 on 1.5 and higher try to combine + # salt + password before encoding to bytes, leading to ascii error. + # this works around that issue. + secret = secret.decode("utf-8") + hash = make_password(secret, hasher=name) + self.assertTrue(self.do_identify(hash)) + self.assertTrue(self.do_verify(secret, hash)) + self.assertFalse(self.do_verify(other, hash)) + +class django_disabled_test(HandlerCase): + "test django_disabled" + handler = hash.django_disabled + is_disabled_handler = True + + known_correct_hashes = [ + # *everything* should hash to "!", and nothing should verify + ("password", "!"), + ("", "!"), + (UPASS_TABLE, "!"), + ] + + known_alternate_hashes = [ + # django 1.6 appends random alpnum string + ("!9wa845vn7098ythaehasldkfj", "password", "!"), + ] + +class django_des_crypt_test(HandlerCase, _DjangoHelper): + "test django_des_crypt" + handler = hash.django_des_crypt + secret_size = 8 + + known_correct_hashes = [ + # ensures only first two digits of salt count. 
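+ # (the checksum field holds a full des_crypt hash whose first two chars repeat the salt,
+ # so anything past the first two chars of the django salt field is ignored)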
+ ("password", 'crypt$c2$c2M87q...WWcU'), + ("password", 'crypt$c2e86$c2M87q...WWcU'), + ("passwordignoreme", 'crypt$c2.AZ$c2M87q...WWcU'), + + # ensures utf-8 used for unicode + (UPASS_USD, 'crypt$c2e86$c2hN1Bxd6ZiWs'), + (UPASS_TABLE, 'crypt$0.aQs$0.wB.TT0Czvlo'), + (u("hell\u00D6"), "crypt$sa$saykDgk3BPZ9E"), + + # prevent regression of issue 22 + ("foo", 'crypt$MNVY.9ajgdvDQ$MNVY.9ajgdvDQ'), + ] + + known_alternate_hashes = [ + # ensure django 1.4 empty salt field is accepted; + # but that salt field is re-filled (for django 1.0 compatibility) + ('crypt$$c2M87q...WWcU', "password", 'crypt$c2$c2M87q...WWcU'), + ] + + known_unidentified_hashes = [ + 'sha1$aa$bb', + ] + + known_malformed_hashes = [ + # checksum too short + 'crypt$c2$c2M87q', + + # salt must be >2 + 'crypt$f$c2M87q...WWcU', + + # make sure first 2 chars of salt & chk field agree. + 'crypt$ffe86$c2M87q...WWcU', + ] + +class django_salted_md5_test(HandlerCase, _DjangoHelper): + "test django_salted_md5" + handler = hash.django_salted_md5 + + django_has_encoding_glitch = True + + known_correct_hashes = [ + # test extra large salt + ("password", 'md5$123abcdef$c8272612932975ee80e8a35995708e80'), + + # test django 1.4 alphanumeric salt + ("test", 'md5$3OpqnFAHW5CT$54b29300675271049a1ebae07b395e20'), + + # ensures utf-8 used for unicode + (UPASS_USD, 'md5$c2e86$92105508419a81a6babfaecf876a2fa0'), + (UPASS_TABLE, 'md5$d9eb8$01495b32852bffb27cf5d4394fe7a54c'), + ] + + known_unidentified_hashes = [ + 'sha1$aa$bb', + ] + + known_malformed_hashes = [ + # checksum too short + 'md5$aa$bb', + ] + + def fuzz_setting_salt_size(self): + # workaround for django14 regression -- + # 1.4 won't accept hashes with empty salt strings, unlike 1.3 and earlier. + # looks to be fixed in a future release -- https://code.djangoproject.com/ticket/18144 + # for now, we avoid salt_size==0 under 1.4 + handler = self.handler + from passlib.tests.test_ext_django import has_django14 + default = handler.default_salt_size + assert handler.min_salt_size == 0 + lower = 1 if has_django14 else 0 + upper = handler.max_salt_size or default*4 + return randintgauss(lower, upper, default, default*.5) + +class django_salted_sha1_test(HandlerCase, _DjangoHelper): + "test django_salted_sha1" + handler = hash.django_salted_sha1 + + django_has_encoding_glitch = True + + known_correct_hashes = [ + # test extra large salt + ("password",'sha1$123abcdef$e4a1877b0e35c47329e7ed7e58014276168a37ba'), + + # test django 1.4 alphanumeric salt + ("test", 'sha1$bcwHF9Hy8lxS$6b4cfa0651b43161c6f1471ce9523acf1f751ba3'), + + # ensures utf-8 used for unicode + (UPASS_USD, 'sha1$c2e86$0f75c5d7fbd100d587c127ef0b693cde611b4ada'), + (UPASS_TABLE, 'sha1$6d853$ef13a4d8fb57aed0cb573fe9c82e28dc7fd372d4'), + + # generic password + ("MyPassword", 'sha1$54123$893cf12e134c3c215f3a76bd50d13f92404a54d3'), + ] + + known_unidentified_hashes = [ + 'md5$aa$bb', + ] + + known_malformed_hashes = [ + # checksum too short + 'sha1$c2e86$0f75', + ] + + fuzz_setting_salt_size = get_method_function(django_salted_md5_test.fuzz_setting_salt_size) + +class django_pbkdf2_sha256_test(HandlerCase, _DjangoHelper): + "test django_pbkdf2_sha256" + handler = hash.django_pbkdf2_sha256 + min_django_version = (1,4) + + known_correct_hashes = [ + # + # custom - generated via django 1.4 hasher + # + ('not a password', + 'pbkdf2_sha256$10000$kjVJaVz6qsnJ$5yPHw3rwJGECpUf70daLGhOrQ5+AMxIJdz1c3bqK1Rs='), + (UPASS_TABLE, + 'pbkdf2_sha256$10000$bEwAfNrH1TlQ$OgYUblFNUX1B8GfMqaCYUK/iHyO0pa7STTDdaEJBuY0='), + ] + +class 
django_pbkdf2_sha1_test(HandlerCase, _DjangoHelper): + "test django_pbkdf2_sha1" + handler = hash.django_pbkdf2_sha1 + min_django_version = (1,4) + + known_correct_hashes = [ + # + # custom - generated via django 1.4 hashers + # + ('not a password', + 'pbkdf2_sha1$10000$wz5B6WkasRoF$atJmJ1o+XfJxKq1+Nu1f1i57Z5I='), + (UPASS_TABLE, + 'pbkdf2_sha1$10000$KZKWwvqb8BfL$rw5pWsxJEU4JrZAQhHTCO+u0f5Y='), + ] + +class django_bcrypt_test(HandlerCase, _DjangoHelper): + "test django_bcrypt" + handler = hash.django_bcrypt + secret_size = 72 + min_django_version = (1,4) + fuzz_salts_need_bcrypt_repair = True + + known_correct_hashes = [ + # + # just copied and adapted a few test vectors from bcrypt (above), + # since django_bcrypt is just a wrapper for the real bcrypt class. + # + ('', 'bcrypt$$2a$06$DCq7YPn5Rq63x1Lad4cll.TV4S6ytwfsfvkgY8jIucDrjc8deX1s.'), + ('abcdefghijklmnopqrstuvwxyz', + 'bcrypt$$2a$10$fVH8e28OQRj9tqiDXs1e1uxpsjN0c7II7YPKXua2NAKYvM6iQk7dq'), + (UPASS_TABLE, + 'bcrypt$$2a$05$Z17AXnnlpzddNUvnC6cZNOSwMA/8oNiKnHTHTwLlBijfucQQlHjaG'), + ] + + # NOTE: the following have been cloned from _bcrypt_test() + + def populate_settings(self, kwds): + # speed up test w/ lower rounds + kwds.setdefault("rounds", 4) + super(django_bcrypt_test, self).populate_settings(kwds) + + def fuzz_setting_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. + return randintgauss(5, 8, 6, 1) + + def fuzz_setting_ident(self): + # omit multi-ident tests, only $2a$ counts for this class + return None + +django_bcrypt_test = skipUnless(hash.bcrypt.has_backend(), + "no bcrypt backends available")(django_bcrypt_test) + +class django_bcrypt_sha256_test(HandlerCase, _DjangoHelper): + "test django_bcrypt_sha256" + handler = hash.django_bcrypt_sha256 + min_django_version = (1,6) + forbidden_characters = None + fuzz_salts_need_bcrypt_repair = True + + known_correct_hashes = [ + # + # custom - generated via django 1.6 hasher + # + ('', + 'bcrypt_sha256$$2a$06$/3OeRpbOf8/l6nPPRdZPp.nRiyYqPobEZGdNRBWihQhiFDh1ws1tu'), + (UPASS_LETMEIN, + 'bcrypt_sha256$$2a$08$NDjSAIcas.EcoxCRiArvT.MkNiPYVhrsrnJsRkLueZOoV1bsQqlmC'), + (UPASS_TABLE, + 'bcrypt_sha256$$2a$06$kCXUnRFQptGg491siDKNTu8RxjBGSjALHRuvhPYNFsa4Ea5d9M48u'), + + # test >72 chars is hashed correctly -- under bcrypt these hash the same. + (repeat_string("abc123",72), + 'bcrypt_sha256$$2a$06$Tg/oYyZTyAf.Nb3qSgN61OySmyXA8FoY4PjGizjE1QSDfuL5MXNni'), + (repeat_string("abc123",72)+"qwr", + 'bcrypt_sha256$$2a$06$Tg/oYyZTyAf.Nb3qSgN61Ocy0BEz1RK6xslSNi8PlaLX2pe7x/KQG'), + (repeat_string("abc123",72)+"xyz", + 'bcrypt_sha256$$2a$06$Tg/oYyZTyAf.Nb3qSgN61OvY2zoRVUa2Pugv2ExVOUT2YmhvxUFUa'), + ] + + known_malformed_hashers = [ + # data in django salt field + 'bcrypt_sha256$xyz$2a$06$/3OeRpbOf8/l6nPPRdZPp.nRiyYqPobEZGdNRBWihQhiFDh1ws1tu', + ] + + def test_30_HasManyIdents(self): + raise self.skipTest("multiple idents not supported") + + def test_30_HasOneIdent(self): + # forbidding ident keyword, django doesn't support configuring this + handler = self.handler + handler(use_defaults=True) + self.assertRaises(TypeError, handler, ident="$2a$", use_defaults=True) + + # NOTE: the following have been cloned from _bcrypt_test() + + def populate_settings(self, kwds): + # speed up test w/ lower rounds + kwds.setdefault("rounds", 4) + super(django_bcrypt_sha256_test, self).populate_settings(kwds) + + def fuzz_setting_rounds(self): + # decrease default rounds for fuzz testing to speed up volume. 
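+ # (bcrypt rounds are a log2 cost factor, so 5-8 is already cheap enough for fuzzing)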
+ return randintgauss(5, 8, 6, 1) + + def fuzz_setting_ident(self): + # omit multi-ident tests, only $2a$ counts for this class + return None + +django_bcrypt_sha256_test = skipUnless(hash.bcrypt.has_backend(), + "no bcrypt backends available")(django_bcrypt_sha256_test) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_hosts.py b/passlib/tests/test_hosts.py new file mode 100644 index 00000000..b01a108b --- /dev/null +++ b/passlib/tests/test_hosts.py @@ -0,0 +1,98 @@ +"""test passlib.hosts""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import warnings +# site +# pkg +from passlib import hosts, hash as hashmod +from passlib.utils import unix_crypt_schemes +from passlib.tests.utils import TestCase +# module + +#============================================================================= +# test predefined app contexts +#============================================================================= +class HostsTest(TestCase): + "perform general tests to make sure contexts work" + # NOTE: these tests are not really comprehensive, + # since they would do little but duplicate + # the presets in apps.py + # + # they mainly try to ensure no typos + # or dynamic behavior foul-ups. + + def check_unix_disabled(self, ctx): + for hash in [ + "", + "!", + "*", + "!$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0", + ]: + self.assertEqual(ctx.identify(hash), 'unix_disabled') + self.assertFalse(ctx.verify('test', hash)) + + def test_linux_context(self): + ctx = hosts.linux_context + for hash in [ + ('$6$rounds=41128$VoQLvDjkaZ6L6BIE$4pt.1Ll1XdDYduEwEYPCMOBiR6W6' + 'znsyUEoNlcVXpv2gKKIbQolgmTGe6uEEVJ7azUxuc8Tf7zV9SD2z7Ij751'), + ('$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0itGny' + 'xDGgMlDcOsfaI17'), + '$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0', + 'kAJJz.Rwp0A/I', + ]: + self.assertTrue(ctx.verify("test", hash)) + self.check_unix_disabled(ctx) + + def test_bsd_contexts(self): + for ctx in [ + hosts.freebsd_context, + hosts.openbsd_context, + hosts.netbsd_context, + ]: + for hash in [ + '$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0', + 'kAJJz.Rwp0A/I', + ]: + self.assertTrue(ctx.verify("test", hash)) + h1 = "$2a$04$yjDgE74RJkeqC0/1NheSSOrvKeu9IbKDpcQf/Ox3qsrRS/Kw42qIS" + if hashmod.bcrypt.has_backend(): + self.assertTrue(ctx.verify("test", h1)) + else: + self.assertEqual(ctx.identify(h1), "bcrypt") + self.check_unix_disabled(ctx) + + def test_host_context(self): + ctx = getattr(hosts, "host_context", None) + if not ctx: + return self.skipTest("host_context not available on this platform") + + # validate schemes is non-empty, + # and contains unix_disabled + at least one real scheme + schemes = list(ctx.schemes()) + self.assertTrue(schemes, "appears to be unix system, but no known schemes supported by crypt") + self.assertTrue('unix_disabled' in schemes) + schemes.remove("unix_disabled") + self.assertTrue(schemes, "should have schemes beside fallback scheme") + self.assertTrue(set(unix_crypt_schemes).issuperset(schemes)) + + # check for hash support + self.check_unix_disabled(ctx) + for scheme, hash in [ + ("sha512_crypt", ('$6$rounds=41128$VoQLvDjkaZ6L6BIE$4pt.1Ll1XdDYduEwEYPCMOBiR6W6' + 'znsyUEoNlcVXpv2gKKIbQolgmTGe6uEEVJ7azUxuc8Tf7zV9SD2z7Ij751')), + 
("sha256_crypt", ('$5$rounds=31817$iZGmlyBQ99JSB5n6$p4E.pdPBWx19OajgjLRiOW0itGny' + 'xDGgMlDcOsfaI17')), + ("md5_crypt", '$1$TXl/FX/U$BZge.lr.ux6ekjEjxmzwz0'), + ("des_crypt", 'kAJJz.Rwp0A/I'), + ]: + if scheme in schemes: + self.assertTrue(ctx.verify("test", hash)) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_registry.py b/passlib/tests/test_registry.py new file mode 100644 index 00000000..27c5c5c6 --- /dev/null +++ b/passlib/tests/test_registry.py @@ -0,0 +1,214 @@ +"""tests for passlib.pwhash -- (c) Assurance Technologies 2003-2009""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import hashlib +from logging import getLogger +import os +import time +import warnings +import sys +# site +# pkg +from passlib import hash, registry +from passlib.registry import register_crypt_handler, register_crypt_handler_path, \ + get_crypt_handler, list_crypt_handlers, _unload_handler_name as unload_handler_name +import passlib.utils.handlers as uh +from passlib.tests.utils import TestCase, catch_warnings +# module +log = getLogger(__name__) + +#============================================================================= +# dummy handlers +# +# NOTE: these are defined outside of test case +# since they're used by test_register_crypt_handler_path(), +# which needs them to be available as module globals. +#============================================================================= +class dummy_0(uh.StaticHandler): + name = "dummy_0" + +class alt_dummy_0(uh.StaticHandler): + name = "dummy_0" + +dummy_x = 1 + +#============================================================================= +# test registry +#============================================================================= +class RegistryTest(TestCase): + + descriptionPrefix = "passlib registry" + + def tearDown(self): + for name in ("dummy_0", "dummy_1", "dummy_x", "dummy_bad"): + unload_handler_name(name) + + def test_hash_proxy(self): + "test passlib.hash proxy object" + # check dir works + dir(hash) + + # check repr works + repr(hash) + + # check non-existent attrs raise error + self.assertRaises(AttributeError, getattr, hash, 'fooey') + + # GAE tries to set __loader__, + # make sure that doesn't call register_crypt_handler. + old = getattr(hash, "__loader__", None) + test = object() + hash.__loader__ = test + self.assertIs(hash.__loader__, test) + if old is None: + del hash.__loader__ + self.assertFalse(hasattr(hash, "__loader__")) + else: + hash.__loader__ = old + self.assertIs(hash.__loader__, old) + + # check storing attr calls register_crypt_handler + class dummy_1(uh.StaticHandler): + name = "dummy_1" + hash.dummy_1 = dummy_1 + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + # check storing under wrong name results in error + self.assertRaises(ValueError, setattr, hash, "dummy_1x", dummy_1) + + def test_register_crypt_handler_path(self): + "test register_crypt_handler_path()" + # NOTE: this messes w/ internals of registry, shouldn't be used publically. 
+ paths = registry._locations + + # check namespace is clear + self.assertTrue('dummy_0' not in paths) + self.assertFalse(hasattr(hash, 'dummy_0')) + + # check invalid names are rejected + self.assertRaises(ValueError, register_crypt_handler_path, + "dummy_0", ".test_registry") + self.assertRaises(ValueError, register_crypt_handler_path, + "dummy_0", __name__ + ":dummy_0:xxx") + self.assertRaises(ValueError, register_crypt_handler_path, + "dummy_0", __name__ + ":dummy_0.xxx") + + # try lazy load + register_crypt_handler_path('dummy_0', __name__) + self.assertTrue('dummy_0' in list_crypt_handlers()) + self.assertTrue('dummy_0' not in list_crypt_handlers(loaded_only=True)) + self.assertIs(hash.dummy_0, dummy_0) + self.assertTrue('dummy_0' in list_crypt_handlers(loaded_only=True)) + unload_handler_name('dummy_0') + + # try lazy load w/ alt + register_crypt_handler_path('dummy_0', __name__ + ':alt_dummy_0') + self.assertIs(hash.dummy_0, alt_dummy_0) + unload_handler_name('dummy_0') + + # check lazy load w/ wrong type fails + register_crypt_handler_path('dummy_x', __name__) + self.assertRaises(TypeError, get_crypt_handler, 'dummy_x') + + # check lazy load w/ wrong name fails + register_crypt_handler_path('alt_dummy_0', __name__) + self.assertRaises(ValueError, get_crypt_handler, "alt_dummy_0") + + # TODO: check lazy load which calls register_crypt_handler (warning should be issued) + sys.modules.pop("passlib.tests._test_bad_register", None) + register_crypt_handler_path("dummy_bad", "passlib.tests._test_bad_register") + with catch_warnings(): + warnings.filterwarnings("ignore", "xxxxxxxxxx", DeprecationWarning) + h = get_crypt_handler("dummy_bad") + from passlib.tests import _test_bad_register as tbr + self.assertIs(h, tbr.alt_dummy_bad) + + def test_register_crypt_handler(self): + "test register_crypt_handler()" + + self.assertRaises(TypeError, register_crypt_handler, {}) + + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name=None))) + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="AB_CD"))) + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="ab-cd"))) + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="ab__cd"))) + self.assertRaises(ValueError, register_crypt_handler, type('x', (uh.StaticHandler,), dict(name="default"))) + + class dummy_1(uh.StaticHandler): + name = "dummy_1" + + class dummy_1b(uh.StaticHandler): + name = "dummy_1" + + self.assertTrue('dummy_1' not in list_crypt_handlers()) + + register_crypt_handler(dummy_1) + register_crypt_handler(dummy_1) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + self.assertRaises(KeyError, register_crypt_handler, dummy_1b) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + register_crypt_handler(dummy_1b, force=True) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1b) + + self.assertTrue('dummy_1' in list_crypt_handlers()) + + def test_get_crypt_handler(self): + "test get_crypt_handler()" + + class dummy_1(uh.StaticHandler): + name = "dummy_1" + + # without available handler + self.assertRaises(KeyError, get_crypt_handler, "dummy_1") + self.assertIs(get_crypt_handler("dummy_1", None), None) + + # already loaded handler + register_crypt_handler(dummy_1) + self.assertIs(get_crypt_handler("dummy_1"), dummy_1) + + with catch_warnings(): + warnings.filterwarnings("ignore", "handler names should be lower-case, and use underscores instead of 
hyphens:.*", UserWarning) + + # already loaded handler, using incorrect name + self.assertIs(get_crypt_handler("DUMMY-1"), dummy_1) + + # lazy load of unloaded handler, using incorrect name + register_crypt_handler_path('dummy_0', __name__) + self.assertIs(get_crypt_handler("DUMMY-0"), dummy_0) + + # check system & private names aren't returned + import passlib.hash # ensure module imported, so py3.3 sets __package__ + passlib.hash.__dict__["_fake"] = "dummy" # so behavior seen under py2x also + for name in ["_fake", "__package__"]: + self.assertRaises(KeyError, get_crypt_handler, name) + self.assertIs(get_crypt_handler(name, None), None) + + def test_list_crypt_handlers(self): + "test list_crypt_handlers()" + from passlib.registry import list_crypt_handlers + + # check system & private names aren't returned + import passlib.hash # ensure module imported, so py3.3 sets __package__ + passlib.hash.__dict__["_fake"] = "dummy" # so behavior seen under py2x also + for name in list_crypt_handlers(): + self.assertFalse(name.startswith("_"), "%r: " % name) + + def test_handlers(self): + "verify we have tests for all handlers" + from passlib.registry import list_crypt_handlers + from passlib.tests.test_handlers import get_handler_case + for name in list_crypt_handlers(): + if name.startswith("ldap_") and name[5:] in list_crypt_handlers(): + continue + if name in ["roundup_plaintext"]: + continue + self.assertTrue(get_handler_case(name)) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_utils.py b/passlib/tests/test_utils.py new file mode 100644 index 00000000..67834a03 --- /dev/null +++ b/passlib/tests/test_utils.py @@ -0,0 +1,917 @@ +"""tests for passlib.util""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +from binascii import hexlify, unhexlify +import sys +import random +import warnings +# site +# pkg +# module +from passlib.utils.compat import b, bytes, bascii_to_str, irange, PY2, PY3, u, \ + unicode, join_bytes, SUPPORTS_DIR_METHOD +from passlib.tests.utils import TestCase, catch_warnings + +def hb(source): + return unhexlify(b(source)) + +#============================================================================= +# byte funcs +#============================================================================= +class MiscTest(TestCase): + "tests various parts of utils module" + + # NOTE: could test xor_bytes(), but it's exercised well enough by pbkdf2 test + + def test_compat(self): + "test compat's lazymodule" + from passlib.utils import compat + # "" + self.assertRegex(repr(compat), + r"^$") + + # test synthentic dir() + dir(compat) + if SUPPORTS_DIR_METHOD: + self.assertTrue('UnicodeIO' in dir(compat)) + self.assertTrue('irange' in dir(compat)) + + def test_classproperty(self): + from passlib.utils import classproperty + + class test(object): + xvar = 1 + @classproperty + def xprop(cls): + return cls.xvar + + self.assertEqual(test.xprop, 1) + prop = test.__dict__['xprop'] + self.assertIs(prop.im_func, prop.__func__) + + def test_deprecated_function(self): + from passlib.utils import deprecated_function + # NOTE: not comprehensive, just tests the basic behavior + + @deprecated_function(deprecated="1.6", removed="1.8") + def test_func(*args): + "test docstring" + return args 
+ + self.assertTrue(".. deprecated::" in test_func.__doc__) + + with self.assertWarningList(dict(category=DeprecationWarning, + message="the function passlib.tests.test_utils.test_func() " + "is deprecated as of Passlib 1.6, and will be " + "removed in Passlib 1.8." + )): + self.assertEqual(test_func(1,2), (1,2)) + + def test_memoized_property(self): + from passlib.utils import memoized_property + + class dummy(object): + counter = 0 + + @memoized_property + def value(self): + value = self.counter + self.counter = value+1 + return value + + d = dummy() + self.assertEqual(d.value, 0) + self.assertEqual(d.value, 0) + self.assertEqual(d.counter, 1) + + prop = dummy.value + self.assertIs(prop.im_func, prop.__func__) + + def test_getrandbytes(self): + "test getrandbytes()" + from passlib.utils import getrandbytes, rng + def f(*a,**k): + return getrandbytes(rng, *a, **k) + self.assertEqual(len(f(0)), 0) + a = f(10) + b = f(10) + self.assertIsInstance(a, bytes) + self.assertEqual(len(a), 10) + self.assertEqual(len(b), 10) + self.assertNotEqual(a, b) + + def test_getrandstr(self): + "test getrandstr()" + from passlib.utils import getrandstr, rng + def f(*a,**k): + return getrandstr(rng, *a, **k) + + # count 0 + self.assertEqual(f('abc',0), '') + + # count <0 + self.assertRaises(ValueError, f, 'abc', -1) + + # letters 0 + self.assertRaises(ValueError, f, '', 0) + + # letters 1 + self.assertEqual(f('a',5), 'aaaaa') + + # letters + x = f(u('abc'), 16) + y = f(u('abc'), 16) + self.assertIsInstance(x, unicode) + self.assertNotEqual(x,y) + self.assertEqual(sorted(set(x)), [u('a'),u('b'),u('c')]) + + # bytes + x = f(b('abc'), 16) + y = f(b('abc'), 16) + self.assertIsInstance(x, bytes) + self.assertNotEqual(x,y) + # NOTE: decoding this due to py3 bytes + self.assertEqual(sorted(set(x.decode("ascii"))), [u('a'),u('b'),u('c')]) + + # generate_password + from passlib.utils import generate_password + self.assertEqual(len(generate_password(15)), 15) + + def test_is_crypt_context(self): + "test is_crypt_context()" + from passlib.utils import is_crypt_context + from passlib.context import CryptContext + cc = CryptContext(["des_crypt"]) + self.assertTrue(is_crypt_context(cc)) + self.assertFalse(not is_crypt_context(cc)) + + def test_genseed(self): + "test genseed()" + import random + from passlib.utils import genseed + rng = random.Random(genseed()) + a = rng.randint(0, 100000) + + rng = random.Random(genseed()) + b = rng.randint(0, 100000) + + self.assertNotEqual(a,b) + + rng.seed(genseed(rng)) + + def test_crypt(self): + "test crypt.crypt() wrappers" + from passlib.utils import has_crypt, safe_crypt, test_crypt + + # test everything is disabled + if not has_crypt: + self.assertEqual(safe_crypt("test", "aa"), None) + self.assertFalse(test_crypt("test", "aaqPiZY5xR5l.")) + raise self.skipTest("crypt.crypt() not available") + + # XXX: this assumes *every* crypt() implementation supports des_crypt. + # if this fails for some platform, this test will need modifying. + + # test return type + self.assertIsInstance(safe_crypt(u("test"), u("aa")), unicode) + + # test ascii password + h1 = u('aaqPiZY5xR5l.') + self.assertEqual(safe_crypt(u('test'), u('aa')), h1) + self.assertEqual(safe_crypt(b('test'), b('aa')), h1) + + # test utf-8 / unicode password + h2 = u('aahWwbrUsKZk.') + self.assertEqual(safe_crypt(u('test\u1234'), 'aa'), h2) + self.assertEqual(safe_crypt(b('test\xe1\x88\xb4'), 'aa'), h2) + + # test latin-1 password + hash = safe_crypt(b('test\xff'), 'aa') + if PY3: # py3 supports utf-8 bytes only. 
+ self.assertEqual(hash, None) + else: # but py2 is fine. + self.assertEqual(hash, u('aaOx.5nbTU/.M')) + + # test rejects null chars in password + self.assertRaises(ValueError, safe_crypt, '\x00', 'aa') + + # check test_crypt() + h1x = h1[:-1] + 'x' + self.assertTrue(test_crypt("test", h1)) + self.assertFalse(test_crypt("test", h1x)) + + # check crypt returning variant error indicators + # some platforms return None on errors, others empty string, + # The BSDs in some cases return ":" + import passlib.utils as mod + orig = mod._crypt + try: + fake = None + mod._crypt = lambda secret, hash: fake + for fake in [None, "", ":", ":0", "*0"]: + self.assertEqual(safe_crypt("test", "aa"), None) + self.assertFalse(test_crypt("test", h1)) + fake = 'xxx' + self.assertEqual(safe_crypt("test", "aa"), "xxx") + finally: + mod._crypt = orig + + def test_consteq(self): + "test consteq()" + # NOTE: this test is kind of over the top, but that's only because + # this is used for the critical task of comparing hashes for equality. + from passlib.utils import consteq + + # ensure error raises for wrong types + self.assertRaises(TypeError, consteq, u(''), b('')) + self.assertRaises(TypeError, consteq, u(''), 1) + self.assertRaises(TypeError, consteq, u(''), None) + + self.assertRaises(TypeError, consteq, b(''), u('')) + self.assertRaises(TypeError, consteq, b(''), 1) + self.assertRaises(TypeError, consteq, b(''), None) + + self.assertRaises(TypeError, consteq, None, u('')) + self.assertRaises(TypeError, consteq, None, b('')) + self.assertRaises(TypeError, consteq, 1, u('')) + self.assertRaises(TypeError, consteq, 1, b('')) + + # check equal inputs compare correctly + for value in [ + u("a"), + u("abc"), + u("\xff\xa2\x12\x00")*10, + ]: + self.assertTrue(consteq(value, value), "value %r:" % (value,)) + value = value.encode("latin-1") + self.assertTrue(consteq(value, value), "value %r:" % (value,)) + + # check non-equal inputs compare correctly + for l,r in [ + # check same-size comparisons with differing contents fail. + (u("a"), u("c")), + (u("abcabc"), u("zbaabc")), + (u("abcabc"), u("abzabc")), + (u("abcabc"), u("abcabz")), + ((u("\xff\xa2\x12\x00")*10)[:-1] + u("\x01"), + u("\xff\xa2\x12\x00")*10), + + # check different-size comparisons fail. + (u(""), u("a")), + (u("abc"), u("abcdef")), + (u("abc"), u("defabc")), + (u("qwertyuiopasdfghjklzxcvbnm"), u("abc")), + ]: + self.assertFalse(consteq(l, r), "values %r %r:" % (l,r)) + self.assertFalse(consteq(r, l), "values %r %r:" % (r,l)) + l = l.encode("latin-1") + r = r.encode("latin-1") + self.assertFalse(consteq(l, r), "values %r %r:" % (l,r)) + self.assertFalse(consteq(r, l), "values %r %r:" % (r,l)) + + # TODO: add some tests to ensure we take THETA(strlen) time. + # this might be hard to do reproducably. 
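+ # (constant-time behavior is the point of consteq(): an early-exit comparison would
+ # leak how many leading characters match, which is what timing attacks exploit)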
+ # NOTE: below code was used to generate stats for analysis + ##from math import log as logb + ##import timeit + ##multipliers = [ 1< encode() -> decode() -> raw + # + + # generate some random bytes + size = random.randint(1 if saw_zero else 0, 12) + if not size: + saw_zero = True + enc_size = (4*size+2)//3 + raw = getrandbytes(random, size) + + # encode them, check invariants + encoded = engine.encode_bytes(raw) + self.assertEqual(len(encoded), enc_size) + + # make sure decode returns original + result = engine.decode_bytes(encoded) + self.assertEqual(result, raw) + + # + # test encoded -> decode() -> encode() -> encoded + # + + # generate some random encoded data + if size % 4 == 1: + size += random.choice([-1,1,2]) + raw_size = 3*size//4 + encoded = getrandstr(random, engine.bytemap, size) + + # decode them, check invariants + raw = engine.decode_bytes(encoded) + self.assertEqual(len(raw), raw_size, "encoded %d:" % size) + + # make sure encode returns original (barring padding bits) + result = engine.encode_bytes(raw) + if size % 4: + self.assertEqual(result[:-1], encoded[:-1]) + else: + self.assertEqual(result, encoded) + + def test_repair_unused(self): + "test repair_unused()" + # NOTE: this test relies on encode_bytes() always returning clear + # padding bits - which should be ensured by test vectors. + from passlib.utils import rng, getrandstr + engine = self.engine + check_repair_unused = self.engine.check_repair_unused + i = 0 + while i < 300: + size = rng.randint(0,23) + cdata = getrandstr(rng, engine.charmap, size).encode("ascii") + if size & 3 == 1: + # should throw error + self.assertRaises(ValueError, check_repair_unused, cdata) + continue + rdata = engine.encode_bytes(engine.decode_bytes(cdata)) + if rng.random() < .5: + cdata = cdata.decode("ascii") + rdata = rdata.decode("ascii") + if cdata == rdata: + # should leave unchanged + ok, result = check_repair_unused(cdata) + self.assertFalse(ok) + self.assertEqual(result, rdata) + else: + # should repair bits + self.assertNotEqual(size % 4, 0) + ok, result = check_repair_unused(cdata) + self.assertTrue(ok) + self.assertEqual(result, rdata) + i += 1 + + #=================================================================== + # test transposed encode/decode - encoding independant + #=================================================================== + # NOTE: these tests assume normal encode/decode has been tested elsewhere. 
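+ # (encode_transposed_bytes() rearranges the input bytes per an offset map before encoding;
+ # the *_dups maps repeat an offset, so they can be encoded but not unambiguously decoded)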
+ + transposed = [ + # orig, result, transpose map + (b("\x33\x22\x11"), b("\x11\x22\x33"),[2,1,0]), + (b("\x22\x33\x11"), b("\x11\x22\x33"),[1,2,0]), + ] + + transposed_dups = [ + # orig, result, transpose projection + (b("\x11\x11\x22"), b("\x11\x22\x33"),[0,0,1]), + ] + + def test_encode_transposed_bytes(self): + "test encode_transposed_bytes()" + engine = self.engine + for result, input, offsets in self.transposed + self.transposed_dups: + tmp = engine.encode_transposed_bytes(input, offsets) + out = engine.decode_bytes(tmp) + self.assertEqual(out, result) + + self.assertRaises(TypeError, engine.encode_transposed_bytes, u("a"), []) + + def test_decode_transposed_bytes(self): + "test decode_transposed_bytes()" + engine = self.engine + for input, result, offsets in self.transposed: + tmp = engine.encode_bytes(input) + out = engine.decode_transposed_bytes(tmp, offsets) + self.assertEqual(out, result) + + def test_decode_transposed_bytes_bad(self): + "test decode_transposed_bytes() fails if map is a one-way" + engine = self.engine + for input, _, offsets in self.transposed_dups: + tmp = engine.encode_bytes(input) + self.assertRaises(TypeError, engine.decode_transposed_bytes, tmp, + offsets) + + #=================================================================== + # test 6bit handling + #=================================================================== + def check_int_pair(self, bits, encoded_pairs): + "helper to check encode_intXX & decode_intXX functions" + engine = self.engine + encode = getattr(engine, "encode_int%s" % bits) + decode = getattr(engine, "decode_int%s" % bits) + pad = -bits % 6 + chars = (bits+pad)//6 + upper = 1< hex digest + # test vectors from http://www.faqs.org/rfcs/rfc1320.html - A.5 + (b(""), "31d6cfe0d16ae931b73c59d7e0c089c0"), + (b("a"), "bde52cb31de33e46245e05fbdbd6fb24"), + (b("abc"), "a448017aaf21d8525fc10ae87aa6729d"), + (b("message digest"), "d9130a8164549fe818874806e1c7014b"), + (b("abcdefghijklmnopqrstuvwxyz"), "d79e1c308aa5bbcdeea8ed63df412da9"), + (b("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"), "043f8582f241db351ce627e153e7f0e4"), + (b("12345678901234567890123456789012345678901234567890123456789012345678901234567890"), "e33b4ddc9c38f2199c3e7b164fcc0536"), + ] + + def test_md4_update(self): + "test md4 update" + from passlib.utils.md4 import md4 + h = md4(b('')) + self.assertEqual(h.hexdigest(), "31d6cfe0d16ae931b73c59d7e0c089c0") + + # NOTE: under py2, hashlib methods try to encode to ascii, + # though shouldn't rely on that. 
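+ # (the builtin md4 implementation only accepts bytes; under py3 the native one does too,
+ # hence the PY3-or-disabled check before asserting TypeError)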
+ if PY3 or self._disable_native: + self.assertRaises(TypeError, h.update, u('x')) + + h.update(b('a')) + self.assertEqual(h.hexdigest(), "bde52cb31de33e46245e05fbdbd6fb24") + + h.update(b('bcdefghijklmnopqrstuvwxyz')) + self.assertEqual(h.hexdigest(), "d79e1c308aa5bbcdeea8ed63df412da9") + + def test_md4_hexdigest(self): + "test md4 hexdigest()" + from passlib.utils.md4 import md4 + for input, hex in self.vectors: + out = md4(input).hexdigest() + self.assertEqual(out, hex) + + def test_md4_digest(self): + "test md4 digest()" + from passlib.utils.md4 import md4 + for input, hex in self.vectors: + out = bascii_to_str(hexlify(md4(input).digest())) + self.assertEqual(out, hex) + + def test_md4_copy(self): + "test md4 copy()" + from passlib.utils.md4 import md4 + h = md4(b('abc')) + + h2 = h.copy() + h2.update(b('def')) + self.assertEqual(h2.hexdigest(), '804e7f1c2586e50b49ac65db5b645131') + + h.update(b('ghi')) + self.assertEqual(h.hexdigest(), 'c5225580bfe176f6deeee33dee98732c') + +# create subclasses to test with and without native backend +class MD4_SSL_Test(_MD4_Test): + descriptionPrefix = "MD4 (ssl version)" +MD4_SSL_TEST = skipUnless(has_native_md4, "hashlib lacks ssl support")(MD4_SSL_Test) + +class MD4_Builtin_Test(_MD4_Test): + descriptionPrefix = "MD4 (builtin version)" + _disable_native = True +MD4_Builtin_Test = skipUnless(TEST_MODE("full") or not has_native_md4, + "skipped under current test mode")(MD4_Builtin_Test) + +#============================================================================= +# test PBKDF1 support +#============================================================================= +class Pbkdf1_Test(TestCase): + "test kdf helpers" + descriptionPrefix = "pbkdf1" + + pbkdf1_tests = [ + # (password, salt, rounds, keylen, hash, result) + + # + # from http://www.di-mgt.com.au/cryptoKDFs.html + # + (b('password'), hb('78578E5A5D63CB06'), 1000, 16, 'sha1', hb('dc19847e05c64d2faf10ebfb4a3d2a20')), + + # + # custom + # + (b('password'), b('salt'), 1000, 0, 'md5', b('')), + (b('password'), b('salt'), 1000, 1, 'md5', hb('84')), + (b('password'), b('salt'), 1000, 8, 'md5', hb('8475c6a8531a5d27')), + (b('password'), b('salt'), 1000, 16, 'md5', hb('8475c6a8531a5d27e386cd496457812c')), + (b('password'), b('salt'), 1000, None, 'md5', hb('8475c6a8531a5d27e386cd496457812c')), + (b('password'), b('salt'), 1000, None, 'sha1', hb('4a8fd48e426ed081b535be5769892fa396293efb')), + ] + if not (PYPY or JYTHON): + pbkdf1_tests.append( + (b('password'), b('salt'), 1000, None, 'md4', hb('f7f2e91100a8f96190f2dd177cb26453')) + ) + + def test_known(self): + "test reference vectors" + from passlib.utils.pbkdf2 import pbkdf1 + for secret, salt, rounds, keylen, digest, correct in self.pbkdf1_tests: + result = pbkdf1(secret, salt, rounds, keylen, digest) + self.assertEqual(result, correct) + + def test_border(self): + "test border cases" + from passlib.utils.pbkdf2 import pbkdf1 + def helper(secret=b('secret'), salt=b('salt'), rounds=1, keylen=1, hash='md5'): + return pbkdf1(secret, salt, rounds, keylen, hash) + helper() + + # salt/secret wrong type + self.assertRaises(TypeError, helper, secret=1) + self.assertRaises(TypeError, helper, salt=1) + + # non-existent hashes + self.assertRaises(ValueError, helper, hash='missing') + + # rounds < 1 and wrong type + self.assertRaises(ValueError, helper, rounds=0) + self.assertRaises(TypeError, helper, rounds='1') + + # keylen < 0, keylen > block_size, and wrong type + self.assertRaises(ValueError, helper, keylen=-1) + self.assertRaises(ValueError, helper, 
keylen=17, hash='md5') + self.assertRaises(TypeError, helper, keylen='1') + +#============================================================================= +# test PBKDF2 support +#============================================================================= +class _Pbkdf2_Test(TestCase): + "test pbkdf2() support" + _disable_m2crypto = False + + def setUp(self): + super(_Pbkdf2_Test, self).setUp() + import passlib.utils.pbkdf2 as mod + + # disable m2crypto support, and use software backend + if M2Crypto and self._disable_m2crypto: + self.addCleanup(setattr, mod, "_EVP", mod._EVP) + mod._EVP = None + + # flush cached prf functions, since we're screwing with their backend. + mod._clear_prf_cache() + self.addCleanup(mod._clear_prf_cache) + + pbkdf2_test_vectors = [ + # (result, secret, salt, rounds, keylen, prf="sha1") + + # + # from rfc 3962 + # + + # test case 1 / 128 bit + ( + hb("cdedb5281bb2f801565a1122b2563515"), + b("password"), b("ATHENA.MIT.EDUraeburn"), 1, 16 + ), + + # test case 2 / 128 bit + ( + hb("01dbee7f4a9e243e988b62c73cda935d"), + b("password"), b("ATHENA.MIT.EDUraeburn"), 2, 16 + ), + + # test case 2 / 256 bit + ( + hb("01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86"), + b("password"), b("ATHENA.MIT.EDUraeburn"), 2, 32 + ), + + # test case 3 / 256 bit + ( + hb("5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13"), + b("password"), b("ATHENA.MIT.EDUraeburn"), 1200, 32 + ), + + # test case 4 / 256 bit + ( + hb("d1daa78615f287e6a1c8b120d7062a493f98d203e6be49a6adf4fa574b6e64ee"), + b("password"), b('\x12\x34\x56\x78\x78\x56\x34\x12'), 5, 32 + ), + + # test case 5 / 256 bit + ( + hb("139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1"), + b("X"*64), b("pass phrase equals block size"), 1200, 32 + ), + + # test case 6 / 256 bit + ( + hb("9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a"), + b("X"*65), b("pass phrase exceeds block size"), 1200, 32 + ), + + # + # from rfc 6070 + # + ( + hb("0c60c80f961f0e71f3a9b524af6012062fe037a6"), + b("password"), b("salt"), 1, 20, + ), + + ( + hb("ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"), + b("password"), b("salt"), 2, 20, + ), + + ( + hb("4b007901b765489abead49d926f721d065a429c1"), + b("password"), b("salt"), 4096, 20, + ), + + # just runs too long - could enable if ALL option is set + ##( + ## + ## unhexlify("eefe3d61cd4da4e4e9945b3d6ba2158c2634e984"), + ## "password", "salt", 16777216, 20, + ##), + + ( + hb("3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"), + b("passwordPASSWORDpassword"), + b("saltSALTsaltSALTsaltSALTsaltSALTsalt"), + 4096, 25, + ), + + ( + hb("56fa6aa75548099dcc37d7f03425e0c3"), + b("pass\00word"), b("sa\00lt"), 4096, 16, + ), + + # + # from example in http://grub.enbug.org/Authentication + # + ( + hb("887CFF169EA8335235D8004242AA7D6187A41E3187DF0CE14E256D85ED" + "97A97357AAA8FF0A3871AB9EEFF458392F462F495487387F685B7472FC" + "6C29E293F0A0"), + b("hello"), + hb("9290F727ED06C38BA4549EF7DE25CF5642659211B7FC076F2D28FEFD71" + "784BB8D8F6FB244A8CC5C06240631B97008565A120764C0EE9C2CB0073" + "994D79080136"), + 10000, 64, "hmac-sha512" + ), + + # + # custom + # + ( + hb('e248fb6b13365146f8ac6307cc222812'), + b("secret"), b("salt"), 10, 16, "hmac-sha1", + ), + ( + hb('e248fb6b13365146f8ac6307cc2228127872da6d'), + b("secret"), b("salt"), 10, None, "hmac-sha1", + ), + + ] + + def test_known(self): + "test reference vectors" + from passlib.utils.pbkdf2 import pbkdf2 + for row in self.pbkdf2_test_vectors: + correct, secret, salt, rounds, keylen = row[:5] + prf = row[5] 
if len(row) == 6 else "hmac-sha1" + result = pbkdf2(secret, salt, rounds, keylen, prf) + self.assertEqual(result, correct) + + def test_border(self): + "test border cases" + from passlib.utils.pbkdf2 import pbkdf2 + def helper(secret=b('password'), salt=b('salt'), rounds=1, keylen=None, prf="hmac-sha1"): + return pbkdf2(secret, salt, rounds, keylen, prf) + helper() + + # invalid rounds + self.assertRaises(ValueError, helper, rounds=0) + self.assertRaises(TypeError, helper, rounds='x') + + # invalid keylen + helper(keylen=0) + self.assertRaises(ValueError, helper, keylen=-1) + self.assertRaises(ValueError, helper, keylen=20*(2**32-1)+1) + self.assertRaises(TypeError, helper, keylen='x') + + # invalid secret/salt type + self.assertRaises(TypeError, helper, salt=5) + self.assertRaises(TypeError, helper, secret=5) + + # invalid hash + self.assertRaises(ValueError, helper, prf='hmac-foo') + self.assertRaises(ValueError, helper, prf='foo') + self.assertRaises(TypeError, helper, prf=5) + + def test_default_keylen(self): + "test keylen==None" + from passlib.utils.pbkdf2 import pbkdf2 + def helper(secret=b('password'), salt=b('salt'), rounds=1, keylen=None, prf="hmac-sha1"): + return pbkdf2(secret, salt, rounds, keylen, prf) + self.assertEqual(len(helper(prf='hmac-sha1')), 20) + self.assertEqual(len(helper(prf='hmac-sha256')), 32) + + def test_custom_prf(self): + "test custom prf function" + from passlib.utils.pbkdf2 import pbkdf2 + def prf(key, msg): + return hashlib.md5(key+msg+b('fooey')).digest() + result = pbkdf2(b('secret'), b('salt'), 1000, 20, prf) + self.assertEqual(result, hb('5fe7ce9f7e379d3f65cbc66ba8aa6440474a6849')) + +# create subclasses to test with and without m2crypto +class Pbkdf2_M2Crypto_Test(_Pbkdf2_Test): + descriptionPrefix = "pbkdf2 (m2crypto backend)" +Pbkdf2_M2Crypto_Test = skipUnless(M2Crypto, "M2Crypto not found")(Pbkdf2_M2Crypto_Test) + +class Pbkdf2_Builtin_Test(_Pbkdf2_Test): + descriptionPrefix = "pbkdf2 (builtin backend)" + _disable_m2crypto = True +Pbkdf2_Builtin_Test = skipUnless(TEST_MODE("full") or not M2Crypto, + "skipped under current test mode")(Pbkdf2_Builtin_Test) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_utils_handlers.py b/passlib/tests/test_utils_handlers.py new file mode 100644 index 00000000..5191111d --- /dev/null +++ b/passlib/tests/test_utils_handlers.py @@ -0,0 +1,806 @@ +"""tests for passlib.pwhash -- (c) Assurance Technologies 2003-2009""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import re +import hashlib +from logging import getLogger +import warnings +# site +# pkg +from passlib.hash import ldap_md5, sha256_crypt +from passlib.registry import _unload_handler_name as unload_handler_name, \ + register_crypt_handler, get_crypt_handler +from passlib.exc import MissingBackendError, PasslibHashWarning +from passlib.utils import getrandstr, JYTHON, rng +from passlib.utils.compat import b, bytes, bascii_to_str, str_to_uascii, \ + uascii_to_str, unicode, PY_MAX_25, SUPPORTS_DIR_METHOD +import passlib.utils.handlers as uh +from passlib.tests.utils import HandlerCase, TestCase, catch_warnings +from passlib.utils.compat import u, PY3 +# module +log = getLogger(__name__) + 
+#============================================================================= +# utils +#============================================================================= +def _makelang(alphabet, size): + "generate all strings of given size using alphabet" + def helper(size): + if size < 2: + for char in alphabet: + yield char + else: + for char in alphabet: + for tail in helper(size-1): + yield char+tail + return set(helper(size)) + +#============================================================================= +# test GenericHandler & associates mixin classes +#============================================================================= +class SkeletonTest(TestCase): + "test hash support classes" + + #=================================================================== + # StaticHandler + #=================================================================== + def test_00_static_handler(self): + "test StaticHandler class" + + class d1(uh.StaticHandler): + name = "d1" + context_kwds = ("flag",) + _hash_prefix = u("_") + checksum_chars = u("ab") + checksum_size = 1 + + def __init__(self, flag=False, **kwds): + super(d1, self).__init__(**kwds) + self.flag = flag + + def _calc_checksum(self, secret): + return u('b') if self.flag else u('a') + + # check default identify method + self.assertTrue(d1.identify(u('_a'))) + self.assertTrue(d1.identify(b('_a'))) + self.assertTrue(d1.identify(u('_b'))) + + self.assertFalse(d1.identify(u('_c'))) + self.assertFalse(d1.identify(b('_c'))) + self.assertFalse(d1.identify(u('a'))) + self.assertFalse(d1.identify(u('b'))) + self.assertFalse(d1.identify(u('c'))) + self.assertRaises(TypeError, d1.identify, None) + self.assertRaises(TypeError, d1.identify, 1) + + # check default genconfig method + self.assertIs(d1.genconfig(), None) + + # check default verify method + self.assertTrue(d1.verify('s', b('_a'))) + self.assertTrue(d1.verify('s',u('_a'))) + self.assertFalse(d1.verify('s', b('_b'))) + self.assertFalse(d1.verify('s',u('_b'))) + self.assertTrue(d1.verify('s', b('_b'), flag=True)) + self.assertRaises(ValueError, d1.verify, 's', b('_c')) + self.assertRaises(ValueError, d1.verify, 's', u('_c')) + + # check default encrypt method + self.assertEqual(d1.encrypt('s'), '_a') + self.assertEqual(d1.encrypt('s', flag=True), '_b') + + def test_01_calc_checksum_hack(self): + "test StaticHandler legacy attr" + # release 1.5 StaticHandler required genhash(), + # not _calc_checksum, be implemented. we have backward compat wrapper, + # this tests that it works. + + class d1(uh.StaticHandler): + name = "d1" + + @classmethod + def identify(self, hash): + if not hash or len(hash) != 40: + return False + try: + int(hash, 16) + except ValueError: + return False + return True + + @classmethod + def genhash(cls, secret, hash): + if secret is None: + raise TypeError("no secret provided") + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + if hash is not None and not cls.identify(hash): + raise ValueError("invalid hash") + return hashlib.sha1(b("xyz") + secret).hexdigest() + + @classmethod + def verify(cls, secret, hash): + if hash is None: + raise ValueError("no hash specified") + return cls.genhash(secret, hash) == hash.lower() + + # encrypt should issue api warnings, but everything else should be fine. 
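+ # (the compat wrapper routes encrypt() through the legacy genhash() defined above and
+ # warns that the handler should be updated to provide _calc_checksum())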
+ with self.assertWarningList("d1.*should be updated.*_calc_checksum"): + hash = d1.encrypt("test") + self.assertEqual(hash, '7c622762588a0e5cc786ad0a143156f9fd38eea3') + + self.assertTrue(d1.verify("test", hash)) + self.assertFalse(d1.verify("xtest", hash)) + + # not defining genhash either, however, should cause NotImplementedError + del d1.genhash + self.assertRaises(NotImplementedError, d1.encrypt, 'test') + + #=================================================================== + # GenericHandler & mixins + #=================================================================== + def test_10_identify(self): + "test GenericHandler.identify()" + class d1(uh.GenericHandler): + + @classmethod + def from_string(cls, hash): + if isinstance(hash, bytes): + hash = hash.decode("ascii") + if hash == u('a'): + return cls(checksum=hash) + else: + raise ValueError + + # check fallback + self.assertRaises(TypeError, d1.identify, None) + self.assertRaises(TypeError, d1.identify, 1) + self.assertFalse(d1.identify('')) + self.assertTrue(d1.identify('a')) + self.assertFalse(d1.identify('b')) + + # check regexp + d1._hash_regex = re.compile(u('@.')) + self.assertRaises(TypeError, d1.identify, None) + self.assertRaises(TypeError, d1.identify, 1) + self.assertTrue(d1.identify('@a')) + self.assertFalse(d1.identify('a')) + del d1._hash_regex + + # check ident-based + d1.ident = u('!') + self.assertRaises(TypeError, d1.identify, None) + self.assertRaises(TypeError, d1.identify, 1) + self.assertTrue(d1.identify('!a')) + self.assertFalse(d1.identify('a')) + del d1.ident + + def test_11_norm_checksum(self): + "test GenericHandler checksum handling" + # setup helpers + class d1(uh.GenericHandler): + name = 'd1' + checksum_size = 4 + checksum_chars = u('xz') + _stub_checksum = u('z')*4 + + def norm_checksum(*a, **k): + return d1(*a, **k).checksum + + # too small + self.assertRaises(ValueError, norm_checksum, u('xxx')) + + # right size + self.assertEqual(norm_checksum(u('xxxx')), u('xxxx')) + self.assertEqual(norm_checksum(u('xzxz')), u('xzxz')) + + # too large + self.assertRaises(ValueError, norm_checksum, u('xxxxx')) + + # wrong chars + self.assertRaises(ValueError, norm_checksum, u('xxyx')) + + # wrong type + self.assertRaises(TypeError, norm_checksum, b('xxyx')) + + # relaxed + with self.assertWarningList("checksum should be unicode"): + self.assertEqual(norm_checksum(b('xxzx'), relaxed=True), u('xxzx')) + self.assertRaises(TypeError, norm_checksum, 1, relaxed=True) + + # test _stub_checksum behavior + self.assertIs(norm_checksum(u('zzzz')), None) + + def test_12_norm_checksum_raw(self): + "test GenericHandler + HasRawChecksum mixin" + class d1(uh.HasRawChecksum, uh.GenericHandler): + name = 'd1' + checksum_size = 4 + _stub_checksum = b('0')*4 + + def norm_checksum(*a, **k): + return d1(*a, **k).checksum + + # test bytes + self.assertEqual(norm_checksum(b('1234')), b('1234')) + + # test unicode + self.assertRaises(TypeError, norm_checksum, u('xxyx')) + self.assertRaises(TypeError, norm_checksum, u('xxyx'), relaxed=True) + + # test _stub_checksum behavior + self.assertIs(norm_checksum(b('0')*4), None) + + def test_20_norm_salt(self): + "test GenericHandler + HasSalt mixin" + # setup helpers + class d1(uh.HasSalt, uh.GenericHandler): + name = 'd1' + setting_kwds = ('salt',) + min_salt_size = 2 + max_salt_size = 4 + default_salt_size = 3 + salt_chars = 'ab' + + def norm_salt(**k): + return d1(**k).salt + + def gen_salt(sz, **k): + return d1(use_defaults=True, salt_size=sz, **k).salt + + salts2 = _makelang('ab', 2) + 
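+ # (_makelang('ab', n) is the set of all length-n strings over {'a','b'} -- every possible salt of that size)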
salts3 = _makelang('ab', 3) + salts4 = _makelang('ab', 4) + + # check salt=None + self.assertRaises(TypeError, norm_salt) + self.assertRaises(TypeError, norm_salt, salt=None) + self.assertIn(norm_salt(use_defaults=True), salts3) + + # check explicit salts + with catch_warnings(record=True) as wlog: + + # check too-small salts + self.assertRaises(ValueError, norm_salt, salt='') + self.assertRaises(ValueError, norm_salt, salt='a') + self.consumeWarningList(wlog) + + # check correct salts + self.assertEqual(norm_salt(salt='ab'), 'ab') + self.assertEqual(norm_salt(salt='aba'), 'aba') + self.assertEqual(norm_salt(salt='abba'), 'abba') + self.consumeWarningList(wlog) + + # check too-large salts + self.assertRaises(ValueError, norm_salt, salt='aaaabb') + self.consumeWarningList(wlog) + + self.assertEqual(norm_salt(salt='aaaabb', relaxed=True), 'aaaa') + self.consumeWarningList(wlog, PasslibHashWarning) + + # check generated salts + with catch_warnings(record=True) as wlog: + + # check too-small salt size + self.assertRaises(ValueError, gen_salt, 0) + self.assertRaises(ValueError, gen_salt, 1) + self.consumeWarningList(wlog) + + # check correct salt size + self.assertIn(gen_salt(2), salts2) + self.assertIn(gen_salt(3), salts3) + self.assertIn(gen_salt(4), salts4) + self.consumeWarningList(wlog) + + # check too-large salt size + self.assertRaises(ValueError, gen_salt, 5) + self.consumeWarningList(wlog) + + self.assertIn(gen_salt(5, relaxed=True), salts4) + self.consumeWarningList(wlog, ["salt too large"]) + + # test with max_salt_size=None + del d1.max_salt_size + with self.assertWarningList([]): + self.assertEqual(len(gen_salt(None)), 3) + self.assertEqual(len(gen_salt(5)), 5) + + # TODO: test HasRawSalt mixin + + def test_30_norm_rounds(self): + "test GenericHandler + HasRounds mixin" + # setup helpers + class d1(uh.HasRounds, uh.GenericHandler): + name = 'd1' + setting_kwds = ('rounds',) + min_rounds = 1 + max_rounds = 3 + default_rounds = 2 + + def norm_rounds(**k): + return d1(**k).rounds + + # check rounds=None + self.assertRaises(TypeError, norm_rounds) + self.assertRaises(TypeError, norm_rounds, rounds=None) + self.assertEqual(norm_rounds(use_defaults=True), 2) + + # check rounds=non int + self.assertRaises(TypeError, norm_rounds, rounds=1.5) + + # check explicit rounds + with catch_warnings(record=True) as wlog: + # too small + self.assertRaises(ValueError, norm_rounds, rounds=0) + self.consumeWarningList(wlog) + + self.assertEqual(norm_rounds(rounds=0, relaxed=True), 1) + self.consumeWarningList(wlog, PasslibHashWarning) + + # just right + self.assertEqual(norm_rounds(rounds=1), 1) + self.assertEqual(norm_rounds(rounds=2), 2) + self.assertEqual(norm_rounds(rounds=3), 3) + self.consumeWarningList(wlog) + + # too large + self.assertRaises(ValueError, norm_rounds, rounds=4) + self.consumeWarningList(wlog) + + self.assertEqual(norm_rounds(rounds=4, relaxed=True), 3) + self.consumeWarningList(wlog, PasslibHashWarning) + + # check no default rounds + d1.default_rounds = None + self.assertRaises(TypeError, norm_rounds, use_defaults=True) + + def test_40_backends(self): + "test GenericHandler + HasManyBackends mixin" + class d1(uh.HasManyBackends, uh.GenericHandler): + name = 'd1' + setting_kwds = () + + backends = ("a", "b") + + _has_backend_a = False + _has_backend_b = False + + def _calc_checksum_a(self, secret): + return 'a' + + def _calc_checksum_b(self, secret): + return 'b' + + # test no backends + self.assertRaises(MissingBackendError, d1.get_backend) + 
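+ # (a bare set_backend() call defaults to "any", so it should fail the same way as the
+ # explicit 'any'/'default' requests below when no backend is available)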
self.assertRaises(MissingBackendError, d1.set_backend) + self.assertRaises(MissingBackendError, d1.set_backend, 'any') + self.assertRaises(MissingBackendError, d1.set_backend, 'default') + self.assertFalse(d1.has_backend()) + + # enable 'b' backend + d1._has_backend_b = True + + # test lazy load + obj = d1() + self.assertEqual(obj._calc_checksum('s'), 'b') + + # test repeat load + d1.set_backend('b') + d1.set_backend('any') + self.assertEqual(obj._calc_checksum('s'), 'b') + + # test unavailable + self.assertRaises(MissingBackendError, d1.set_backend, 'a') + self.assertTrue(d1.has_backend('b')) + self.assertFalse(d1.has_backend('a')) + + # enable 'a' backend also + d1._has_backend_a = True + + # test explicit + self.assertTrue(d1.has_backend()) + d1.set_backend('a') + self.assertEqual(obj._calc_checksum('s'), 'a') + + # test unknown backend + self.assertRaises(ValueError, d1.set_backend, 'c') + self.assertRaises(ValueError, d1.has_backend, 'c') + + def test_50_norm_ident(self): + "test GenericHandler + HasManyIdents" + # setup helpers + class d1(uh.HasManyIdents, uh.GenericHandler): + name = 'd1' + setting_kwds = ('ident',) + default_ident = u("!A") + ident_values = [ u("!A"), u("!B") ] + ident_aliases = { u("A"): u("!A")} + + def norm_ident(**k): + return d1(**k).ident + + # check ident=None + self.assertRaises(TypeError, norm_ident) + self.assertRaises(TypeError, norm_ident, ident=None) + self.assertEqual(norm_ident(use_defaults=True), u('!A')) + + # check valid idents + self.assertEqual(norm_ident(ident=u('!A')), u('!A')) + self.assertEqual(norm_ident(ident=u('!B')), u('!B')) + self.assertRaises(ValueError, norm_ident, ident=u('!C')) + + # check aliases + self.assertEqual(norm_ident(ident=u('A')), u('!A')) + + # check invalid idents + self.assertRaises(ValueError, norm_ident, ident=u('B')) + + # check identify is honoring ident system + self.assertTrue(d1.identify(u("!Axxx"))) + self.assertTrue(d1.identify(u("!Bxxx"))) + self.assertFalse(d1.identify(u("!Cxxx"))) + self.assertFalse(d1.identify(u("A"))) + self.assertFalse(d1.identify(u(""))) + self.assertRaises(TypeError, d1.identify, None) + self.assertRaises(TypeError, d1.identify, 1) + + # check default_ident missing is detected. 
+ d1.default_ident = None + self.assertRaises(AssertionError, norm_ident, use_defaults=True) + + #=================================================================== + # experimental - the following methods are not finished or tested, + # but way work correctly for some hashes + #=================================================================== + def test_91_parsehash(self): + "test parsehash()" + # NOTE: this just tests some existing GenericHandler classes + from passlib import hash + + # + # parsehash() + # + + # simple hash w/ salt + result = hash.des_crypt.parsehash("OgAwTx2l6NADI") + self.assertEqual(result, {'checksum': u('AwTx2l6NADI'), 'salt': u('Og')}) + + # parse rounds and extra implicit_rounds flag + h = '$5$LKO/Ute40T3FNF95$U0prpBQd4PloSGU0pnpM4z9wKn4vZ1.jsrzQfPqxph9' + s = u('LKO/Ute40T3FNF95') + c = u('U0prpBQd4PloSGU0pnpM4z9wKn4vZ1.jsrzQfPqxph9') + result = hash.sha256_crypt.parsehash(h) + self.assertEqual(result, dict(salt=s, rounds=5000, + implicit_rounds=True, checksum=c)) + + # omit checksum + result = hash.sha256_crypt.parsehash(h, checksum=False) + self.assertEqual(result, dict(salt=s, rounds=5000, implicit_rounds=True)) + + # sanitize + result = hash.sha256_crypt.parsehash(h, sanitize=True) + self.assertEqual(result, dict(rounds=5000, implicit_rounds=True, + salt=u('LK**************'), + checksum=u('U0pr***************************************'))) + + # parse w/o implicit rounds flag + result = hash.sha256_crypt.parsehash('$5$rounds=10428$uy/jIAhCetNCTtb0$YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3') + self.assertEqual(result, dict( + checksum=u('YWvUOXbkqlqhyoPMpN8BMe.ZGsGx2aBvxTvDFI613c3'), + salt=u('uy/jIAhCetNCTtb0'), + rounds=10428, + )) + + # parsing of raw checksums & salts + h1 = '$pbkdf2$60000$DoEwpvQeA8B4T.k951yLUQ$O26Y3/NJEiLCVaOVPxGXshyjW8k' + result = hash.pbkdf2_sha1.parsehash(h1) + self.assertEqual(result, dict( + checksum=b(';n\x98\xdf\xf3I\x12"\xc2U\xa3\x95?\x11\x97\xb2\x1c\xa3[\xc9'), + rounds=60000, + salt=b('\x0e\x810\xa6\xf4\x1e\x03\xc0xO\xe9=\xe7\\\x8bQ'), + )) + + # sanitizing of raw checksums & salts + result = hash.pbkdf2_sha1.parsehash(h1, sanitize=True) + self.assertEqual(result, dict( + checksum=u('O26************************'), + rounds=60000, + salt=u('Do********************'), + )) + + def test_92_bitsize(self): + "test bitsize()" + # NOTE: this just tests some existing GenericHandler classes + from passlib import hash + + # no rounds + self.assertEqual(hash.des_crypt.bitsize(), + {'checksum': 66, 'salt': 12}) + + # log2 rounds + self.assertEqual(hash.bcrypt.bitsize(), + {'checksum': 186, 'salt': 132}) + + # linear rounds + self.assertEqual(hash.sha256_crypt.bitsize(), + {'checksum': 258, 'rounds': 14, 'salt': 96}) + + # raw checksum + self.assertEqual(hash.pbkdf2_sha1.bitsize(), + {'checksum': 160, 'rounds': 13, 'salt': 128}) + + # TODO: handle fshp correctly, and other glitches noted in code. 
+ ##self.assertEqual(hash.fshp.bitsize(variant=1), + ## {'checksum': 256, 'rounds': 13, 'salt': 128}) + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# PrefixWrapper +#============================================================================= +class dummy_handler_in_registry(object): + "context manager that inserts dummy handler in registry" + def __init__(self, name): + self.name = name + self.dummy = type('dummy_' + name, (uh.GenericHandler,), dict( + name=name, + setting_kwds=(), + )) + + def __enter__(self): + from passlib import registry + registry._unload_handler_name(self.name, locations=False) + registry.register_crypt_handler(self.dummy) + assert registry.get_crypt_handler(self.name) is self.dummy + return self.dummy + + def __exit__(self, *exc_info): + from passlib import registry + registry._unload_handler_name(self.name, locations=False) + +class PrefixWrapperTest(TestCase): + "test PrefixWrapper class" + + def test_00_lazy_loading(self): + "test PrefixWrapper lazy loading of handler" + d1 = uh.PrefixWrapper("d1", "ldap_md5", "{XXX}", "{MD5}", lazy=True) + + # check base state + self.assertEqual(d1._wrapped_name, "ldap_md5") + self.assertIs(d1._wrapped_handler, None) + + # check loading works + self.assertIs(d1.wrapped, ldap_md5) + self.assertIs(d1._wrapped_handler, ldap_md5) + + # replace w/ wrong handler, make sure doesn't reload w/ dummy + with dummy_handler_in_registry("ldap_md5") as dummy: + self.assertIs(d1.wrapped, ldap_md5) + + def test_01_active_loading(self): + "test PrefixWrapper active loading of handler" + d1 = uh.PrefixWrapper("d1", "ldap_md5", "{XXX}", "{MD5}") + + # check base state + self.assertEqual(d1._wrapped_name, "ldap_md5") + self.assertIs(d1._wrapped_handler, ldap_md5) + self.assertIs(d1.wrapped, ldap_md5) + + # replace w/ wrong handler, make sure doesn't reload w/ dummy + with dummy_handler_in_registry("ldap_md5") as dummy: + self.assertIs(d1.wrapped, ldap_md5) + + def test_02_explicit(self): + "test PrefixWrapper with explicitly specified handler" + + d1 = uh.PrefixWrapper("d1", ldap_md5, "{XXX}", "{MD5}") + + # check base state + self.assertEqual(d1._wrapped_name, None) + self.assertIs(d1._wrapped_handler, ldap_md5) + self.assertIs(d1.wrapped, ldap_md5) + + # replace w/ wrong handler, make sure doesn't reload w/ dummy + with dummy_handler_in_registry("ldap_md5") as dummy: + self.assertIs(d1.wrapped, ldap_md5) + + def test_10_wrapped_attributes(self): + d1 = uh.PrefixWrapper("d1", "ldap_md5", "{XXX}", "{MD5}") + self.assertEqual(d1.name, "d1") + self.assertIs(d1.setting_kwds, ldap_md5.setting_kwds) + self.assertFalse('max_rounds' in dir(d1)) + + d2 = uh.PrefixWrapper("d2", "sha256_crypt", "{XXX}") + self.assertIs(d2.setting_kwds, sha256_crypt.setting_kwds) + if SUPPORTS_DIR_METHOD: + self.assertTrue('max_rounds' in dir(d2)) + else: + self.assertFalse('max_rounds' in dir(d2)) + + def test_11_wrapped_methods(self): + d1 = uh.PrefixWrapper("d1", "ldap_md5", "{XXX}", "{MD5}") + dph = "{XXX}X03MO1qnZdYdgyfeuILPmQ==" + lph = "{MD5}X03MO1qnZdYdgyfeuILPmQ==" + + # genconfig + self.assertIs(d1.genconfig(), None) + + # genhash + self.assertEqual(d1.genhash("password", None), dph) + self.assertEqual(d1.genhash("password", dph), dph) + self.assertRaises(ValueError, d1.genhash, "password", lph) + + # encrypt + self.assertEqual(d1.encrypt("password"), dph) + + # identify + 
self.assertTrue(d1.identify(dph)) + self.assertFalse(d1.identify(lph)) + + # verify + self.assertRaises(ValueError, d1.verify, "password", lph) + self.assertTrue(d1.verify("password", dph)) + + def test_12_ident(self): + # test ident is proxied + h = uh.PrefixWrapper("h2", "ldap_md5", "{XXX}") + self.assertEqual(h.ident, u("{XXX}{MD5}")) + self.assertIs(h.ident_values, None) + + # test lack of ident means no proxy + h = uh.PrefixWrapper("h2", "des_crypt", "{XXX}") + self.assertIs(h.ident, None) + self.assertIs(h.ident_values, None) + + # test orig_prefix disabled ident proxy + h = uh.PrefixWrapper("h1", "ldap_md5", "{XXX}", "{MD5}") + self.assertIs(h.ident, None) + self.assertIs(h.ident_values, None) + + # test custom ident overrides default + h = uh.PrefixWrapper("h3", "ldap_md5", "{XXX}", ident="{X") + self.assertEqual(h.ident, u("{X")) + self.assertIs(h.ident_values, None) + + # test custom ident must match + h = uh.PrefixWrapper("h3", "ldap_md5", "{XXX}", ident="{XXX}A") + self.assertRaises(ValueError, uh.PrefixWrapper, "h3", "ldap_md5", + "{XXX}", ident="{XY") + self.assertRaises(ValueError, uh.PrefixWrapper, "h3", "ldap_md5", + "{XXX}", ident="{XXXX") + + # test ident_values is proxied + h = uh.PrefixWrapper("h4", "phpass", "{XXX}") + self.assertIs(h.ident, None) + self.assertEqual(h.ident_values, [ u("{XXX}$P$"), u("{XXX}$H$") ]) + + # test ident=True means use prefix even if hash has no ident. + h = uh.PrefixWrapper("h5", "des_crypt", "{XXX}", ident=True) + self.assertEqual(h.ident, u("{XXX}")) + self.assertIs(h.ident_values, None) + + # ... but requires prefix + self.assertRaises(ValueError, uh.PrefixWrapper, "h6", "des_crypt", ident=True) + + # orig_prefix + HasManyIdent - warning + with self.assertWarningList("orig_prefix.*may not work correctly"): + h = uh.PrefixWrapper("h7", "phpass", orig_prefix="$", prefix="?") + self.assertEqual(h.ident_values, None) # TODO: should output (u("?P$"), u("?H$"))) + self.assertEqual(h.ident, None) + + def test_13_repr(self): + "test repr()" + h = uh.PrefixWrapper("h2", "md5_crypt", "{XXX}", orig_prefix="$1$") + self.assertRegex(repr(h), + r"""(?x)^PrefixWrapper\( + ['"]h2['"],\s+ + ['"]md5_crypt['"],\s+ + prefix=u?["']{XXX}['"],\s+ + orig_prefix=u?["']\$1\$['"] + \)$""") + + def test_14_bad_hash(self): + "test orig_prefix sanity check" + # shoudl throw InvalidHashError if wrapped hash doesn't begin + # with orig_prefix. + h = uh.PrefixWrapper("h2", "md5_crypt", orig_prefix="$6$") + self.assertRaises(ValueError, h.encrypt, 'test') + +#============================================================================= +# sample algorithms - these serve as known quantities +# to test the unittests themselves, as well as other +# parts of passlib. they shouldn't be used as actual password schemes. 
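# Illustrative sketch, not part of the passlib sources above: the kind of
# usage the PrefixWrapper tests exercise. PrefixWrapper re-brands an existing
# handler by swapping its hash prefix; the names "my_md5" and "wrapped" here
# are arbitrary examples, everything else is the passlib API used in the tests.
import passlib.utils.handlers as uh
from passlib.hash import ldap_md5

# wrap ldap_md5, replacing its "{MD5}" marker with a custom "{XXX}" prefix
wrapped = uh.PrefixWrapper("my_md5", "ldap_md5", prefix="{XXX}", orig_prefix="{MD5}")

# ldap_md5 is unsalted, so its output is deterministic
assert ldap_md5.encrypt("password") == "{MD5}X03MO1qnZdYdgyfeuILPmQ=="
assert wrapped.encrypt("password") == "{XXX}X03MO1qnZdYdgyfeuILPmQ=="
assert wrapped.verify("password", "{XXX}X03MO1qnZdYdgyfeuILPmQ==")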
+#============================================================================= +class UnsaltedHash(uh.StaticHandler): + "test algorithm which lacks a salt" + name = "unsalted_test_hash" + checksum_chars = uh.LOWER_HEX_CHARS + checksum_size = 40 + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + data = b("boblious") + secret + return str_to_uascii(hashlib.sha1(data).hexdigest()) + +class SaltedHash(uh.HasSalt, uh.GenericHandler): + "test algorithm with a salt" + name = "salted_test_hash" + setting_kwds = ("salt",) + + min_salt_size = 2 + max_salt_size = 4 + checksum_size = 40 + salt_chars = checksum_chars = uh.LOWER_HEX_CHARS + + _hash_regex = re.compile(u("^@salt[0-9a-f]{42,44}$")) + + @classmethod + def from_string(cls, hash): + if not cls.identify(hash): + raise uh.exc.InvalidHashError(cls) + if isinstance(hash, bytes): + hash = hash.decode("ascii") + return cls(salt=hash[5:-40], checksum=hash[-40:]) + + _stub_checksum = u('0') * 40 + + def to_string(self): + hash = u("@salt%s%s") % (self.salt, self.checksum or self._stub_checksum) + return uascii_to_str(hash) + + def _calc_checksum(self, secret): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + data = self.salt.encode("ascii") + secret + self.salt.encode("ascii") + return str_to_uascii(hashlib.sha1(data).hexdigest()) + +#============================================================================= +# test sample algorithms - really a self-test of HandlerCase +#============================================================================= + +# TODO: provide data samples for algorithms +# (positive knowns, negative knowns, invalid identify) + +UPASS_TEMP = u('\u0399\u03c9\u03b1\u03bd\u03bd\u03b7\u03c2') + +class UnsaltedHashTest(HandlerCase): + handler = UnsaltedHash + + known_correct_hashes = [ + ("password", "61cfd32684c47de231f1f982c214e884133762c0"), + (UPASS_TEMP, '96b329d120b97ff81ada770042e44ba87343ad2b'), + ] + + def test_bad_kwds(self): + if not PY_MAX_25: + # annoyingly, py25's ``super().__init__()`` doesn't throw TypeError + # when passing unknown keywords to object. just ignoring + # this issue for now, since it's a minor border case. 
+ self.assertRaises(TypeError, UnsaltedHash, salt='x') + self.assertRaises(TypeError, UnsaltedHash.genconfig, rounds=1) + +class SaltedHashTest(HandlerCase): + handler = SaltedHash + + known_correct_hashes = [ + ("password", '@salt77d71f8fe74f314dac946766c1ac4a2a58365482c0'), + (UPASS_TEMP, '@salt9f978a9bfe360d069b0c13f2afecd570447407fa7e48'), + ] + + def test_bad_kwds(self): + self.assertRaises(TypeError, SaltedHash, + checksum=SaltedHash._stub_checksum, salt=None) + self.assertRaises(ValueError, SaltedHash, + checksum=SaltedHash._stub_checksum, salt='xxx') + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/test_win32.py b/passlib/tests/test_win32.py new file mode 100644 index 00000000..9b01752f --- /dev/null +++ b/passlib/tests/test_win32.py @@ -0,0 +1,51 @@ +"""tests for passlib.win32 -- (c) Assurance Technologies 2003-2009""" +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify +import warnings +# site +# pkg +from passlib.tests.utils import TestCase +# module +from passlib.utils.compat import u + +#============================================================================= +# +#============================================================================= +class UtilTest(TestCase): + "test util funcs in passlib.win32" + + ##test hashes from http://msdn.microsoft.com/en-us/library/cc245828(v=prot.10).aspx + ## among other places + + def setUp(self): + super(UtilTest, self).setUp() + warnings.filterwarnings("ignore", + "the 'passlib.win32' module is deprecated") + + def test_lmhash(self): + from passlib.win32 import raw_lmhash + for secret, hash in [ + ("OLDPASSWORD", u("c9b81d939d6fd80cd408e6b105741864")), + ("NEWPASSWORD", u('09eeab5aa415d6e4d408e6b105741864')), + ("welcome", u("c23413a8a1e7665faad3b435b51404ee")), + ]: + result = raw_lmhash(secret, hex=True) + self.assertEqual(result, hash) + + def test_nthash(self): + warnings.filterwarnings("ignore", + r"nthash\.raw_nthash\(\) is deprecated") + from passlib.win32 import raw_nthash + for secret, hash in [ + ("OLDPASSWORD", u("6677b2c394311355b54f25eec5bfacf5")), + ("NEWPASSWORD", u("256781a62031289d3c2c98c14f1efc8c")), + ]: + result = raw_nthash(secret, hex=True) + self.assertEqual(result, hash) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/tox_support.py b/passlib/tests/tox_support.py new file mode 100644 index 00000000..2072806d --- /dev/null +++ b/passlib/tests/tox_support.py @@ -0,0 +1,83 @@ +"""passlib.tests.tox_support - helper script for tox tests""" +#============================================================================= +# init script env +#============================================================================= +import os, sys +root_dir = os.path.join(os.path.dirname(__file__), os.pardir, os.pardir) +sys.path.insert(0, root_dir) + +#============================================================================= +# imports +#============================================================================= +# core +import re +import logging; log = logging.getLogger(__name__) +# site +# pkg +from passlib.utils.compat import print_ +# local +__all__ = [ +] + 
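# Illustrative sketch, not part of the passlib sources above: the NT hash
# vectors in test_win32.py are simply MD4 over the UTF-16-LE encoded password.
# Assumes the host's OpenSSL build still exposes md4 through hashlib.new();
# the helper name "nthash_sketch" is an arbitrary example.
import hashlib

def nthash_sketch(password):
    # NT hash = MD4(UTF-16-LE(password)), returned as lowercase hex
    return hashlib.new("md4", password.encode("utf-16-le")).hexdigest()

# e.g. nthash_sketch("OLDPASSWORD") is expected to reproduce
# "6677b2c394311355b54f25eec5bfacf5" from the test vectors above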
+#============================================================================= +# main +#============================================================================= +TH_PATH = "passlib.tests.test_handlers" + +def do_hash_tests(*args): + "return list of hash algorithm tests that match regexes" + if not args: + print(TH_PATH) + return + suffix = '' + args = list(args) + while True: + if args[0] == "--method": + suffix = '.' + args[1] + del args[:2] + else: + break + from passlib.tests import test_handlers + names = [TH_PATH + ":" + name + suffix for name in dir(test_handlers) + if not name.startswith("_") and any(re.match(arg,name) for arg in args)] + print_("\n".join(names)) + return not names + +def do_preset_tests(name): + "return list of preset test names" + if name == "django" or name == "django-hashes": + do_hash_tests("django_.*_test", "hex_md5_test") + if name == "django": + print_("passlib.tests.test_ext_django") + else: + raise ValueError("unknown name: %r" % name) + +def do_setup_gae(path, runtime): + "write fake GAE ``app.yaml`` to current directory so nosegae will work" + from passlib.tests.utils import set_file + set_file(os.path.join(path, "app.yaml"), """\ +application: fake-app +version: 2 +runtime: %s +api_version: 1 +threadsafe: no + +handlers: +- url: /.* + script: dummy.py + +libraries: +- name: django + version: "latest" +""" % runtime) + +def main(cmd, *args): + return globals()["do_" + cmd](*args) + +if __name__ == "__main__": + import sys + sys.exit(main(*sys.argv[1:]) or 0) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/tests/utils.py b/passlib/tests/utils.py new file mode 100644 index 00000000..b840aff0 --- /dev/null +++ b/passlib/tests/utils.py @@ -0,0 +1,2252 @@ +"""helpers for passlib unittests""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import logging; log = logging.getLogger(__name__) +import re +import os +import sys +import tempfile +import time +from passlib.exc import PasslibHashWarning +from passlib.utils.compat import PY27, PY_MIN_32, PY3, JYTHON +import warnings +from warnings import warn +# site +# pkg +from passlib.exc import MissingBackendError +import passlib.registry as registry +from passlib.tests.backports import TestCase as _TestCase, catch_warnings, skip, skipIf, skipUnless +from passlib.utils import has_rounds_info, has_salt_info, rounds_cost_values, \ + classproperty, rng, getrandstr, is_ascii_safe, to_native_str, \ + repeat_string, tick +from passlib.utils.compat import b, bytes, iteritems, irange, callable, \ + base_string_types, exc_err, u, unicode, PY2 +import passlib.utils.handlers as uh +# local +__all__ = [ + # util funcs + 'TEST_MODE', + 'set_file', 'get_file', + + # unit testing + 'TestCase', + 'HandlerCase', +] + +#============================================================================= +# environment detection +#============================================================================= +# figure out if we're running under GAE; +# some tests (e.g. FS writing) should be skipped. +# XXX: is there better way to do this? 
+try: + import google.appengine +except ImportError: + GAE = False +else: + GAE = True + +def ensure_mtime_changed(path): + "ensure file's mtime has changed" + # NOTE: this is hack to deal w/ filesystems whose mtime resolution is >= 1s, + # when a test needs to be sure the mtime changed after writing to the file. + last = os.path.getmtime(path) + while os.path.getmtime(path) == last: + time.sleep(0.1) + os.utime(path, None) + +def _get_timer_resolution(timer): + def sample(): + start = cur = timer() + while start == cur: + cur = timer() + return cur-start + return min(sample() for _ in range(3)) +TICK_RESOLUTION = _get_timer_resolution(tick) + +#============================================================================= +# test mode +#============================================================================= +_TEST_MODES = ["quick", "default", "full"] +_test_mode = _TEST_MODES.index(os.environ.get("PASSLIB_TEST_MODE", + "default").strip().lower()) + +def TEST_MODE(min=None, max=None): + """check if test for specified mode should be enabled. + + ``"quick"`` + run the bare minimum tests to ensure functionality. + variable-cost hashes are tested at their lowest setting. + hash algorithms are only tested against the backend that will + be used on the current host. no fuzz testing is done. + + ``"default"`` + same as ``"quick"``, except: hash algorithms are tested + at default levels, and a brief round of fuzz testing is done + for each hash. + + ``"full"`` + extra regression and internal tests are enabled, hash algorithms are tested + against all available backends, unavailable ones are mocked whre possible, + additional time is devoted to fuzz testing. + """ + if min and _test_mode < _TEST_MODES.index(min): + return False + if max and _test_mode > _TEST_MODES.index(max): + return False + return True + +#============================================================================= +# hash object inspection +#============================================================================= +def has_crypt_support(handler): + "check if host's crypt() supports this natively" + if hasattr(handler, "orig_prefix"): + # ignore wrapper classes + return False + return 'os_crypt' in getattr(handler, "backends", ()) and handler.has_backend("os_crypt") + +def has_relaxed_setting(handler): + "check if handler supports 'relaxed' kwd" + # FIXME: I've been lazy, should probably just add 'relaxed' kwd + # to all handlers that derive from GenericHandler + + # ignore wrapper classes for now.. though could introspec. 
+ if hasattr(handler, "orig_prefix"): + return False + + return 'relaxed' in handler.setting_kwds or issubclass(handler, + uh.GenericHandler) + +def has_active_backend(handler): + "return active backend for handler, if any" + if not hasattr(handler, "get_backend"): + return "builtin" + try: + return handler.get_backend() + except MissingBackendError: + return None + +def is_default_backend(handler, backend): + "check if backend is the default for source" + try: + orig = handler.get_backend() + except MissingBackendError: + return False + try: + return handler.set_backend("default") == backend + finally: + handler.set_backend(orig) + +class temporary_backend(object): + "temporarily set handler to specific backend" + def __init__(self, handler, backend=None): + self.handler = handler + self.backend = backend + + def __enter__(self): + orig = self._orig = self.handler.get_backend() + if self.backend: + self.handler.set_backend(self.backend) + return orig + + def __exit__(self, *exc_info): + self.handler.set_backend(self._orig) + +#============================================================================= +# misc helpers +#============================================================================= +def set_file(path, content): + "set file to specified bytes" + if isinstance(content, unicode): + content = content.encode("utf-8") + with open(path, "wb") as fh: + fh.write(content) + +def get_file(path): + "read file as bytes" + with open(path, "rb") as fh: + return fh.read() + +def tonn(source): + "convert native string to non-native string" + if not isinstance(source, str): + return source + elif PY3: + return source.encode("utf-8") + else: + try: + return source.decode("utf-8") + except UnicodeDecodeError: + return source.decode("latin-1") + +def limit(value, lower, upper): + if value < lower: + return lower + elif value > upper: + return upper + return value + +def randintgauss(lower, upper, mu, sigma): + "hack used by fuzz testing" + return int(limit(rng.normalvariate(mu, sigma), lower, upper)) + +def quicksleep(delay): + "because time.sleep() doesn't even have 10ms accuracy on some OSes" + start = tick() + while tick()-start < delay: + pass + +#============================================================================= +# custom test harness +#============================================================================= + +def patchAttr(test, obj, attr, value): + """monkeypatch object value, restoring original on cleanup""" + try: + orig = getattr(obj, attr) + except AttributeError: + def cleanup(): + try: + delattr(obj, attr) + except AttributeError: + pass + test.addCleanup(cleanup) + else: + test.addCleanup(setattr, obj, attr, orig) + setattr(obj, attr, value) + +class TestCase(_TestCase): + """passlib-specific test case class + + this class adds a number of features to the standard TestCase... 
+ * common prefix for all test descriptions + * resets warnings filter & registry for every test + * tweaks to message formatting + * __msg__ kwd added to assertRaises() + * suite of methods for matching against warnings + """ + #=================================================================== + # add various custom features + #=================================================================== + + #--------------------------------------------------------------- + # make it easy for test cases to add common prefix to shortDescription + #--------------------------------------------------------------- + + # string prepended to all tests in TestCase + descriptionPrefix = None + + def shortDescription(self): + "wrap shortDescription() method to prepend descriptionPrefix" + desc = super(TestCase, self).shortDescription() + prefix = self.descriptionPrefix + if prefix: + desc = "%s: %s" % (prefix, desc or str(self)) + return desc + + #--------------------------------------------------------------- + # hack things so nose and ut2 both skip subclasses who have + # "__unittest_skip=True" set, or whose names start with "_" + #--------------------------------------------------------------- + @classproperty + def __unittest_skip__(cls): + # NOTE: this attr is technically a unittest2 internal detail. + name = cls.__name__ + return name.startswith("_") or \ + getattr(cls, "_%s__unittest_skip" % name, False) + + # make this mirror nose's '__test__' attr + return not getattr(cls, "__test__", True) + + @classproperty + def __test__(cls): + # make nose just proxy __unittest_skip__ + return not cls.__unittest_skip__ + + # flag to skip *this* class + __unittest_skip = True + + #--------------------------------------------------------------- + # reset warning filters & registry before each test + #--------------------------------------------------------------- + + # flag to reset all warning filters & ignore state + resetWarningState = True + + def setUp(self): + super(TestCase, self).setUp() + self.setUpWarnings() + + def setUpWarnings(self): + "helper to init warning filters before subclass setUp()" + if self.resetWarningState: + ctx = reset_warnings() + ctx.__enter__() + self.addCleanup(ctx.__exit__) + + #--------------------------------------------------------------- + # tweak message formatting so longMessage mode is only enabled + # if msg ends with ":", and turn on longMessage by default. 
+ #--------------------------------------------------------------- + longMessage = True + + def _formatMessage(self, msg, std): + if self.longMessage and msg and msg.rstrip().endswith(":"): + return '%s %s' % (msg.rstrip(), std) + else: + return msg or std + + #--------------------------------------------------------------- + # override assertRaises() to support '__msg__' keyword + #--------------------------------------------------------------- + def assertRaises(self, _exc_type, _callable=None, *args, **kwds): + msg = kwds.pop("__msg__", None) + if _callable is None: + # FIXME: this ignores 'msg' + return super(TestCase, self).assertRaises(_exc_type, None, + *args, **kwds) + try: + result = _callable(*args, **kwds) + except _exc_type: + return + std = "function returned %r, expected it to raise %r" % (result, + _exc_type) + raise self.failureException(self._formatMessage(msg, std)) + + #--------------------------------------------------------------- + # forbid a bunch of deprecated aliases so I stop using them + #--------------------------------------------------------------- + def assertEquals(self, *a, **k): + raise AssertionError("this alias is deprecated by unittest2") + assertNotEquals = assertRegexMatches = assertEquals + + #=================================================================== + # custom methods for matching warnings + #=================================================================== + def assertWarning(self, warning, + message_re=None, message=None, + category=None, + filename_re=None, filename=None, + lineno=None, + msg=None, + ): + """check if warning matches specified parameters. + 'warning' is the instance of Warning to match against; + can also be instance of WarningMessage (as returned by catch_warnings). + """ + # check input type + if hasattr(warning, "category"): + # resolve WarningMessage -> Warning, but preserve original + wmsg = warning + warning = warning.message + else: + # no original WarningMessage, passed raw Warning + wmsg = None + + # tests that can use a warning instance or WarningMessage object + if message: + self.assertEqual(str(warning), message, msg) + if message_re: + self.assertRegex(str(warning), message_re, msg) + if category: + self.assertIsInstance(warning, category, msg) + + # tests that require a WarningMessage object + if filename or filename_re: + if not wmsg: + raise TypeError("matching on filename requires a " + "WarningMessage instance") + real = wmsg.filename + if real.endswith(".pyc") or real.endswith(".pyo"): + # FIXME: should use a stdlib call to resolve this back + # to module's original filename. + real = real[:-1] + if filename: + self.assertEqual(real, filename, msg) + if filename_re: + self.assertRegex(real, filename_re, msg) + if lineno: + if not wmsg: + raise TypeError("matching on lineno requires a " + "WarningMessage instance") + self.assertEqual(wmsg.lineno, lineno, msg) + + class _AssertWarningList(catch_warnings): + """context manager for assertWarningList()""" + def __init__(self, case, **kwds): + self.case = case + self.kwds = kwds + self.__super = super(TestCase._AssertWarningList, self) + self.__super.__init__(record=True) + + def __enter__(self): + self.log = self.__super.__enter__() + + def __exit__(self, *exc_info): + self.__super.__exit__(*exc_info) + if not exc_info: + self.case.assertWarningList(self.log, **self.kwds) + + def assertWarningList(self, wlist=None, desc=None, msg=None): + """check that warning list (e.g. 
from catch_warnings) matches pattern""" + if desc is None: + assert wlist is not None + return self._AssertWarningList(self, desc=wlist, msg=msg) + # TODO: make this display better diff of *which* warnings did not match + assert desc is not None + if not isinstance(desc, (list,tuple)): + desc = [desc] + for idx, entry in enumerate(desc): + if isinstance(entry, str): + entry = dict(message_re=entry) + elif isinstance(entry, type) and issubclass(entry, Warning): + entry = dict(category=entry) + elif not isinstance(entry, dict): + raise TypeError("entry must be str, warning, or dict") + try: + data = wlist[idx] + except IndexError: + break + self.assertWarning(data, msg=msg, **entry) + else: + if len(wlist) == len(desc): + return + std = "expected %d warnings, found %d: wlist=%s desc=%r" % \ + (len(desc), len(wlist), self._formatWarningList(wlist), desc) + raise self.failureException(self._formatMessage(msg, std)) + + def consumeWarningList(self, wlist, desc=None, *args, **kwds): + """[deprecated] assertWarningList() variant that clears list afterwards""" + if desc is None: + desc = [] + self.assertWarningList(wlist, desc, *args, **kwds) + del wlist[:] + + def _formatWarning(self, entry): + tail = "" + if hasattr(entry, "message"): + # WarningMessage instance. + tail = " filename=%r lineno=%r" % (entry.filename, entry.lineno) + if entry.line: + tail += " line=%r" % (entry.line,) + entry = entry.message + cls = type(entry) + return "<%s.%s message=%r%s>" % (cls.__module__, cls.__name__, + str(entry), tail) + + def _formatWarningList(self, wlist): + return "[%s]" % ", ".join(self._formatWarning(entry) for entry in wlist) + + #=================================================================== + # capability tests + #=================================================================== + def require_stringprep(self): + "helper to skip test if stringprep is missing" + from passlib.utils import stringprep + if not stringprep: + from passlib.utils import _stringprep_missing_reason + raise self.skipTest("not available - stringprep module is " + + _stringprep_missing_reason) + + def require_TEST_MODE(self, level): + "skip test for all PASSLIB_TEST_MODE values below " + if not TEST_MODE(level): + raise self.skipTest("requires >= %r test mode" % level) + + def require_writeable_filesystem(self): + "skip test if writeable FS not available" + if GAE: + return self.skipTest("GAE doesn't offer read/write filesystem access") + + #=================================================================== + # other + #=================================================================== + _mktemp_queue = None + + def mktemp(self, *args, **kwds): + "create temp file that's cleaned up at end of test" + self.require_writeable_filesystem() + fd, path = tempfile.mkstemp(*args, **kwds) + os.close(fd) + queue = self._mktemp_queue + if queue is None: + queue = self._mktemp_queue = [] + def cleaner(): + for path in queue: + if os.path.exists(path): + os.remove(path) + del queue[:] + self.addCleanup(cleaner) + queue.append(path) + return path + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# other unittest helpers +#============================================================================= +RESERVED_BACKEND_NAMES = ["any", "default"] + +class HandlerCase(TestCase): + """base class for testing password hash handlers (esp passlib.utils.handlers 
subclasses) + + In order to use this to test a handler, + create a subclass will all the appropriate attributes + filled as listed in the example below, + and run the subclass via unittest. + + .. todo:: + + Document all of the options HandlerCase offers. + + .. note:: + + This is subclass of :class:`unittest.TestCase` + (or :class:`unittest2.TestCase` if available). + """ + #=================================================================== + # class attrs - should be filled in by subclass + #=================================================================== + + #--------------------------------------------------------------- + # handler setup + #--------------------------------------------------------------- + + # handler class to test [required] + handler = None + + # if set, run tests against specified backend + backend = None + + #--------------------------------------------------------------- + # test vectors + #--------------------------------------------------------------- + + # list of (secret, hash) tuples which are known to be correct + known_correct_hashes = [] + + # list of (config, secret, hash) tuples are known to be correct + known_correct_configs = [] + + # list of (alt_hash, secret, hash) tuples, where alt_hash is a hash + # using an alternate representation that should be recognized and verify + # correctly, but should be corrected to match hash when passed through + # genhash() + known_alternate_hashes = [] + + # hashes so malformed they aren't even identified properly + known_unidentified_hashes = [] + + # hashes which are identifiabled but malformed - they should identify() + # as True, but cause an error when passed to genhash/verify. + known_malformed_hashes = [] + + # list of (handler name, hash) pairs for other algorithm's hashes that + # handler shouldn't identify as belonging to it this list should generally + # be sufficient (if handler name in list, that entry will be skipped) + known_other_hashes = [ + ('des_crypt', '6f8c114b58f2c'), + ('md5_crypt', '$1$dOHYPKoP$tnxS1T8Q6VVn3kpV8cN6o.'), + ('sha512_crypt', "$6$rounds=123456$asaltof16chars..$BtCwjqMJGx5hrJhZywW" + "vt0RLE8uZ4oPwcelCjmw2kSYu.Ec6ycULevoBK25fs2xXgMNrCzIMVcgEJAstJeonj1"), + ] + + # passwords used to test basic encrypt behavior - generally + # don't need to be overidden. + stock_passwords = [ + u("test"), + u("\u20AC\u00A5$"), + b('\xe2\x82\xac\xc2\xa5$') + ] + + #--------------------------------------------------------------- + # option flags + #--------------------------------------------------------------- + + # maximum number of chars which hash will include in digest. + # ``None`` (the default) indicates the hash uses ALL of the password. + secret_size = None + + # whether hash is case insensitive + # True, False, or special value "verify-only" (which indicates + # hash contains case-sensitive portion, but verifies is case-insensitive) + secret_case_insensitive = False + + # flag if scheme accepts ALL hash strings (e.g. plaintext) + accepts_all_hashes = False + + # flag indicating "disabled account" handler (e.g. unix_disabled) + is_disabled_handler = False + + # flag/hack to filter PasslibHashWarning issued by test_72_configs() + filter_config_warnings = False + + # forbid certain characters in passwords + @classproperty + def forbidden_characters(cls): + # anything that supports crypt() interface should forbid null chars, + # since crypt() uses null-terminated strings. 
+ if 'os_crypt' in getattr(cls.handler, "backends", ()): + return b("\x00") + return None + + #=================================================================== + # internal class attrs + #=================================================================== + __unittest_skip = True + + @property + def descriptionPrefix(self): + handler = self.handler + name = handler.name + if hasattr(handler, "get_backend"): + name += " (%s backend)" % (handler.get_backend(),) + return name + + #=================================================================== + # internal instance attrs + #=================================================================== + # indicates safe_crypt() has been patched to use another backend of handler. + using_patched_crypt = False + + #=================================================================== + # support methods + #=================================================================== + + #--------------------------------------------------------------- + # configuration helpers + #--------------------------------------------------------------- + @property + def supports_config_string(self): + return self.do_genconfig() is not None + + @classmethod + def iter_known_hashes(cls): + "iterate through known (secret, hash) pairs" + for secret, hash in cls.known_correct_hashes: + yield secret, hash + for config, secret, hash in cls.known_correct_configs: + yield secret, hash + for alt, secret, hash in cls.known_alternate_hashes: + yield secret, hash + + def get_sample_hash(self): + "test random sample secret/hash pair" + known = list(self.iter_known_hashes()) + return rng.choice(known) + + #--------------------------------------------------------------- + # test helpers + #--------------------------------------------------------------- + def check_verify(self, secret, hash, msg=None, negate=False): + "helper to check verify() outcome, honoring is_disabled_handler" + result = self.do_verify(secret, hash) + self.assertTrue(result is True or result is False, + "verify() returned non-boolean value: %r" % (result,)) + if self.is_disabled_handler or negate: + if not result: + return + if not msg: + msg = ("verify incorrectly returned True: secret=%r, hash=%r" % + (secret, hash)) + raise self.failureException(msg) + else: + if result: + return + if not msg: + msg = "verify failed: secret=%r, hash=%r" % (secret, hash) + raise self.failureException(msg) + + def check_returned_native_str(self, result, func_name): + self.assertIsInstance(result, str, + "%s() failed to return native string: %r" % (func_name, result,)) + + #--------------------------------------------------------------- + # PasswordHash helpers - wraps all calls to PasswordHash api, + # so that subclasses can fill in defaults and account for other specialized behavior + #--------------------------------------------------------------- + def populate_settings(self, kwds): + "subclassable method to populate default settings" + # use lower rounds settings for certain test modes + handler = self.handler + if 'rounds' in handler.setting_kwds and 'rounds' not in kwds: + mn = handler.min_rounds + df = handler.default_rounds + if TEST_MODE(max="quick"): + # use minimum rounds for quick mode + kwds['rounds'] = max(3, mn) + else: + # use default/16 otherwise + factor = 3 + if getattr(handler, "rounds_cost", None) == "log2": + df -= factor + else: + df = df//(1<= 1") + + # check min_salt_size + if cls.min_salt_size < 0: + raise AssertionError("min_salt_chars must be >= 0") + if mx_set and cls.min_salt_size > cls.max_salt_size: 
+ raise AssertionError("min_salt_chars must be <= max_salt_chars") + + # check default_salt_size + if cls.default_salt_size < cls.min_salt_size: + raise AssertionError("default_salt_size must be >= min_salt_size") + if mx_set and cls.default_salt_size > cls.max_salt_size: + raise AssertionError("default_salt_size must be <= max_salt_size") + + # check for 'salt_size' keyword + if 'salt_size' not in cls.setting_kwds and \ + (not mx_set or cls.min_salt_size < cls.max_salt_size): + # NOTE: only bothering to issue warning if default_salt_size + # isn't maxed out + if (not mx_set or cls.default_salt_size < cls.max_salt_size): + warn("%s: hash handler supports range of salt sizes, " + "but doesn't offer 'salt_size' setting" % (cls.name,)) + + # check salt_chars & default_salt_chars + if cls.salt_chars: + if not cls.default_salt_chars: + raise AssertionError("default_salt_chars must not be empty") + if any(c not in cls.salt_chars for c in cls.default_salt_chars): + raise AssertionError("default_salt_chars must be subset of salt_chars: %r not in salt_chars" % (c,)) + else: + if not cls.default_salt_chars: + raise AssertionError("default_salt_chars MUST be specified if salt_chars is empty") + + @property + def salt_bits(self): + "calculate number of salt bits in hash" + # XXX: replace this with bitsize() method? + handler = self.handler + assert has_salt_info(handler), "need explicit bit-size for " + handler.name + from math import log + # FIXME: this may be off for case-insensitive hashes, but that accounts + # for ~1 bit difference, which is good enough for test_11() + return int(handler.default_salt_size * + log(len(handler.default_salt_chars), 2)) + + def test_11_unique_salt(self): + "test encrypt() / genconfig() creates new salt each time" + self.require_salt() + # odds of picking 'n' identical salts at random is '(.5**salt_bits)**n'. + # we want to pick the smallest N needed s.t. odds are <1/1000, just + # to eliminate false-positives. which works out to n>7-salt_bits. + # n=1 is sufficient for most hashes, but a few border cases (e.g. + # cisco_type7) have < 7 bits of salt, requiring more. + samples = max(1,7-self.salt_bits) + def sampler(func): + value1 = func() + for i in irange(samples): + value2 = func() + if value1 != value2: + return + raise self.failureException("failed to find different salt after " + "%d samples" % (samples,)) + if self.do_genconfig() is not None: # cisco_type7 has salt & no config + sampler(self.do_genconfig) + sampler(lambda : self.do_encrypt("stub")) + + def test_12_min_salt_size(self): + "test encrypt() / genconfig() honors min_salt_size" + self.require_salt_info() + + handler = self.handler + salt_char = handler.salt_chars[0:1] + min_size = handler.min_salt_size + + # + # check min is accepted + # + s1 = salt_char * min_size + self.do_genconfig(salt=s1) + + self.do_encrypt('stub', salt_size=min_size) + + # + # check min-1 is rejected + # + if min_size > 0: + self.assertRaises(ValueError, self.do_genconfig, + salt=s1[:-1]) + + self.assertRaises(ValueError, self.do_encrypt, 'stub', + salt_size=min_size-1) + + def test_13_max_salt_size(self): + "test encrypt() / genconfig() honors max_salt_size" + self.require_salt_info() + + handler = self.handler + max_size = handler.max_salt_size + salt_char = handler.salt_chars[0:1] + + if max_size is None: + # + # if it's not set, salt should never be truncated; so test it + # with an unreasonably large salt. 
+ # + s1 = salt_char * 1024 + c1 = self.do_genconfig(salt=s1) + c2 = self.do_genconfig(salt=s1 + salt_char) + self.assertNotEqual(c1, c2) + + self.do_encrypt('stub', salt_size=1024) + + else: + # + # check max size is accepted + # + s1 = salt_char * max_size + c1 = self.do_genconfig(salt=s1) + + self.do_encrypt('stub', salt_size=max_size) + + # + # check max size + 1 is rejected + # + s2 = s1 + salt_char + self.assertRaises(ValueError, self.do_genconfig, salt=s2) + + self.assertRaises(ValueError, self.do_encrypt, 'stub', + salt_size=max_size+1) + + # + # should accept too-large salt in relaxed mode + # + if has_relaxed_setting(handler): + with catch_warnings(record=True): # issues passlibhandlerwarning + c2 = self.do_genconfig(salt=s2, relaxed=True) + self.assertEqual(c2, c1) + + # + # if min_salt supports it, check smaller than mx is NOT truncated + # + if handler.min_salt_size < max_size: + c3 = self.do_genconfig(salt=s1[:-1]) + self.assertNotEqual(c3, c1) + + # whether salt should be passed through bcrypt repair function + fuzz_salts_need_bcrypt_repair = False + + def prepare_salt(self, salt): + "prepare generated salt" + if self.fuzz_salts_need_bcrypt_repair: + from passlib.utils import bcrypt64 + salt = bcrypt64.repair_unused(salt) + return salt + + def test_14_salt_chars(self): + "test genconfig() honors salt_chars" + self.require_salt_info() + + handler = self.handler + mx = handler.max_salt_size + mn = handler.min_salt_size + cs = handler.salt_chars + raw = isinstance(cs, bytes) + + # make sure all listed chars are accepted + chunk = mx or 32 + for i in irange(0,len(cs),chunk): + salt = cs[i:i+chunk] + if len(salt) < mn: + salt = (salt*(mn//len(salt)+1))[:chunk] + salt = self.prepare_salt(salt) + self.do_genconfig(salt=salt) + + # check some invalid salt chars, make sure they're rejected + source = u('\x00\xff') + if raw: + source = source.encode("latin-1") + chunk = max(mn, 1) + for c in source: + if c not in cs: + self.assertRaises(ValueError, self.do_genconfig, salt=c*chunk, + __msg__="invalid salt char %r:" % (c,)) + + @property + def salt_type(self): + "hack to determine salt keyword's datatype" + # NOTE: cisco_type7 uses 'int' + if getattr(self.handler, "_salt_is_bytes", False): + return bytes + else: + return unicode + + def test_15_salt_type(self): + "test non-string salt values" + self.require_salt() + salt_type = self.salt_type + + # should always throw error for random class. + class fake(object): + pass + self.assertRaises(TypeError, self.do_encrypt, 'stub', salt=fake()) + + # unicode should be accepted only if salt_type is unicode. + if salt_type is not unicode: + self.assertRaises(TypeError, self.do_encrypt, 'stub', salt=u('x')) + + # bytes should be accepted only if salt_type is bytes, + # OR if salt type is unicode and running PY2 - to allow native strings. 
+ if not (salt_type is bytes or (PY2 and salt_type is unicode)): + self.assertRaises(TypeError, self.do_encrypt, 'stub', salt=b('x')) + + #=================================================================== + # rounds + #=================================================================== + def require_rounds_info(self): + if not has_rounds_info(self.handler): + raise self.skipTest("handler lacks rounds attributes") + + def test_20_optional_rounds_attributes(self): + "validate optional rounds attributes" + self.require_rounds_info() + + cls = self.handler + AssertionError = self.failureException + + # check max_rounds + if cls.max_rounds is None: + raise AssertionError("max_rounds not specified") + if cls.max_rounds < 1: + raise AssertionError("max_rounds must be >= 1") + + # check min_rounds + if cls.min_rounds < 0: + raise AssertionError("min_rounds must be >= 0") + if cls.min_rounds > cls.max_rounds: + raise AssertionError("min_rounds must be <= max_rounds") + + # check default_rounds + if cls.default_rounds is not None: + if cls.default_rounds < cls.min_rounds: + raise AssertionError("default_rounds must be >= min_rounds") + if cls.default_rounds > cls.max_rounds: + raise AssertionError("default_rounds must be <= max_rounds") + + # check rounds_cost + if cls.rounds_cost not in rounds_cost_values: + raise AssertionError("unknown rounds cost constant: %r" % (cls.rounds_cost,)) + + def test_21_rounds_limits(self): + "test encrypt() / genconfig() honors rounds limits" + self.require_rounds_info() + handler = self.handler + min_rounds = handler.min_rounds + + # check min is accepted + self.do_genconfig(rounds=min_rounds) + self.do_encrypt('stub', rounds=min_rounds) + + # check min-1 is rejected + self.assertRaises(ValueError, self.do_genconfig, rounds=min_rounds-1) + self.assertRaises(ValueError, self.do_encrypt, 'stub', + rounds=min_rounds-1) + + # TODO: check relaxed mode clips min-1 + + # handle max rounds + max_rounds = handler.max_rounds + if max_rounds is None: + # check large value is accepted + self.do_genconfig(rounds=(1<<31)-1) + else: + # check max is accepted + self.do_genconfig(rounds=max_rounds) + + # check max+1 is rejected + self.assertRaises(ValueError, self.do_genconfig, + rounds=max_rounds+1) + self.assertRaises(ValueError, self.do_encrypt, 'stub', + rounds=max_rounds+1) + + # TODO: check relaxed mode clips max+1 + + #=================================================================== + # idents + #=================================================================== + def test_30_HasManyIdents(self): + "validate HasManyIdents configuration" + cls = self.handler + if not isinstance(cls, type) or not issubclass(cls, uh.HasManyIdents): + raise self.skipTest("handler doesn't derive from HasManyIdents") + + # check settings + self.assertTrue('ident' in cls.setting_kwds) + + # check ident_values list + for value in cls.ident_values: + self.assertIsInstance(value, unicode, + "cls.ident_values must be unicode:") + self.assertTrue(len(cls.ident_values)>1, + "cls.ident_values must have 2+ elements:") + + # check default_ident value + self.assertIsInstance(cls.default_ident, unicode, + "cls.default_ident must be unicode:") + self.assertTrue(cls.default_ident in cls.ident_values, + "cls.default_ident must specify member of cls.ident_values") + + # check optional aliases list + if cls.ident_aliases: + for alias, ident in iteritems(cls.ident_aliases): + self.assertIsInstance(alias, unicode, + "cls.ident_aliases keys must be unicode:") # XXX: allow ints? 
+ self.assertIsInstance(ident, unicode, + "cls.ident_aliases values must be unicode:") + self.assertTrue(ident in cls.ident_values, + "cls.ident_aliases must map to cls.ident_values members: %r" % (ident,)) + + # check constructor validates ident correctly. + handler = cls + hash = self.get_sample_hash()[1] + kwds = handler.parsehash(hash) + del kwds['ident'] + + # ... accepts good ident + handler(ident=cls.default_ident, **kwds) + + # ... requires ident w/o defaults + self.assertRaises(TypeError, handler, **kwds) + + # ... supplies default ident + handler(use_defaults=True, **kwds) + + # ... rejects bad ident + self.assertRaises(ValueError, handler, ident='xXx', **kwds) + + # TODO: check various supported idents + + #=================================================================== + # passwords + #=================================================================== + def test_60_secret_size(self): + "test password size limits" + sc = self.secret_size + base = "too many secrets" # 16 chars + alt = 'x' # char that's not in base string + if sc is not None: + # hash only counts the first characters; eg: bcrypt, des-crypt + + # create & hash string that's exactly sc+1 chars + secret = repeat_string(base, sc+1) + hash = self.do_encrypt(secret) + + # check sc value isn't too large by verifying that sc-1'th char + # affects hash + secret2 = secret[:-2] + alt + secret[-1] + self.assertFalse(self.do_verify(secret2, hash), + "secret_size value is too large") + + # check sc value isn't too small by verifying adding sc'th char + # *doesn't* affect hash + secret3 = secret[:-1] + alt + self.assertTrue(self.do_verify(secret3, hash), + "secret_size value is too small") + + else: + # hash counts all characters; e.g. md5-crypt + + # NOTE: this doesn't do an exhaustive search to verify algorithm + # doesn't have some cutoff point, it just tries + # 1024-character string, and alters the last char. + # as long as algorithm doesn't clip secret at point <1024, + # the new secret shouldn't verify. 
+ secret = base * 64 + hash = self.do_encrypt(secret) + secret2 = secret[:-1] + alt + self.assertFalse(self.do_verify(secret2, hash), + "full password not used in digest") + + def test_61_secret_case_sensitive(self): + "test password case sensitivity" + hash_insensitive = self.secret_case_insensitive is True + verify_insensitive = self.secret_case_insensitive in [True, + "verify-only"] + + lower = 'test' + upper = 'TEST' + h1 = self.do_encrypt(lower) + if verify_insensitive and not self.is_disabled_handler: + self.assertTrue(self.do_verify(upper, h1), + "verify() should not be case sensitive") + else: + self.assertFalse(self.do_verify(upper, h1), + "verify() should be case sensitive") + + h2 = self.do_genhash(upper, h1) + if hash_insensitive or self.is_disabled_handler: + self.assertEqual(h2, h1, + "genhash() should not be case sensitive") + else: + self.assertNotEqual(h2, h1, + "genhash() should be case sensitive") + + def test_62_secret_border(self): + "test non-string passwords are rejected" + hash = self.get_sample_hash()[1] + + # secret=None + self.assertRaises(TypeError, self.do_encrypt, None) + self.assertRaises(TypeError, self.do_genhash, None, hash) + self.assertRaises(TypeError, self.do_verify, None, hash) + + # secret=int (picked as example of entirely wrong class) + self.assertRaises(TypeError, self.do_encrypt, 1) + self.assertRaises(TypeError, self.do_genhash, 1, hash) + self.assertRaises(TypeError, self.do_verify, 1, hash) + + def test_63_large_secret(self): + "test MAX_PASSWORD_SIZE is enforced" + from passlib.exc import PasswordSizeError + from passlib.utils import MAX_PASSWORD_SIZE + secret = '.' * (1+MAX_PASSWORD_SIZE) + hash = self.get_sample_hash()[1] + self.assertRaises(PasswordSizeError, self.do_genhash, secret, hash) + self.assertRaises(PasswordSizeError, self.do_encrypt, secret) + self.assertRaises(PasswordSizeError, self.do_verify, secret, hash) + + def test_64_forbidden_chars(self): + "test forbidden characters not allowed in password" + chars = self.forbidden_characters + if not chars: + raise self.skipTest("none listed") + base = u('stub') + if isinstance(chars, bytes): + from passlib.utils.compat import iter_byte_chars + chars = iter_byte_chars(chars) + base = base.encode("ascii") + for c in chars: + self.assertRaises(ValueError, self.do_encrypt, base + c + base) + + #=================================================================== + # check identify(), verify(), genhash() against test vectors + #=================================================================== + def is_secret_8bit(self, secret): + secret = self.populate_context(secret, {}) + return not is_ascii_safe(secret) + + def test_70_hashes(self): + "test known hashes" + # sanity check + self.assertTrue(self.known_correct_hashes or self.known_correct_configs, + "test must set at least one of 'known_correct_hashes' " + "or 'known_correct_configs'") + + # run through known secret/hash pairs + saw8bit = False + for secret, hash in self.iter_known_hashes(): + if self.is_secret_8bit(secret): + saw8bit = True + + # hash should be positively identified by handler + self.assertTrue(self.do_identify(hash), + "identify() failed to identify hash: %r" % (hash,)) + + # secret should verify successfully against hash + self.check_verify(secret, hash, "verify() of known hash failed: " + "secret=%r, hash=%r" % (secret, hash)) + + # genhash() should reproduce same hash + result = self.do_genhash(secret, hash) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + 
self.assertEqual(result, hash, "genhash() failed to reproduce " + "known hash: secret=%r, hash=%r: result=%r" % + (secret, hash, result)) + + # would really like all handlers to have at least one 8-bit test vector + if not saw8bit: + warn("%s: no 8-bit secrets tested" % self.__class__) + + def test_71_alternates(self): + "test known alternate hashes" + if not self.known_alternate_hashes: + raise self.skipTest("no alternate hashes provided") + + for alt, secret, hash in self.known_alternate_hashes: + + # hash should be positively identified by handler + self.assertTrue(self.do_identify(hash), + "identify() failed to identify alternate hash: %r" % + (hash,)) + + # secret should verify successfully against hash + self.check_verify(secret, alt, "verify() of known alternate hash " + "failed: secret=%r, hash=%r" % (secret, alt)) + + # genhash() should reproduce canonical hash + result = self.do_genhash(secret, alt) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + self.assertEqual(result, hash, "genhash() failed to normalize " + "known alternate hash: secret=%r, alt=%r, hash=%r: " + "result=%r" % (secret, alt, hash, result)) + + def test_72_configs(self): + "test known config strings" + # special-case handlers without settings + if not self.handler.setting_kwds: + self.assertFalse(self.known_correct_configs, + "handler should not have config strings") + raise self.skipTest("hash has no settings") + + if not self.known_correct_configs: + # XXX: make this a requirement? + raise self.skipTest("no config strings provided") + + # make sure config strings work (hashes in list tested in test_70) + if self.filter_config_warnings: + warnings.filterwarnings("ignore", category=PasslibHashWarning) + for config, secret, hash in self.known_correct_configs: + + # config should be positively identified by handler + self.assertTrue(self.do_identify(config), + "identify() failed to identify known config string: %r" % + (config,)) + + # verify() should throw error for config strings. + self.assertRaises(ValueError, self.do_verify, secret, config, + __msg__="verify() failed to reject config string: %r" % + (config,)) + + # genhash() should reproduce hash from config. 
+ result = self.do_genhash(secret, config) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + self.assertEqual(result, hash, "genhash() failed to reproduce " + "known hash from config: secret=%r, config=%r, hash=%r: " + "result=%r" % (secret, config, hash, result)) + + def test_73_unidentified(self): + "test known unidentifiably-mangled strings" + if not self.known_unidentified_hashes: + raise self.skipTest("no unidentified hashes provided") + for hash in self.known_unidentified_hashes: + + # identify() should reject these + self.assertFalse(self.do_identify(hash), + "identify() incorrectly identified known unidentifiable " + "hash: %r" % (hash,)) + + # verify() should throw error + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__= "verify() failed to throw error for unidentifiable " + "hash: %r" % (hash,)) + + # genhash() should throw error + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__= "genhash() failed to throw error for unidentifiable " + "hash: %r" % (hash,)) + + def test_74_malformed(self): + "test known identifiable-but-malformed strings" + if not self.known_malformed_hashes: + raise self.skipTest("no malformed hashes provided") + for hash in self.known_malformed_hashes: + + # identify() should accept these + self.assertTrue(self.do_identify(hash), + "identify() failed to identify known malformed " + "hash: %r" % (hash,)) + + # verify() should throw error + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__= "verify() failed to throw error for malformed " + "hash: %r" % (hash,)) + + # genhash() should throw error + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__= "genhash() failed to throw error for malformed " + "hash: %r" % (hash,)) + + def test_75_foreign(self): + "test known foreign hashes" + if self.accepts_all_hashes: + raise self.skipTest("not applicable") + if not self.known_other_hashes: + raise self.skipTest("no foreign hashes provided") + for name, hash in self.known_other_hashes: + # NOTE: most tests use default list of foreign hashes, + # so they may include ones belonging to that hash... + # hence the 'own' logic. 
+ + if name == self.handler.name: + # identify should accept these + self.assertTrue(self.do_identify(hash), + "identify() failed to identify known hash: %r" % (hash,)) + + # verify & genhash should NOT throw error + self.do_verify('stub', hash) + result = self.do_genhash('stub', hash) + self.assertIsInstance(result, str, + "genhash() failed to return native string: %r" % (result,)) + + else: + # identify should reject these + self.assertFalse(self.do_identify(hash), + "identify() incorrectly identified hash belonging to " + "%s: %r" % (name, hash)) + + # verify should throw error + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__= "verify() failed to throw error for hash " + "belonging to %s: %r" % (name, hash,)) + + # genhash() should throw error + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__= "genhash() failed to throw error for hash " + "belonging to %s: %r" % (name, hash)) + + def test_76_hash_border(self): + "test non-string hashes are rejected" + # + # test hash=None is rejected (except if config=None) + # + self.assertRaises(TypeError, self.do_identify, None) + self.assertRaises(TypeError, self.do_verify, 'stub', None) + if self.supports_config_string: + self.assertRaises(TypeError, self.do_genhash, 'stub', None) + else: + result = self.do_genhash('stub', None) + self.check_returned_native_str(result, "genhash") + + # + # test hash=int is rejected (picked as example of entirely wrong type) + # + self.assertRaises(TypeError, self.do_identify, 1) + self.assertRaises(TypeError, self.do_verify, 'stub', 1) + self.assertRaises(TypeError, self.do_genhash, 'stub', 1) + + # + # test hash='' is rejected for all but the plaintext hashes + # + for hash in [u(''), b('')]: + if self.accepts_all_hashes: + # then it accepts empty string as well. + self.assertTrue(self.do_identify(hash)) + self.do_verify('stub', hash) + result = self.do_genhash('stub', hash) + self.check_returned_native_str(result, "genhash") + else: + # otherwise it should reject them + self.assertFalse(self.do_identify(hash), + "identify() incorrectly identified empty hash") + self.assertRaises(ValueError, self.do_verify, 'stub', hash, + __msg__="verify() failed to reject empty hash") + self.assertRaises(ValueError, self.do_genhash, 'stub', hash, + __msg__="genhash() failed to reject empty hash") + + # + # test identify doesn't throw decoding errors on 8-bit input + # + self.do_identify('\xe2\x82\xac\xc2\xa5$') # utf-8 + self.do_identify('abc\x91\x00') # non-utf8 + + #=================================================================== + # fuzz testing + #=================================================================== + def test_77_fuzz_input(self): + """test random passwords and options + + This test attempts to perform some basic fuzz testing of the hash, + based on whatever information can be found about it. + It does as much as it can within a fixed amount of time + (defaults to 1 second, but can be overridden via $PASSLIB_TEST_FUZZ_TIME). + It tests the following: + + * randomly generated passwords including extended unicode chars + * randomly selected rounds values (if rounds supported) + * randomly selected salt sizes (if salts supported) + * randomly selected identifiers (if multiple found) + * runs output of selected backend against other available backends + (if any) to detect errors occurring between different backends. 
+ * runs output against other "external" verifiers such as OS crypt() + """ + if self.is_disabled_handler: + raise self.skipTest("not applicable") + + # gather info + from passlib.utils import tick + handler = self.handler + disabled = self.is_disabled_handler + max_time = self.max_fuzz_time + if max_time <= 0: + raise self.skipTest("disabled by test mode") + verifiers = self.get_fuzz_verifiers() + def vname(v): + return (v.__doc__ or v.__name__).splitlines()[0] + + # do as many tests as possible for max_time seconds + stop = tick() + max_time + count = 0 + while tick() <= stop: + # generate random password & options + secret, other, kwds = self.get_fuzz_settings() + ctx = dict((k,kwds[k]) for k in handler.context_kwds if k in kwds) + + # create new hash + hash = self.do_encrypt(secret, **kwds) + ##log.debug("fuzz test: hash=%r secret=%r other=%r", + ## hash, secret, other) + + # run through all verifiers we found. + for verify in verifiers: + name = vname(verify) + result = verify(secret, hash, **ctx) + if result == "skip": # let verifiers signal lack of support + continue + assert result is True or result is False + if not result: + raise self.failureException("failed to verify against %s: " + "secret=%r config=%r hash=%r" % + (name, secret, kwds, hash)) + # occasionally check that some other secrets WON'T verify + # against this hash. + if rng.random() < .1: + result = verify(other, hash, **ctx) + if result and result != "skip": + raise self.failureException("was able to verify wrong " + "password using %s: wrong_secret=%r real_secret=%r " + "config=%r hash=%r" % (name, other, secret, kwds, hash)) + count +=1 + + log.debug("fuzz test: %r checked %d passwords against %d verifiers (%s)", + self.descriptionPrefix, count, len(verifiers), + ", ".join(vname(v) for v in verifiers)) + + #--------------------------------------------------------------- + # fuzz constants & helpers + #--------------------------------------------------------------- + + # alphabet for randomly generated passwords + fuzz_password_alphabet = u('qwertyASDF1234<>.@*#! \u00E1\u0259\u0411\u2113') + + # encoding when testing bytes + fuzz_password_encoding = "utf-8" + + @property + def max_fuzz_time(self): + "amount of time to spend on fuzz testing" + value = float(os.environ.get("PASSLIB_TEST_FUZZ_TIME") or 0) + if value: + return value + elif TEST_MODE(max="quick"): + return 0 + elif TEST_MODE(max="default"): + return 1 + else: + return 5 + + def os_supports_ident(self, ident): + "whether native OS crypt() supports particular ident value" + return True + + #--------------------------------------------------------------- + # fuzz verifiers + #--------------------------------------------------------------- + def get_fuzz_verifiers(self): + """return list of password verifiers (including external libs) + + used by fuzz testing. + verifiers should be callable with signature + ``func(password: unicode, hash: ascii str) -> ok: bool``. + """ + handler = self.handler + verifiers = [] + + # call all methods starting with prefix in order to create + # any verifiers. 
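As a sketch of the plug-in point described above: any method named fuzz_verifier_* that returns a callable with the documented signature is collected automatically. The example below is hypothetical (not part of the patch); it simply re-checks the hash through the handler's public API and uses the "skip" sentinel for hashes it does not recognise.

    def fuzz_verifier_example(self):
        "hypothetical extra verifier for a HandlerCase subclass"
        handler = self.handler
        def check_example(secret, hash):
            "example-verifier"
            if not handler.identify(hash):
                return "skip"        # signal lack of support, per test_77
            return handler.verify(secret, hash)
        return check_example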
+ prefix = "fuzz_verifier_" + for name in dir(self): + if name.startswith(prefix): + func = getattr(self, name)() + if func is not None: + verifiers.append(func) + + # create verifiers for any other available backends + if hasattr(handler, "backends") and TEST_MODE("full"): + def maker(backend): + def func(secret, hash): + with temporary_backend(handler, backend): + return handler.verify(secret, hash) + func.__name__ = "check_" + backend + "_backend" + func.__doc__ = backend + "-backend" + return func + cur = handler.get_backend() + for backend in handler.backends: + if backend != cur and handler.has_backend(backend): + verifiers.append(maker(backend)) + + return verifiers + + def fuzz_verifier_default(self): + # test against self + def check_default(secret, hash, **ctx): + return self.do_verify(secret, hash, **ctx) + if self.backend: + check_default.__doc__ = self.backend + "-backend" + else: + check_default.__doc__ = "self" + return check_default + + def fuzz_verifier_crypt(self): + "test results against OS crypt()" + handler = self.handler + if self.using_patched_crypt or not has_crypt_support(handler): + return None + from crypt import crypt + def check_crypt(secret, hash): + "stdlib-crypt" + if not self.os_supports_ident(hash): + return "skip" + secret = to_native_str(secret, self.fuzz_password_encoding) + return crypt(secret, hash) == hash + return check_crypt + + #--------------------------------------------------------------- + # fuzz settings generation + #--------------------------------------------------------------- + def get_fuzz_settings(self): + "generate random password and options for fuzz testing" + prefix = "fuzz_setting_" + kwds = {} + for name in dir(self): + if name.startswith(prefix): + value = getattr(self, name)() + if value is not None: + kwds[name[len(prefix):]] = value + secret, other = self.get_fuzz_password_pair() + return secret, other, kwds + + def fuzz_setting_rounds(self): + handler = self.handler + if not has_rounds_info(handler): + return None + default = handler.default_rounds or handler.min_rounds + lower = handler.min_rounds + if handler.rounds_cost == "log2": + upper = default + else: + upper = min(default*2, handler.max_rounds) + return randintgauss(lower, upper, default, default*.5) + + def fuzz_setting_salt_size(self): + handler = self.handler + if not (has_salt_info(handler) and 'salt_size' in handler.setting_kwds): + return None + default = handler.default_salt_size + lower = handler.min_salt_size + upper = handler.max_salt_size or default*4 + return randintgauss(lower, upper, default, default*.5) + + def fuzz_setting_ident(self): + handler = self.handler + if 'ident' not in handler.setting_kwds or not hasattr(handler, "ident_values"): + return None + if rng.random() < .5: + return None + # resolve wrappers before reading values + handler = getattr(handler, "wrapped", handler) + ident = rng.choice(handler.ident_values) + if self.backend == "os_crypt" and not self.using_patched_crypt and not self.os_supports_ident(ident): + return None + return ident + + #--------------------------------------------------------------- + # fuzz password generation + #--------------------------------------------------------------- + def get_fuzz_password(self): + "generate random passwords for fuzz testing" + # occasionally try an empty password + if rng.random() < .0001: + return u('') + # otherwise alternate between large and small passwords. 
+ if rng.random() < .5: + size = randintgauss(1, 50, 15, 15) + else: + size = randintgauss(50, 99, 70, 20) + return getrandstr(rng, self.fuzz_password_alphabet, size) + + def accept_fuzz_pair(self, secret, other): + "verify fuzz pair contains different passwords" + return secret != other + + def get_fuzz_password_pair(self): + "generate random password, and non-matching alternate password" + secret = self.get_fuzz_password() + while True: + other = self.get_fuzz_password() + if self.accept_fuzz_pair(secret, other): + break + if rng.randint(0,1): + secret = secret.encode(self.fuzz_password_encoding) + if rng.randint(0,1): + other = other.encode(self.fuzz_password_encoding) + return secret, other + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# HandlerCase mixins providing additional tests for certain hashes +#============================================================================= +class OsCryptMixin(HandlerCase): + """helper used by create_backend_case() which adds additional features + to test the os_crypt backend. + + * if crypt support is missing, inserts fake crypt support to simulate + a working safe_crypt, to test passlib's codepath as fully as possible. + + * extra tests to verify non-conformant crypt implementations are handled + correctly. + + * check that native crypt support is detected correctly for known platforms. + """ + #=================================================================== + # option flags + #=================================================================== + # platforms that are known to support / not support this hash natively. + # list of (platform_regex, True|False|None) entries. + platform_crypt_support = [] + + #=================================================================== + # instance attrs + #=================================================================== + __unittest_skip = True + + # force this backend + backend = "os_crypt" + + # flag read by HandlerCase to detect if fake os crypt is enabled. + using_patched_crypt = False + + #=================================================================== + # setup + #=================================================================== + def setUp(self): + assert self.backend == "os_crypt" + if not self.handler.has_backend("os_crypt"): + self.handler.get_backend() # hack to prevent recursion issue + self._patch_safe_crypt() + super(OsCryptMixin, self).setUp() + + # alternate handler to use for fake os_crypt, + # e.g. bcrypt_sha256 uses bcrypt + fallback_os_crypt_handler = None + + def _patch_safe_crypt(self): + """if crypt() doesn't support current hash alg, this patches + safe_crypt() so that it transparently uses another one of the handler's + backends, so that we can go ahead and test as much of code path + as possible. + """ + handler = self.fallback_os_crypt_handler or self.handler + # resolve wrappers, since we want to return crypt compatible hash. 
+ while hasattr(handler, "wrapped"): + handler = handler.wrapped + alt_backend = self.find_crypt_replacement() + if not alt_backend: + raise AssertionError("handler has no available backends!") + import passlib.utils as mod + def crypt_stub(secret, hash): + with temporary_backend(handler, alt_backend): + hash = handler.genhash(secret, hash) + assert isinstance(hash, str) + return hash + self.addCleanup(setattr, mod, "_crypt", mod._crypt) + mod._crypt = crypt_stub + self.using_patched_crypt = True + + #=================================================================== + # custom tests + #=================================================================== + def _use_mock_crypt(self): + "patch safe_crypt() so it returns mock value" + import passlib.utils as mod + if not self.using_patched_crypt: + self.addCleanup(setattr, mod, "_crypt", mod._crypt) + crypt_value = [None] + mod._crypt = lambda secret, config: crypt_value[0] + def setter(value): + crypt_value[0] = value + return setter + + def test_80_faulty_crypt(self): + "test with faulty crypt()" + hash = self.get_sample_hash()[1] + exc_types = (AssertionError,) + setter = self._use_mock_crypt() + + def test(value): + # set safe_crypt() to return specified value, and + # make sure assertion error is raised by handler. + setter(value) + self.assertRaises(exc_types, self.do_genhash, "stub", hash) + self.assertRaises(exc_types, self.do_encrypt, "stub") + self.assertRaises(exc_types, self.do_verify, "stub", hash) + + test('$x' + hash[2:]) # detect wrong prefix + test(hash[:-1]) # detect too short + test(hash + 'x') # detect too long + + def test_81_crypt_fallback(self): + "test per-call crypt() fallback" + # set safe_crypt to return None + setter = self._use_mock_crypt() + setter(None) + if self.find_crypt_replacement(): + # handler should have a fallback to use + h1 = self.do_encrypt("stub") + h2 = self.do_genhash("stub", h1) + self.assertEqual(h2, h1) + self.assertTrue(self.do_verify("stub", h1)) + else: + # handler should give up + from passlib.exc import MissingBackendError + hash = self.get_sample_hash()[1] + self.assertRaises(MissingBackendError, self.do_encrypt, 'stub') + self.assertRaises(MissingBackendError, self.do_genhash, 'stub', hash) + self.assertRaises(MissingBackendError, self.do_verify, 'stub', hash) + + def test_82_crypt_support(self): + "test platform-specific crypt() support detection" + # NOTE: this is mainly just a sanity check to ensure the runtime + # detection is functioning correctly on some known platforms, + # so that I can feel more confident it'll work right on unknown ones. + if hasattr(self.handler, "orig_prefix"): + raise self.skipTest("not applicable to wrappers") + platform = sys.platform + for pattern, state in self.platform_crypt_support: + if re.match(pattern, platform): + break + else: + raise self.skipTest("no data for %r platform" % platform) + if state is None: + # e.g. platform='freebsd8' ... 
sha256_crypt not added until 8.3 + raise self.skipTest("varied support on %r platform" % platform) + elif state != self.using_patched_crypt: + return + elif state: + self.fail("expected %r platform would have native support " + "for %r" % (platform, self.handler.name)) + else: + self.fail("did not expect %r platform would have native support " + "for %r" % (platform, self.handler.name)) + + #=================================================================== + # eoc + #=================================================================== + +class UserHandlerMixin(HandlerCase): + """helper for handlers w/ 'user' context kwd; mixin for HandlerCase + + this overrides the HandlerCase test harness methods + so that a username is automatically inserted to encrypt/verify + calls. as well, passing in a pair of strings as the password + will be interpreted as (secret,user) + """ + #=================================================================== + # option flags + #=================================================================== + default_user = "user" + requires_user = True + user_case_insensitive = False + + #=================================================================== + # instance attrs + #=================================================================== + __unittest_skip = True + + #=================================================================== + # custom tests + #=================================================================== + def test_80_user(self): + "test user context keyword" + handler = self.handler + password = 'stub' + hash = handler.encrypt(password, user=self.default_user) + + if self.requires_user: + self.assertRaises(TypeError, handler.encrypt, password) + self.assertRaises(TypeError, handler.genhash, password, hash) + self.assertRaises(TypeError, handler.verify, password, hash) + else: + # e.g. cisco_pix works with or without one. + handler.encrypt(password) + handler.genhash(password, hash) + handler.verify(password, hash) + + def test_81_user_case(self): + "test user case sensitivity" + lower = self.default_user.lower() + upper = lower.upper() + hash = self.do_encrypt('stub', user=lower) + if self.user_case_insensitive: + self.assertTrue(self.do_verify('stub', hash, user=upper), + "user should not be case sensitive") + else: + self.assertFalse(self.do_verify('stub', hash, user=upper), + "user should be case sensitive") + + def test_82_user_salt(self): + "test user used as salt" + config = self.do_genconfig() + h1 = self.do_genhash('stub', config, user='admin') + h2 = self.do_genhash('stub', config, user='admin') + self.assertEqual(h2, h1) + h3 = self.do_genhash('stub', config, user='root') + self.assertNotEqual(h3, h1) + + # TODO: user size? kinda dicey, depends on algorithm. 
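For handlers that take the 'user' context keyword wrapped by UserHandlerMixin, the calls exercised by test_80 through test_82 look like this from the caller's side. postgres_md5 is one such handler in the vendored passlib and is shown only as an example sketch:

    from passlib.hash import postgres_md5

    h = postgres_md5.encrypt("secret", user="admin")
    assert postgres_md5.verify("secret", h, user="admin")
    assert not postgres_md5.verify("secret", h, user="other")   # user acts as extra salt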
+ + #=================================================================== + # override test helpers + #=================================================================== + def populate_context(self, secret, kwds): + "insert username into kwds" + if isinstance(secret, tuple): + secret, user = secret + elif not self.requires_user: + return secret + else: + user = self.default_user + if 'user' not in kwds: + kwds['user'] = user + return secret + + #=================================================================== + # modify fuzz testing + #=================================================================== + fuzz_user_alphabet = u("asdQWE123") + + def fuzz_setting_user(self): + if not self.requires_user and rng.random() < .1: + return None + return getrandstr(rng, self.fuzz_user_alphabet, rng.randint(2,10)) + + #=================================================================== + # eoc + #=================================================================== + +class EncodingHandlerMixin(HandlerCase): + """helper for handlers w/ 'encoding' context kwd; mixin for HandlerCase + + this overrides the HandlerCase test harness methods + so that an encoding can be inserted to encrypt/verify + calls by passing in a pair of strings as the password + will be interpreted as (secret,encoding) + """ + #=================================================================== + # instance attrs + #=================================================================== + __unittest_skip = True + + # restrict stock passwords & fuzz alphabet to latin-1, + # so different encodings can be tested safely. + stock_passwords = [ + u("test"), + b("test"), + u("\u00AC\u00BA"), + ] + + fuzz_password_alphabet = u('qwerty1234<>.@*#! \u00AC') + + def populate_context(self, secret, kwds): + "insert encoding into kwds" + if isinstance(secret, tuple): + secret, encoding = secret + kwds.setdefault('encoding', encoding) + return secret + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# warnings helpers +#============================================================================= +class reset_warnings(catch_warnings): + """catch_warnings() wrapper which clears warning registry & filters""" + def __init__(self, reset_filter="always", reset_registry=".*", **kwds): + super(reset_warnings, self).__init__(**kwds) + self._reset_filter = reset_filter + self._reset_registry = re.compile(reset_registry) if reset_registry else None + + def __enter__(self): + # let parent class archive filter state + ret = super(reset_warnings, self).__enter__() + + # reset the filter to list everything + if self._reset_filter: + warnings.resetwarnings() + warnings.simplefilter(self._reset_filter) + + # archive and clear the __warningregistry__ key for all modules + # that match the 'reset' pattern. 
+ pattern = self._reset_registry + if pattern: + orig = self._orig_registry = {} + for name, mod in sys.modules.items(): + if pattern.match(name): + reg = getattr(mod, "__warningregistry__", None) + if reg: + orig[name] = reg.copy() + reg.clear() + return ret + + def __exit__(self, *exc_info): + # restore warning registry for all modules + pattern = self._reset_registry + if pattern: + # restore archived registry data + orig = self._orig_registry + for name, content in iteritems(orig): + mod = sys.modules.get(name) + if mod is None: + continue + reg = getattr(mod, "__warningregistry__", None) + if reg is None: + setattr(mod, "__warningregistry__", content) + else: + reg.clear() + reg.update(content) + # clear all registry entries that we didn't archive + for name, mod in sys.modules.items(): + if pattern.match(name) and name not in orig: + reg = getattr(mod, "__warningregistry__", None) + if reg: + reg.clear() + super(reset_warnings, self).__exit__(*exc_info) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/utils/__init__.py b/passlib/utils/__init__.py new file mode 100644 index 00000000..654124ea --- /dev/null +++ b/passlib/utils/__init__.py @@ -0,0 +1,1619 @@ +"""passlib.utils -- helpers for writing password hashes""" +#============================================================================= +# imports +#============================================================================= +from passlib.utils.compat import PYPY, JYTHON +# core +from base64 import b64encode, b64decode +from codecs import lookup as _lookup_codec +from functools import update_wrapper +import logging; log = logging.getLogger(__name__) +import math +import os +import sys +import random +if JYTHON: # pragma: no cover -- runtime detection + # Jython 2.5.2 lacks stringprep module - + # see http://bugs.jython.org/issue1758320 + try: + import stringprep + except ImportError: + stringprep = None + _stringprep_missing_reason = "not present under Jython" +else: + import stringprep +import time +if stringprep: + import unicodedata +from warnings import warn +# site +# pkg +from passlib.exc import ExpectedStringError +from passlib.utils.compat import add_doc, b, bytes, join_bytes, join_byte_values, \ + join_byte_elems, exc_err, irange, imap, PY3, u, \ + join_unicode, unicode, byte_elem_value, PY_MIN_32, next_method_attr +# local +__all__ = [ + # constants + 'PYPY', + 'JYTHON', + 'sys_bits', + 'unix_crypt_schemes', + 'rounds_cost_values', + + # decorators + "classproperty", +## "deprecated_function", +## "relocated_function", +## "memoized_class_property", + + # unicode helpers + 'consteq', + 'saslprep', + + # bytes helpers + "xor_bytes", + "render_bytes", + + # encoding helpers + 'is_same_codec', + 'is_ascii_safe', + 'to_bytes', + 'to_unicode', + 'to_native_str', + + # base64 helpers + "BASE64_CHARS", "HASH64_CHARS", "BCRYPT_CHARS", "AB64_CHARS", + "Base64Engine", "h64", "h64big", + "ab64_encode", "ab64_decode", + + # host OS + 'has_crypt', + 'test_crypt', + 'safe_crypt', + 'tick', + + # randomness + 'rng', + 'getrandbytes', + 'getrandstr', + 'generate_password', + + # object type / interface tests + 'is_crypt_handler', + 'is_crypt_context', + 'has_rounds_info', + 'has_salt_info', +] + +#============================================================================= +# constants +#============================================================================= + +# bitsize of system 
architecture (32 or 64) +sys_bits = int(math.log(sys.maxsize if PY3 else sys.maxint, 2) + 1.5) + +# list of hashes algs supported by crypt() on at least one OS. +unix_crypt_schemes = [ + "sha512_crypt", "sha256_crypt", + "sha1_crypt", "bcrypt", + "md5_crypt", + # "bsd_nthash", + "bsdi_crypt", "des_crypt", + ] + +# list of rounds_cost constants +rounds_cost_values = [ "linear", "log2" ] + +# legacy import, will be removed in 1.8 +from passlib.exc import MissingBackendError + +# internal helpers +_BEMPTY = b('') +_UEMPTY = u("") +_USPACE = u(" ") + +# maximum password size which passlib will allow; see exc.PasswordSizeError +MAX_PASSWORD_SIZE = int(os.environ.get("PASSLIB_MAX_PASSWORD_SIZE") or 4096) + +#============================================================================= +# decorators and meta helpers +#============================================================================= +class classproperty(object): + """Function decorator which acts like a combination of classmethod+property (limited to read-only properties)""" + + def __init__(self, func): + self.im_func = func + + def __get__(self, obj, cls): + return self.im_func(cls) + + @property + def __func__(self): + "py3 compatible alias" + return self.im_func + +def deprecated_function(msg=None, deprecated=None, removed=None, updoc=True, + replacement=None, _is_method=False): + """decorator to deprecate a function. + + :arg msg: optional msg, default chosen if omitted + :kwd deprecated: version when function was first deprecated + :kwd removed: version when function will be removed + :kwd replacement: alternate name / instructions for replacing this function. + :kwd updoc: add notice to docstring (default ``True``) + """ + if msg is None: + if _is_method: + msg = "the method %(mod)s.%(klass)s.%(name)s() is deprecated" + else: + msg = "the function %(mod)s.%(name)s() is deprecated" + if deprecated: + msg += " as of Passlib %(deprecated)s" + if removed: + msg += ", and will be removed in Passlib %(removed)s" + if replacement: + msg += ", use %s instead" % replacement + msg += "." + def build(func): + opts = dict( + mod=func.__module__, + name=func.__name__, + deprecated=deprecated, + removed=removed, + ) + if _is_method: + def wrapper(*args, **kwds): + tmp = opts.copy() + klass = args[0].__class__ + tmp.update(klass=klass.__name__, mod=klass.__module__) + warn(msg % tmp, DeprecationWarning, stacklevel=2) + return func(*args, **kwds) + else: + text = msg % opts + def wrapper(*args, **kwds): + warn(text, DeprecationWarning, stacklevel=2) + return func(*args, **kwds) + update_wrapper(wrapper, func) + if updoc and (deprecated or removed) and \ + wrapper.__doc__ and ".. deprecated::" not in wrapper.__doc__: + txt = deprecated or '' + if removed or replacement: + txt += "\n " + if removed: + txt += "and will be removed in version %s" % (removed,) + if replacement: + if removed: + txt += ", " + txt += "use %s instead" % replacement + txt += "." + if not wrapper.__doc__.strip(" ").endswith("\n"): + wrapper.__doc__ += "\n" + wrapper.__doc__ += "\n.. deprecated:: %s\n" % (txt,) + return wrapper + return build + +def deprecated_method(msg=None, deprecated=None, removed=None, updoc=True, + replacement=None): + """decorator to deprecate a method. + + :arg msg: optional msg, default chosen if omitted + :kwd deprecated: version when method was first deprecated + :kwd removed: version when method will be removed + :kwd replacement: alternate name / instructions for replacing this method. 
+ :kwd updoc: add notice to docstring (default ``True``) + """ + return deprecated_function(msg, deprecated, removed, updoc, replacement, + _is_method=True) + +class memoized_property(object): + """decorator which invokes method once, then replaces attr with result""" + def __init__(self, func): + self.im_func = func + + def __get__(self, obj, cls): + if obj is None: + return self + func = self.im_func + value = func(obj) + setattr(obj, func.__name__, value) + return value + + @property + def __func__(self): + "py3 alias" + return self.im_func + +# works but not used +##class memoized_class_property(object): +## """function decorator which calls function as classmethod, +## and replaces itself with result for current and all future invocations. +## """ +## def __init__(self, func): +## self.im_func = func +## +## def __get__(self, obj, cls): +## func = self.im_func +## value = func(cls) +## setattr(cls, func.__name__, value) +## return value +## +## @property +## def __func__(self): +## "py3 compatible alias" + +#============================================================================= +# unicode helpers +#============================================================================= + +def consteq(left, right): + """Check two strings/bytes for equality. + This is functionally equivalent to ``left == right``, + but attempts to take constant time relative to the size of the righthand input. + + The purpose of this function is to help prevent timing attacks + during digest comparisons: the standard ``==`` operator aborts + after the first mismatched character, causing it's runtime to be + proportional to the longest prefix shared by the two inputs. + If an attacker is able to predict and control one of the two + inputs, repeated queries can be leveraged to reveal information about + the content of the second argument. To minimize this risk, :func:`!consteq` + is designed to take ``THETA(len(right))`` time, regardless + of the contents of the two strings. + It is recommended that the attacker-controlled input + be passed in as the left-hand value. + + .. warning:: + + This function is *not* perfect. Various VM-dependant issues + (e.g. the VM's integer object instantiation algorithm, internal unicode representation, etc), + may still cause the function's run time to be affected by the inputs, + though in a less predictable manner. + *To minimize such risks, this function should not be passed* :class:`unicode` + *inputs that might contain non-* ``ASCII`` *characters*. + + .. versionadded:: 1.6 + """ + # NOTE: + # resources & discussions considered in the design of this function: + # hmac timing attack -- + # http://rdist.root.org/2009/05/28/timing-attack-in-google-keyczar-library/ + # python developer discussion surrounding similar function -- + # http://bugs.python.org/issue15061 + # http://bugs.python.org/issue14955 + + # validate types + if isinstance(left, unicode): + if not isinstance(right, unicode): + raise TypeError("inputs must be both unicode or both bytes") + is_py3_bytes = False + elif isinstance(left, bytes): + if not isinstance(right, bytes): + raise TypeError("inputs must be both unicode or both bytes") + is_py3_bytes = PY3 + else: + raise TypeError("inputs must be both unicode or both bytes") + + # do size comparison. + # NOTE: the double-if construction below is done deliberately, to ensure + # the same number of operations (including branches) is performed regardless + # of whether left & right are the same size. 
+ same_size = (len(left) == len(right)) + if same_size: + # if sizes are the same, setup loop to perform actual check of contents. + tmp = left + result = 0 + if not same_size: + # if sizes aren't the same, set 'result' so equality will fail regardless + # of contents. then, to ensure we do exactly 'len(right)' iterations + # of the loop, just compare 'right' against itself. + tmp = right + result = 1 + + # run constant-time string comparision + # TODO: use izip instead (but first verify it's faster than zip for this case) + if is_py3_bytes: + for l,r in zip(tmp, right): + result |= l ^ r + else: + for l,r in zip(tmp, right): + result |= ord(l) ^ ord(r) + return result == 0 + +def splitcomma(source, sep=","): + """split comma-separated string into list of elements, + stripping whitespace. + """ + source = source.strip() + if source.endswith(sep): + source = source[:-1] + if not source: + return [] + return [ elem.strip() for elem in source.split(sep) ] + +def saslprep(source, param="value"): + """Normalizes unicode strings using SASLPrep stringprep profile. + + The SASLPrep profile is defined in :rfc:`4013`. + It provides a uniform scheme for normalizing unicode usernames + and passwords before performing byte-value sensitive operations + such as hashing. Among other things, it normalizes diacritic + representations, removes non-printing characters, and forbids + invalid characters such as ``\\n``. Properly internationalized + applications should run user passwords through this function + before hashing. + + :arg source: + unicode string to normalize & validate + + :param param: + Optional noun used to refer to identify source parameter in error messages + (Defaults to the string ``"value"``). This is mainly useful to make the caller's error + messages make more sense. + + :raises ValueError: + if any characters forbidden by the SASLPrep profile are encountered. + + :returns: + normalized unicode string + + .. note:: + + This function is not available under Jython, + as the Jython stdlib is missing the :mod:`!stringprep` module + (`Jython issue 1758320 `_). + + .. versionadded:: 1.6 + """ + # saslprep - http://tools.ietf.org/html/rfc4013 + # stringprep - http://tools.ietf.org/html/rfc3454 + # http://docs.python.org/library/stringprep.html + + # validate type + if not isinstance(source, unicode): + raise TypeError("input must be unicode string, not %s" % + (type(source),)) + + # mapping stage + # - map non-ascii spaces to U+0020 (stringprep C.1.2) + # - strip 'commonly mapped to nothing' chars (stringprep B.1) + in_table_c12 = stringprep.in_table_c12 + in_table_b1 = stringprep.in_table_b1 + data = join_unicode( + _USPACE if in_table_c12(c) else c + for c in source + if not in_table_b1(c) + ) + + # normalize to KC form + data = unicodedata.normalize('NFKC', data) + if not data: + return _UEMPTY + + # check for invalid bi-directional strings. + # stringprep requires the following: + # - chars in C.8 must be prohibited. + # - if any R/AL chars in string: + # - no L chars allowed in string + # - first and last must be R/AL chars + # this checks if start/end are R/AL chars. if so, prohibited loop + # will forbid all L chars. if not, prohibited loop will forbid all + # R/AL chars instead. in both cases, prohibited loop takes care of C.8. + is_ral_char = stringprep.in_table_d1 + if is_ral_char(data[0]): + if not is_ral_char(data[-1]): + raise ValueError("malformed bidi sequence in " + param) + # forbid L chars within R/AL sequence. 
+ is_forbidden_bidi_char = stringprep.in_table_d2 + else: + # forbid R/AL chars if start not setup correctly; L chars allowed. + is_forbidden_bidi_char = is_ral_char + + # check for prohibited output - stringprep tables A.1, B.1, C.1.2, C.2 - C.9 + in_table_a1 = stringprep.in_table_a1 + in_table_c21_c22 = stringprep.in_table_c21_c22 + in_table_c3 = stringprep.in_table_c3 + in_table_c4 = stringprep.in_table_c4 + in_table_c5 = stringprep.in_table_c5 + in_table_c6 = stringprep.in_table_c6 + in_table_c7 = stringprep.in_table_c7 + in_table_c8 = stringprep.in_table_c8 + in_table_c9 = stringprep.in_table_c9 + for c in data: + # check for chars mapping stage should have removed + assert not in_table_b1(c), "failed to strip B.1 in mapping stage" + assert not in_table_c12(c), "failed to replace C.1.2 in mapping stage" + + # check for forbidden chars + if in_table_a1(c): + raise ValueError("unassigned code points forbidden in " + param) + if in_table_c21_c22(c): + raise ValueError("control characters forbidden in " + param) + if in_table_c3(c): + raise ValueError("private use characters forbidden in " + param) + if in_table_c4(c): + raise ValueError("non-char code points forbidden in " + param) + if in_table_c5(c): + raise ValueError("surrogate codes forbidden in " + param) + if in_table_c6(c): + raise ValueError("non-plaintext chars forbidden in " + param) + if in_table_c7(c): + # XXX: should these have been caught by normalize? + # if so, should change this to an assert + raise ValueError("non-canonical chars forbidden in " + param) + if in_table_c8(c): + raise ValueError("display-modifying / deprecated chars " + "forbidden in" + param) + if in_table_c9(c): + raise ValueError("tagged characters forbidden in " + param) + + # do bidi constraint check chosen by bidi init, above + if is_forbidden_bidi_char(c): + raise ValueError("forbidden bidi character in " + param) + + return data + +# replace saslprep() with stub when stringprep is missing +if stringprep is None: # pragma: no cover -- runtime detection + def saslprep(source, param="value"): + "stub for saslprep()" + raise NotImplementedError("saslprep() support requires the 'stringprep' " + "module, which is " + _stringprep_missing_reason) + +#============================================================================= +# bytes helpers +#============================================================================= +def render_bytes(source, *args): + """Peform ``%`` formating using bytes in a uniform manner across Python 2/3. + + This function is motivated by the fact that + :class:`bytes` instances do not support ``%`` or ``{}`` formatting under Python 3. + This function is an attempt to provide a replacement: + it converts everything to unicode (decoding bytes instances as ``latin-1``), + performs the required formatting, then encodes the result to ``latin-1``. + + Calling ``render_bytes(source, *args)`` should function roughly the same as + ``source % args`` under Python 2. + """ + if isinstance(source, bytes): + source = source.decode("latin-1") + result = source % tuple(arg.decode("latin-1") if isinstance(arg, bytes) + else arg for arg in args) + return result.encode("latin-1") + +if PY_MIN_32: + def bytes_to_int(value): + return int.from_bytes(value, 'big') + def int_to_bytes(value, count): + return value.to_bytes(count, 'big') +else: + # XXX: can any of these be sped up? + from binascii import hexlify, unhexlify + def bytes_to_int(value): + return int(hexlify(value),16) + if PY3: + # grr, why did py3 have to break % for bytes? 
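Typical use of the two unicode helpers defined above, as a rough sketch: run passwords through saslprep() before hashing, and compare digests with consteq() so the comparison time tracks the right-hand operand rather than the length of the matching prefix.

    from passlib.utils import saslprep, consteq

    secret = saslprep(u"p\u00e1ssword")   # NFKC-normalized unicode; forbidden chars raise ValueError
    assert consteq(u"abc", u"abc")        # constant-time equality check
    assert not consteq(u"abc", u"abd")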
+ def int_to_bytes(value, count): + return unhexlify((('%%0%dx' % (count<<1)) % value).encode("ascii")) + else: + def int_to_bytes(value, count): + return unhexlify(('%%0%dx' % (count<<1)) % value) + +add_doc(bytes_to_int, "decode byte string as single big-endian integer") +add_doc(int_to_bytes, "encode integer as single big-endian byte string") + +def xor_bytes(left, right): + "Perform bitwise-xor of two byte strings (must be same size)" + return int_to_bytes(bytes_to_int(left) ^ bytes_to_int(right), len(left)) + +def repeat_string(source, size): + "repeat or truncate string, so it has length " + cur = len(source) + if size > cur: + mult = (size+cur-1)//cur + return (source*mult)[:size] + else: + return source[:size] + +_BNULL = b("\x00") +_UNULL = u("\x00") + +def right_pad_string(source, size, pad=None): + "right-pad or truncate string, so it has length " + cur = len(source) + if size > cur: + if pad is None: + pad = _UNULL if isinstance(source, unicode) else _BNULL + return source+pad*(size-cur) + else: + return source[:size] + +#============================================================================= +# encoding helpers +#============================================================================= +_ASCII_TEST_BYTES = b("\x00\n aA:#!\x7f") +_ASCII_TEST_UNICODE = _ASCII_TEST_BYTES.decode("ascii") + +def is_ascii_codec(codec): + "Test if codec is compatible with 7-bit ascii (e.g. latin-1, utf-8; but not utf-16)" + return _ASCII_TEST_UNICODE.encode(codec) == _ASCII_TEST_BYTES + +def is_same_codec(left, right): + "Check if two codec names are aliases for same codec" + if left == right: + return True + if not (left and right): + return False + return _lookup_codec(left).name == _lookup_codec(right).name + +_B80 = b('\x80')[0] +_U80 = u('\x80') +def is_ascii_safe(source): + "Check if string (bytes or unicode) contains only 7-bit ascii" + r = _B80 if isinstance(source, bytes) else _U80 + return all(c < r for c in source) + +def to_bytes(source, encoding="utf-8", param="value", source_encoding=None): + """Helper to normalize input to bytes. + + :arg source: + Source bytes/unicode to process. + + :arg encoding: + Target encoding (defaults to ``"utf-8"``). + + :param param: + Optional name of variable/noun to reference when raising errors + + :param source_encoding: + If this is specified, and the source is bytes, + the source will be transcoded from *source_encoding* to *encoding* + (via unicode). + + :raises TypeError: if source is not unicode or bytes. + + :returns: + * unicode strings will be encoded using *encoding*, and returned. + * if *source_encoding* is not specified, byte strings will be + returned unchanged. + * if *source_encoding* is specified, byte strings will be transcoded + to *encoding*. + """ + assert encoding + if isinstance(source, bytes): + if source_encoding and not is_same_codec(source_encoding, encoding): + return source.decode(source_encoding).encode(encoding) + else: + return source + elif isinstance(source, unicode): + return source.encode(encoding) + else: + raise ExpectedStringError(source, param) + +def to_unicode(source, encoding="utf-8", param="value"): + """Helper to normalize input to unicode. + + :arg source: + source bytes/unicode to process. + + :arg encoding: + encoding to use when decoding bytes instances. + + :param param: + optional name of variable/noun to reference when raising errors. + + :raises TypeError: if source is not unicode or bytes. + + :returns: + * returns unicode strings unchanged. 
+ * returns bytes strings decoded using *encoding* + """ + assert encoding + if isinstance(source, unicode): + return source + elif isinstance(source, bytes): + return source.decode(encoding) + else: + raise ExpectedStringError(source, param) + +if PY3: + def to_native_str(source, encoding="utf-8", param="value"): + if isinstance(source, bytes): + return source.decode(encoding) + elif isinstance(source, unicode): + return source + else: + raise ExpectedStringError(source, param) +else: + def to_native_str(source, encoding="utf-8", param="value"): + if isinstance(source, bytes): + return source + elif isinstance(source, unicode): + return source.encode(encoding) + else: + raise ExpectedStringError(source, param) + +add_doc(to_native_str, + """Take in unicode or bytes, return native string. + + Python 2: encodes unicode using specified encoding, leaves bytes alone. + Python 3: leaves unicode alone, decodes bytes using specified encoding. + + :raises TypeError: if source is not unicode or bytes. + + :arg source: + source unicode or bytes string. + + :arg encoding: + encoding to use when encoding unicode or decoding bytes. + this defaults to ``"utf-8"``. + + :param param: + optional name of variable/noun to reference when raising errors. + + :returns: :class:`str` instance + """) + +@deprecated_function(deprecated="1.6", removed="1.7") +def to_hash_str(source, encoding="ascii"): # pragma: no cover -- deprecated & unused + "deprecated, use to_native_str() instead" + return to_native_str(source, encoding, param="hash") + +#============================================================================= +# base64-variant encoding +#============================================================================= + +class Base64Engine(object): + """Provides routines for encoding/decoding base64 data using + arbitrary character mappings, selectable endianness, etc. + + :arg charmap: + A string of 64 unique characters, + which will be used to encode successive 6-bit chunks of data. + A character's position within the string should correspond + to it's 6-bit value. + + :param big: + Whether the encoding should be big-endian (default False). + + .. note:: + This class does not currently handle base64's padding characters + in any way what so ever. + + Raw Bytes <-> Encoded Bytes + =========================== + The following methods convert between raw bytes, + and strings encoded using the engine's specific base64 variant: + + .. automethod:: encode_bytes + .. automethod:: decode_bytes + .. automethod:: encode_transposed_bytes + .. automethod:: decode_transposed_bytes + + .. + .. automethod:: check_repair_unused + .. automethod:: repair_unused + + Integers <-> Encoded Bytes + ========================== + The following methods allow encoding and decoding + unsigned integers to and from the engine's specific base64 variant. + Endianess is determined by the engine's ``big`` constructor keyword. + + .. automethod:: encode_int6 + .. automethod:: decode_int6 + + .. automethod:: encode_int12 + .. automethod:: decode_int12 + + .. automethod:: encode_int24 + .. automethod:: decode_int24 + + .. automethod:: encode_int64 + .. automethod:: decode_int64 + + Informational Attributes + ======================== + .. attribute:: charmap + + unicode string containing list of characters used in encoding; + position in string matches 6bit value of character. + + .. attribute:: bytemap + + bytes version of :attr:`charmap` + + .. attribute:: big + + boolean flag indicating this using big-endian encoding. 
+ """ + + #=================================================================== + # instance attrs + #=================================================================== + # public config + bytemap = None # charmap as bytes + big = None # little or big endian + + # filled in by init based on charmap. + # (byte elem: single byte under py2, 8bit int under py3) + _encode64 = None # maps 6bit value -> byte elem + _decode64 = None # maps byte elem -> 6bit value + + # helpers filled in by init based on endianness + _encode_bytes = None # throws IndexError if bad value (shouldn't happen) + _decode_bytes = None # throws KeyError if bad char. + + #=================================================================== + # init + #=================================================================== + def __init__(self, charmap, big=False): + # validate charmap, generate encode64/decode64 helper functions. + if isinstance(charmap, unicode): + charmap = charmap.encode("latin-1") + elif not isinstance(charmap, bytes): + raise ExpectedStringError(charmap, "charmap") + if len(charmap) != 64: + raise ValueError("charmap must be 64 characters in length") + if len(set(charmap)) != 64: + raise ValueError("charmap must not contain duplicate characters") + self.bytemap = charmap + self._encode64 = charmap.__getitem__ + lookup = dict((value, idx) for idx, value in enumerate(charmap)) + self._decode64 = lookup.__getitem__ + + # validate big, set appropriate helper functions. + self.big = big + if big: + self._encode_bytes = self._encode_bytes_big + self._decode_bytes = self._decode_bytes_big + else: + self._encode_bytes = self._encode_bytes_little + self._decode_bytes = self._decode_bytes_little + + # TODO: support padding character + ##if padding is not None: + ## if isinstance(padding, unicode): + ## padding = padding.encode("latin-1") + ## elif not isinstance(padding, bytes): + ## raise TypeError("padding char must be unicode or bytes") + ## if len(padding) != 1: + ## raise ValueError("padding must be single character") + ##self.padding = padding + + @property + def charmap(self): + "charmap as unicode" + return self.bytemap.decode("latin-1") + + #=================================================================== + # encoding byte strings + #=================================================================== + def encode_bytes(self, source): + """encode bytes to base64 string. + + :arg source: byte string to encode. + :returns: byte string containing encoded data. + """ + if not isinstance(source, bytes): + raise TypeError("source must be bytes, not %s" % (type(source),)) + chunks, tail = divmod(len(source), 3) + if PY3: + next_value = iter(source).__next__ + else: + next_value = (ord(elem) for elem in source).next + gen = self._encode_bytes(next_value, chunks, tail) + out = join_byte_elems(imap(self._encode64, gen)) + ##if tail: + ## padding = self.padding + ## if padding: + ## out += padding * (3-tail) + return out + + def _encode_bytes_little(self, next_value, chunks, tail): + "helper used by encode_bytes() to handle little-endian encoding" + # + # output bit layout: + # + # first byte: v1 543210 + # + # second byte: v1 ....76 + # +v2 3210.. + # + # third byte: v2 ..7654 + # +v3 10.... 
+ # + # fourth byte: v3 765432 + # + idx = 0 + while idx < chunks: + v1 = next_value() + v2 = next_value() + v3 = next_value() + yield v1 & 0x3f + yield ((v2 & 0x0f)<<2)|(v1>>6) + yield ((v3 & 0x03)<<4)|(v2>>4) + yield v3>>2 + idx += 1 + if tail: + v1 = next_value() + if tail == 1: + # note: 4 msb of last byte are padding + yield v1 & 0x3f + yield v1>>6 + else: + assert tail == 2 + # note: 2 msb of last byte are padding + v2 = next_value() + yield v1 & 0x3f + yield ((v2 & 0x0f)<<2)|(v1>>6) + yield v2>>4 + + def _encode_bytes_big(self, next_value, chunks, tail): + "helper used by encode_bytes() to handle big-endian encoding" + # + # output bit layout: + # + # first byte: v1 765432 + # + # second byte: v1 10.... + # +v2 ..7654 + # + # third byte: v2 3210.. + # +v3 ....76 + # + # fourth byte: v3 543210 + # + idx = 0 + while idx < chunks: + v1 = next_value() + v2 = next_value() + v3 = next_value() + yield v1>>2 + yield ((v1&0x03)<<4)|(v2>>4) + yield ((v2&0x0f)<<2)|(v3>>6) + yield v3 & 0x3f + idx += 1 + if tail: + v1 = next_value() + if tail == 1: + # note: 4 lsb of last byte are padding + yield v1>>2 + yield (v1&0x03)<<4 + else: + assert tail == 2 + # note: 2 lsb of last byte are padding + v2 = next_value() + yield v1>>2 + yield ((v1&0x03)<<4)|(v2>>4) + yield ((v2&0x0f)<<2) + + #=================================================================== + # decoding byte strings + #=================================================================== + + def decode_bytes(self, source): + """decode bytes from base64 string. + + :arg source: byte string to decode. + :returns: byte string containing decoded data. + """ + if not isinstance(source, bytes): + raise TypeError("source must be bytes, not %s" % (type(source),)) + ##padding = self.padding + ##if padding: + ## # TODO: add padding size check? + ## source = source.rstrip(padding) + chunks, tail = divmod(len(source), 4) + if tail == 1: + # only 6 bits left, can't encode a whole byte! + raise ValueError("input string length cannot be == 1 mod 4") + next_value = getattr(imap(self._decode64, source), next_method_attr) + try: + return join_byte_values(self._decode_bytes(next_value, chunks, tail)) + except KeyError: + err = exc_err() + raise ValueError("invalid character: %r" % (err.args[0],)) + + def _decode_bytes_little(self, next_value, chunks, tail): + "helper used by decode_bytes() to handle little-endian encoding" + # + # input bit layout: + # + # first byte: v1 ..543210 + # +v2 10...... + # + # second byte: v2 ....5432 + # +v3 3210.... + # + # third byte: v3 ......54 + # +v4 543210.. + # + idx = 0 + while idx < chunks: + v1 = next_value() + v2 = next_value() + v3 = next_value() + v4 = next_value() + yield v1 | ((v2 & 0x3) << 6) + yield (v2>>2) | ((v3 & 0xF) << 4) + yield (v3>>4) | (v4<<2) + idx += 1 + if tail: + # tail is 2 or 3 + v1 = next_value() + v2 = next_value() + yield v1 | ((v2 & 0x3) << 6) + # NOTE: if tail == 2, 4 msb of v2 are ignored (should be 0) + if tail == 3: + # NOTE: 2 msb of v3 are ignored (should be 0) + v3 = next_value() + yield (v2>>2) | ((v3 & 0xF) << 4) + + def _decode_bytes_big(self, next_value, chunks, tail): + "helper used by decode_bytes() to handle big-endian encoding" + # + # input bit layout: + # + # first byte: v1 543210.. + # +v2 ......54 + # + # second byte: v2 3210.... + # +v3 ....5432 + # + # third byte: v3 10...... 
+ # +v4 ..543210 + # + idx = 0 + while idx < chunks: + v1 = next_value() + v2 = next_value() + v3 = next_value() + v4 = next_value() + yield (v1<<2) | (v2>>4) + yield ((v2&0xF)<<4) | (v3>>2) + yield ((v3&0x3)<<6) | v4 + idx += 1 + if tail: + # tail is 2 or 3 + v1 = next_value() + v2 = next_value() + yield (v1<<2) | (v2>>4) + # NOTE: if tail == 2, 4 lsb of v2 are ignored (should be 0) + if tail == 3: + # NOTE: 2 lsb of v3 are ignored (should be 0) + v3 = next_value() + yield ((v2&0xF)<<4) | (v3>>2) + + #=================================================================== + # encode/decode helpers + #=================================================================== + + # padmap2/3 - dict mapping last char of string -> + # equivalent char with no padding bits set. + + def __make_padset(self, bits): + "helper to generate set of valid last chars & bytes" + pset = set(c for i,c in enumerate(self.bytemap) if not i & bits) + pset.update(c for i,c in enumerate(self.charmap) if not i & bits) + return frozenset(pset) + + @memoized_property + def _padinfo2(self): + "mask to clear padding bits, and valid last bytes (for strings 2 % 4)" + # 4 bits of last char unused (lsb for big, msb for little) + bits = 15 if self.big else (15<<2) + return ~bits, self.__make_padset(bits) + + @memoized_property + def _padinfo3(self): + "mask to clear padding bits, and valid last bytes (for strings 3 % 4)" + # 2 bits of last char unused (lsb for big, msb for little) + bits = 3 if self.big else (3<<4) + return ~bits, self.__make_padset(bits) + + def check_repair_unused(self, source): + """helper to detect & clear invalid unused bits in last character. + + :arg source: + encoded data (as ascii bytes or unicode). + + :returns: + `(True, result)` if the string was repaired, + `(False, source)` if the string was ok as-is. + """ + # figure out how many padding bits there are in last char. + tail = len(source) & 3 + if tail == 2: + mask, padset = self._padinfo2 + elif tail == 3: + mask, padset = self._padinfo3 + elif not tail: + return False, source + else: + raise ValueError("source length must != 1 mod 4") + + # check if last char is ok (padset contains bytes & unicode versions) + last = source[-1] + if last in padset: + return False, source + + # we have dirty bits - repair the string by decoding last char, + # clearing the padding bits via , and encoding new char. + if isinstance(source, unicode): + cm = self.charmap + last = cm[cm.index(last) & mask] + assert last in padset, "failed to generate valid padding char" + else: + # NOTE: this assumes ascii-compat encoding, and that + # all chars used by encoding are 7-bit ascii. 
+ last = self._encode64(self._decode64(last) & mask) + assert last in padset, "failed to generate valid padding char" + if PY3: + last = bytes([last]) + return True, source[:-1] + last + + def repair_unused(self, source): + return self.check_repair_unused(source)[1] + + ##def transcode(self, source, other): + ## return ''.join( + ## other.charmap[self.charmap.index(char)] + ## for char in source + ## ) + + ##def random_encoded_bytes(self, size, random=None, unicode=False): + ## "return random encoded string of given size" + ## data = getrandstr(random or rng, + ## self.charmap if unicode else self.bytemap, size) + ## return self.repair_unused(data) + + #=================================================================== + # transposed encoding/decoding + #=================================================================== + def encode_transposed_bytes(self, source, offsets): + "encode byte string, first transposing source using offset list" + if not isinstance(source, bytes): + raise TypeError("source must be bytes, not %s" % (type(source),)) + tmp = join_byte_elems(source[off] for off in offsets) + return self.encode_bytes(tmp) + + def decode_transposed_bytes(self, source, offsets): + "decode byte string, then reverse transposition described by offset list" + # NOTE: if transposition does not use all bytes of source, + # the original can't be recovered... and join_byte_elems() will throw + # an error because 1+ values in will be None. + tmp = self.decode_bytes(source) + buf = [None] * len(offsets) + for off, char in zip(offsets, tmp): + buf[off] = char + return join_byte_elems(buf) + + #=================================================================== + # integer decoding helpers - mainly used by des_crypt family + #=================================================================== + def _decode_int(self, source, bits): + """decode base64 string -> integer + + :arg source: base64 string to decode. + :arg bits: number of bits in resulting integer. + + :raises ValueError: + * if the string contains invalid base64 characters. + * if the string is not long enough - it must be at least + ``int(ceil(bits/6))`` in length. 
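
The transposed helpers above exist for des_crypt-style hashes, and a quick round trip shows how the offset list is applied on encode and undone on decode. A small sketch, assuming the vendored passlib package imports cleanly; the three-byte input and the offset list are arbitrary:

    from passlib.utils import h64

    offsets = [2, 0, 1]                 # emit source[2], source[0], source[1] before encoding
    encoded = h64.encode_transposed_bytes(b"abc", offsets)
    assert h64.decode_transposed_bytes(encoded, offsets) == b"abc"
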
+
+        :returns:
+            an integer in the range ``0 <= n < 2**bits``
+        """
+        if not isinstance(source, bytes):
+            raise TypeError("source must be bytes, not %s" % (type(source),))
+        big = self.big
+        pad = -bits % 6
+        chars = (bits+pad)/6
+        if len(source) != chars:
+            raise ValueError("source must be %d chars" % (chars,))
+        decode = self._decode64
+        out = 0
+        try:
+            for c in source if big else reversed(source):
+                out = (out<<6) + decode(c)
+        except KeyError:
+            raise ValueError("invalid character in string: %r" % (c,))
+        if pad:
+            # strip padding bits
+            if big:
+                out >>= pad
+            else:
+                out &= (1<<bits)-1
+        return out
+
+    #---------------------------------------------------------------
+    # optimized versions for common integer sizes
+    #---------------------------------------------------------------
+
+    def decode_int6(self, source):
+        "decodes single character -> 6 bit integer"
+        if not isinstance(source, bytes):
+            raise TypeError("source must be bytes, not %s" % (type(source),))
+        if len(source) != 1:
+            raise ValueError("source must be exactly 1 byte")
+        if PY3:
+            # convert to 8bit int before doing lookup
+            source = source[0]
+        try:
+            return self._decode64(source)
+        except KeyError:
+            raise ValueError("invalid character")
+
+    def decode_int12(self, source):
+        "decodes 2 char string -> 12-bit integer"
+        if not isinstance(source, bytes):
+            raise TypeError("source must be bytes, not %s" % (type(source),))
+        if len(source) != 2:
+            raise ValueError("source must be exactly 2 bytes")
+        decode = self._decode64
+        try:
+            if self.big:
+                return decode(source[1]) + (decode(source[0])<<6)
+            else:
+                return decode(source[0]) + (decode(source[1])<<6)
+        except KeyError:
+            raise ValueError("invalid character")
+
+    def decode_int24(self, source):
+        "decodes 4 char string -> 24-bit integer"
+        if not isinstance(source, bytes):
+            raise TypeError("source must be bytes, not %s" % (type(source),))
+        if len(source) != 4:
+            raise ValueError("source must be exactly 4 bytes")
+        decode = self._decode64
+        try:
+            if self.big:
+                return decode(source[3]) + (decode(source[2])<<6)+ \
+                       (decode(source[1])<<12) + (decode(source[0])<<18)
+            else:
+                return decode(source[0]) + (decode(source[1])<<6)+ \
+                       (decode(source[2])<<12) + (decode(source[3])<<18)
+        except KeyError:
+            raise ValueError("invalid character")
+
+    def decode_int64(self, source):
+        """decode 11 char base64 string -> 64-bit integer
+
+        this format is used primarily by des-crypt & variants to encode
+        the DES output value used as a checksum.
+        """
+        return self._decode_int(source, 64)
+
+    #===================================================================
+    # integer encoding helpers - mainly used by des_crypt family
+    #===================================================================
+    def _encode_int(self, value, bits):
+        """encode integer into base64 format
+
+        :arg value: non-negative integer to encode
+        :arg bits: number of bits to encode
+
+        :returns:
+            a string of length ``int(ceil(bits/6.0))``.
+        """
+        assert value >= 0, "caller did not sanitize input"
+        pad = -bits % 6
+        bits += pad
+        if self.big:
+            itr = irange(bits-6, -6, -6)
+            # shift to add lsb padding.
+            value <<= pad
+        else:
+            itr = irange(0, bits, 6)
+            # padding is msb, so no change needed.
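
The fixed-width integer codecs above (6, 12, 24 and 64 bits) are what the des_crypt family uses for salts and checksums, and each encode/decode pair is an exact inverse. A hedged example using the h64 and h64big engines defined later in this module, with an arbitrary 24-bit value:

    from passlib.utils import h64, h64big

    value = 0x123456
    for engine in (h64, h64big):
        chars = engine.encode_int24(value)         # 24-bit integer -> 4 hash64 chars
        assert engine.decode_int24(chars) == value
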
+ return join_byte_elems(imap(self._encode64, + ((value>>off) & 0x3f for off in itr))) + + #--------------------------------------------------------------- + # optimized versions for common integer sizes + #--------------------------------------------------------------- + + def encode_int6(self, value): + "encodes 6-bit integer -> single hash64 character" + if value < 0 or value > 63: + raise ValueError("value out of range") + if PY3: + return self.bytemap[value:value+1] + else: + return self._encode64(value) + + def encode_int12(self, value): + "encodes 12-bit integer -> 2 char string" + if value < 0 or value > 0xFFF: + raise ValueError("value out of range") + raw = [value & 0x3f, (value>>6) & 0x3f] + if self.big: + raw = reversed(raw) + return join_byte_elems(imap(self._encode64, raw)) + + def encode_int24(self, value): + "encodes 24-bit integer -> 4 char string" + if value < 0 or value > 0xFFFFFF: + raise ValueError("value out of range") + raw = [value & 0x3f, (value>>6) & 0x3f, + (value>>12) & 0x3f, (value>>18) & 0x3f] + if self.big: + raw = reversed(raw) + return join_byte_elems(imap(self._encode64, raw)) + + def encode_int64(self, value): + """encode 64-bit integer -> 11 char hash64 string + + this format is used primarily by des-crypt & variants to encode + the DES output value used as a checksum. + """ + if value < 0 or value > 0xffffffffffffffff: + raise ValueError("value out of range") + return self._encode_int(value, 64) + + #=================================================================== + # eof + #=================================================================== + +class LazyBase64Engine(Base64Engine): + "Base64Engine which delays initialization until it's accessed" + _lazy_opts = None + + def __init__(self, *args, **kwds): + self._lazy_opts = (args, kwds) + + def _lazy_init(self): + args, kwds = self._lazy_opts + super(LazyBase64Engine, self).__init__(*args, **kwds) + del self._lazy_opts + self.__class__ = Base64Engine + + def __getattribute__(self, attr): + if not attr.startswith("_"): + self._lazy_init() + return object.__getattribute__(self, attr) + +# common charmaps +BASE64_CHARS = u("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/") +AB64_CHARS = u("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789./") +HASH64_CHARS = u("./0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz") +BCRYPT_CHARS = u("./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789") + +# common variants +h64 = LazyBase64Engine(HASH64_CHARS) +h64big = LazyBase64Engine(HASH64_CHARS, big=True) +bcrypt64 = LazyBase64Engine(BCRYPT_CHARS, big=True) + +#============================================================================= +# adapted-base64 encoding +#============================================================================= +_A64_ALTCHARS = b("./") +_A64_STRIP = b("=\n") +_A64_PAD1 = b("=") +_A64_PAD2 = b("==") + +def ab64_encode(data): + """encode using variant of base64 + + the output of this function is identical to stdlib's b64_encode, + except that it uses ``.`` instead of ``+``, + and omits trailing padding ``=`` and whitepsace. + + it is primarily used by Passlib's custom pbkdf2 hashes. + """ + return b64encode(data, _A64_ALTCHARS).strip(_A64_STRIP) + +def ab64_decode(data): + """decode using variant of base64 + + the input of this function is identical to stdlib's b64_decode, + except that it uses ``.`` instead of ``+``, + and should not include trailing padding ``=`` or whitespace. 
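
ab64_encode/ab64_decode above differ from stdlib base64 only in the substituted "." character and the dropped "=" padding, so a round trip plus a character check illustrates both points. A short sketch, assuming the vendored package imports cleanly; the input bytes are arbitrary:

    from passlib.utils import ab64_encode, ab64_decode

    data = b"\xfb\xff\x00\x01"
    encoded = ab64_encode(data)
    assert b"+" not in encoded and b"=" not in encoded   # "." altchar, padding stripped
    assert ab64_decode(encoded) == data
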
+ + it is primarily used by Passlib's custom pbkdf2 hashes. + """ + off = len(data) & 3 + if off == 0: + return b64decode(data, _A64_ALTCHARS) + elif off == 2: + return b64decode(data + _A64_PAD2, _A64_ALTCHARS) + elif off == 3: + return b64decode(data + _A64_PAD1, _A64_ALTCHARS) + else: # off == 1 + raise ValueError("invalid base64 input") + +#============================================================================= +# host OS helpers +#============================================================================= + +try: + from crypt import crypt as _crypt +except ImportError: # pragma: no cover + has_crypt = False + def safe_crypt(secret, hash): + return None +else: + has_crypt = True + _NULL = '\x00' + + # some crypt() variants will return various constant strings when + # an invalid/unrecognized config string is passed in; instead of + # returning NULL / None. examples include ":", ":0", "*0", etc. + # safe_crypt() returns None for any string starting with one of the + # chars in this string... + _invalid_prefixes = u("*:!") + + if PY3: + def safe_crypt(secret, hash): + if isinstance(secret, bytes): + # Python 3's crypt() only accepts unicode, which is then + # encoding using utf-8 before passing to the C-level crypt(). + # so we have to decode the secret. + orig = secret + try: + secret = secret.decode("utf-8") + except UnicodeDecodeError: + return None + assert secret.encode("utf-8") == orig, \ + "utf-8 spec says this can't happen!" + if _NULL in secret: + raise ValueError("null character in secret") + if isinstance(hash, bytes): + hash = hash.decode("ascii") + result = _crypt(secret, hash) + if not result or result[0] in _invalid_prefixes: + return None + return result + else: + def safe_crypt(secret, hash): + if isinstance(secret, unicode): + secret = secret.encode("utf-8") + if _NULL in secret: + raise ValueError("null character in secret") + if isinstance(hash, unicode): + hash = hash.encode("ascii") + result = _crypt(secret, hash) + if not result: + return None + result = result.decode("ascii") + if result[0] in _invalid_prefixes: + return None + return result + +add_doc(safe_crypt, """Wrapper around stdlib's crypt. + + This is a wrapper around stdlib's :func:`!crypt.crypt`, which attempts + to provide uniform behavior across Python 2 and 3. + + :arg secret: + password, as bytes or unicode (unicode will be encoded as ``utf-8``). + + :arg hash: + hash or config string, as ascii bytes or unicode. + + :returns: + resulting hash as ascii unicode; or ``None`` if the password + couldn't be hashed due to one of the issues: + + * :func:`crypt()` not available on platform. + + * Under Python 3, if *secret* is specified as bytes, + it must be use ``utf-8`` or it can't be passed + to :func:`crypt()`. + + * Some OSes will return ``None`` if they don't recognize + the algorithm being used (though most will simply fall + back to des-crypt). + + * Some OSes will return an error string if the input config + is recognized but malformed; current code converts these to ``None`` + as well. + """) + +def test_crypt(secret, hash): + """check if :func:`crypt.crypt` supports specific hash + :arg secret: password to test + :arg hash: known hash of password to use as reference + :returns: True or False + """ + assert secret and hash + return safe_crypt(secret, hash) == hash + +# pick best timer function to expose as "tick" - lifted from timeit module. 
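
safe_crypt() and test_crypt() above are easiest to see with a classic des-crypt call; whether it succeeds depends on the host's crypt() support, which is exactly why the None check matters. A hedged sketch, with an arbitrary password and the traditional 2-character salt "ab":

    from passlib.utils import safe_crypt, test_crypt

    hash = safe_crypt("password", "ab")   # "ab" acts as a des-crypt config string
    if hash is not None:                  # None: no crypt(), or the config was rejected
        assert test_crypt("password", hash)
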
+if sys.platform == "win32": + # On Windows, the best timer is time.clock() + from time import clock as tick +else: + # On most other platforms the best timer is time.time() + from time import time as tick + +#============================================================================= +# randomness +#============================================================================= + +#------------------------------------------------------------------------ +# setup rng for generating salts +#------------------------------------------------------------------------ + +# NOTE: +# generating salts (e.g. h64_gensalt, below) doesn't require cryptographically +# strong randomness. it just requires enough range of possible outputs +# that making a rainbow table is too costly. so it should be ok to +# fall back on python's builtin mersenne twister prng, as long as it's seeded each time +# this module is imported, using a couple of minor entropy sources. + +try: + os.urandom(1) + has_urandom = True +except NotImplementedError: # pragma: no cover + has_urandom = False + +def genseed(value=None): + "generate prng seed value from system resources" + from hashlib import sha512 + text = u("%s %s %s %s %.15f %.15f %s") % ( + # if caller specified a seed value, mix it in + value, + + # if caller's seed value was an RNG, mix in bits from it's state + value.getrandbits(1<<15) if hasattr(value, "getrandbits") else None, + + # add current process id + # NOTE: not available in some environments, e.g. GAE + os.getpid() if hasattr(os, "getpid") else None, + + # id of a freshly created object. + # (at least 1 byte of which should be hard to predict) + id(object()), + + # the current time, to whatever precision os uses + time.time(), + time.clock(), + + # if urandom available, might as well mix some bytes in. + os.urandom(32).decode("latin-1") if has_urandom else 0, + ) + # hash it all up and return it as int/long + return int(sha512(text.encode("utf-8")).hexdigest(), 16) + +if has_urandom: + rng = random.SystemRandom() +else: # pragma: no cover -- runtime detection + # NOTE: to reseed use ``rng.seed(genseed(rng))`` + rng = random.Random(genseed()) + +#------------------------------------------------------------------------ +# some rng helpers +#------------------------------------------------------------------------ +def getrandbytes(rng, count): + """return byte-string containing *count* number of randomly generated bytes, using specified rng""" + # NOTE: would be nice if this was present in stdlib Random class + + ###just in case rng provides this... + ##meth = getattr(rng, "getrandbytes", None) + ##if meth: + ## return meth(count) + + if not count: + return _BEMPTY + def helper(): + # XXX: break into chunks for large number of bits? + value = rng.getrandbits(count<<3) + i = 0 + while i < count: + yield value & 0xff + value >>= 3 + i += 1 + return join_byte_values(helper()) + +def getrandstr(rng, charset, count): + """return string containing *count* number of chars/bytes, whose elements are drawn from specified charset, using specified rng""" + # NOTE: tests determined this is 4x faster than rng.sample(), + # which is why that's not being used here. + + # check alphabet & count + if count < 0: + raise ValueError("count must be >= 0") + letters = len(charset) + if letters == 0: + raise ValueError("alphabet must not be empty") + if letters == 1: + return charset * count + + # get random value, and write out to buffer + def helper(): + # XXX: break into chunks for large number of letters? 
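
getrandstr() above maps one large random integer onto the given charset, so its output length and alphabet are straightforward to verify. A quick sketch using the module-level rng, with an arbitrary charset:

    from passlib.utils import rng, getrandstr

    charset = "abcdefghjkmnpqrstuvwxyz23456789"
    token = getrandstr(rng, charset, 16)
    assert len(token) == 16
    assert all(c in charset for c in token)
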
+ value = rng.randrange(0, letters**count) + i = 0 + while i < count: + yield charset[value % letters] + value //= letters + i += 1 + + if isinstance(charset, unicode): + return join_unicode(helper()) + else: + return join_byte_elems(helper()) + +_52charset = '2346789ABCDEFGHJKMNPQRTUVWXYZabcdefghjkmnpqrstuvwxyz' + +def generate_password(size=10, charset=_52charset): + """generate random password using given length & charset + + :param size: + size of password. + + :param charset: + optional string specified set of characters to draw from. + + the default charset contains all normal alphanumeric characters, + except for the characters ``1IiLl0OoS5``, which were omitted + due to their visual similarity. + + :returns: :class:`!str` containing randomly generated password. + + .. note:: + + Using the default character set, on a OS with :class:`!SystemRandom` support, + this function should generate passwords with 5.7 bits of entropy per character. + """ + return getrandstr(rng, charset, size) + +#============================================================================= +# object type / interface tests +#============================================================================= +_handler_attrs = ( + "name", + "setting_kwds", "context_kwds", + "genconfig", "genhash", + "verify", "encrypt", "identify", + ) + +def is_crypt_handler(obj): + "check if object follows the :ref:`password-hash-api`" + # XXX: change to use isinstance(obj, PasswordHash) under py26+? + return all(hasattr(obj, name) for name in _handler_attrs) + +_context_attrs = ( + "needs_update", + "genconfig", "genhash", + "verify", "encrypt", "identify", + ) + +def is_crypt_context(obj): + "check if object appears to be a :class:`~passlib.context.CryptContext` instance" + # XXX: change to use isinstance(obj, CryptContext)? + return all(hasattr(obj, name) for name in _context_attrs) + +##def has_many_backends(handler): +## "check if handler provides multiple baceknds" +## # NOTE: should also provide get_backend(), .has_backend(), and .backends attr +## return hasattr(handler, "set_backend") + +def has_rounds_info(handler): + "check if handler provides the optional :ref:`rounds information ` attributes" + return ('rounds' in handler.setting_kwds and + getattr(handler, "min_rounds", None) is not None) + +def has_salt_info(handler): + "check if handler provides the optional :ref:`salt information ` attributes" + return ('salt' in handler.setting_kwds and + getattr(handler, "min_salt_size", None) is not None) + +##def has_raw_salt(handler): +## "check if handler takes in encoded salt as unicode (False), or decoded salt as bytes (True)" +## sc = getattr(handler, "salt_chars", None) +## if sc is None: +## return None +## elif isinstance(sc, unicode): +## return False +## elif isinstance(sc, bytes): +## return True +## else: +## raise TypeError("handler.salt_chars must be None/unicode/bytes") + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/utils/_blowfish/__init__.py b/passlib/utils/_blowfish/__init__.py new file mode 100644 index 00000000..16b85443 --- /dev/null +++ b/passlib/utils/_blowfish/__init__.py @@ -0,0 +1,172 @@ +"""passlib.utils._blowfish - pure-python eks-blowfish implementation for bcrypt + +This is a pure-python implementation of the EKS-Blowfish algorithm described by +Provos and Mazieres in `A Future-Adaptable Password Scheme +`_. 
+
+This package contains two submodules:
+
+* ``_blowfish/base.py`` contains a class implementing the eks-blowfish algorithm
+  using easy-to-examine code.
+
+* ``_blowfish/unrolled.py`` contains a subclass which replaces some methods
+  of the original class with sped-up versions, mainly using unrolled loops
+  and local variables. this is the class which is actually used by
+  Passlib to perform BCrypt in pure python.
+
+  This module is auto-generated by a script, ``_blowfish/_gen_files.py``.
+
+Status
+------
+This implementation is usable, but is an order of magnitude too slow to be
+usable with real security. For "ok" security, BCrypt hashes should have at
+least 2**11 rounds (as of 2011). Assuming a desired response time <= 100ms,
+this means a BCrypt implementation should get at least 20 rounds/ms in order
+to be both usable *and* secure. On a 2 ghz cpu, this implementation gets
+roughly 0.09 rounds/ms under CPython (220x too slow), and 1.9 rounds/ms
+under PyPy (10x too slow).
+
+History
+-------
+While subsequently modified considerably for Passlib, this code was originally
+based on `jBcrypt 0.2 `_, which was
+released under the BSD license::
+
+    Copyright (c) 2006 Damien Miller
+
+    Permission to use, copy, modify, and distribute this software for any
+    purpose with or without fee is hereby granted, provided that the above
+    copyright notice and this permission notice appear in all copies.
+
+    THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+    WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+    MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+    ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+    WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+    ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+    OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""
+#=============================================================================
+# imports
+#=============================================================================
+# core
+from itertools import chain
+import struct
+# pkg
+from passlib.utils import bcrypt64, getrandbytes, rng
+from passlib.utils.compat import b, bytes, BytesIO, unicode, u
+from passlib.utils._blowfish.unrolled import BlowfishEngine
+# local
+__all__ = [
+    'BlowfishEngine',
+    'raw_bcrypt',
+]
+
+#=============================================================================
+# bcrypt constants
+#=============================================================================
+
+# bcrypt constant data "OrpheanBeholderScryDoubt" as 6 integers
+BCRYPT_CDATA = [
+    0x4f727068, 0x65616e42, 0x65686f6c,
+    0x64657253, 0x63727944, 0x6f756274
+]
+
+# struct used to encode ciphertext as digest (last output byte discarded)
+digest_struct = struct.Struct(">6I")
+
+#=============================================================================
+# base bcrypt helper
+#
+# interface designed only for use by passlib.handlers.bcrypt:BCrypt
+# probably not suitable for other purposes
+#=============================================================================
+BNULL = b('\x00')
+
+def raw_bcrypt(password, ident, salt, log_rounds):
+    """perform central password hashing step in bcrypt scheme.
+
+    :param password: the password to hash
+    :param ident: identifier w/ minor version (e.g. 
2, 2a) + :param salt: the binary salt to use (encoded in bcrypt-base64) + :param rounds: the log2 of the number of rounds (as int) + :returns: bcrypt-base64 encoded checksum + """ + #=================================================================== + # parse inputs + #=================================================================== + + # parse ident + assert isinstance(ident, unicode) + if ident == u('2'): + minor = 0 + elif ident == u('2a'): + minor = 1 + # XXX: how to indicate caller wants to use crypt_blowfish's + # workaround variant of 2a? + elif ident == u('2x'): + raise ValueError("crypt_blowfish's buggy '2x' hashes are not " + "currently supported") + elif ident == u('2y'): + # crypt_blowfish compatibility ident which guarantees compat w/ 2a + minor = 1 + else: + raise ValueError("unknown ident: %r" % (ident,)) + + # decode & validate salt + assert isinstance(salt, bytes) + salt = bcrypt64.decode_bytes(salt) + if len(salt) < 16: + raise ValueError("Missing salt bytes") + elif len(salt) > 16: + salt = salt[:16] + + # prepare password + assert isinstance(password, bytes) + if minor > 0: + password += BNULL + + # validate rounds + if log_rounds < 4 or log_rounds > 31: + raise ValueError("Bad number of rounds") + + #=================================================================== + # + # run EKS-Blowfish algorithm + # + # This uses the "enhanced key schedule" step described by + # Provos and Mazieres in "A Future-Adaptable Password Scheme" + # http://www.openbsd.org/papers/bcrypt-paper.ps + # + #=================================================================== + + engine = BlowfishEngine() + + # convert password & salt into list of 18 32-bit integers (72 bytes total). + pass_words = engine.key_to_words(password) + salt_words = engine.key_to_words(salt) + + # truncate salt_words to original 16 byte salt, or loop won't wrap + # correctly when passed to .eks_salted_expand() + salt_words16 = salt_words[:4] + + # do EKS key schedule setup + engine.eks_salted_expand(pass_words, salt_words16) + + # apply password & salt keys to key schedule a bunch more times. 
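
With the salt decoded and the cost validated as above, raw_bcrypt() runs the EKS key schedule and, per its docstring, returns the bcrypt-base64 checksum. A hedged usage sketch (the password, the 22-character salt string, and the cost factor 2**5 are arbitrary choices, and pure-python execution is slow):

    from passlib.utils.compat import u
    from passlib.utils._blowfish import raw_bcrypt

    # 22 bcrypt64 characters decode to the required 16 salt bytes
    checksum = raw_bcrypt(b"s3cret", u("2a"), b"abcdefghijklmnopqrstuv", 5)
    assert isinstance(checksum, bytes)
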
+ rounds = 1<> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) +""".strip() + +def render_encipher(write, indent=0): + for i in irange(0, 15, 2): + write(indent, """\ + # Feistel substitution on left word (round %(i)d) + r ^= %(left)s ^ p%(i1)d + + # Feistel substitution on right word (round %(i1)d) + l ^= %(right)s ^ p%(i2)d + """, i=i, i1=i+1, i2=i+2, + left=BFSTR, right=BFSTR.replace("l","r"), + ) + +def write_encipher_function(write, indent=0): + write(indent, """\ + def encipher(self, l, r): + \"""blowfish encipher a single 64-bit block encoded as two 32-bit ints\""" + + (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, + p10, p11, p12, p13, p14, p15, p16, p17) = self.P + S0, S1, S2, S3 = self.S + + l ^= p0 + + """) + render_encipher(write, indent+1) + + write(indent+1, """\ + + return r ^ p17, l + + """) + +def write_expand_function(write, indent=0): + write(indent, """\ + def expand(self, key_words): + \"""unrolled version of blowfish key expansion\""" + ##assert len(key_words) >= 18, "size of key_words must be >= 18" + + P, S = self.P, self.S + S0, S1, S2, S3 = S + + #============================================================= + # integrate key + #============================================================= + """) + for i in irange(18): + write(indent+1, """\ + p%(i)d = P[%(i)d] ^ key_words[%(i)d] + """, i=i) + write(indent+1, """\ + + #============================================================= + # update P + #============================================================= + + #------------------------------------------------ + # update P[0] and P[1] + #------------------------------------------------ + l, r = p0, 0 + + """) + + render_encipher(write, indent+1) + + write(indent+1, """\ + + p0, p1 = l, r = r ^ p17, l + + """) + + for i in irange(2, 18, 2): + write(indent+1, """\ + #------------------------------------------------ + # update P[%(i)d] and P[%(i1)d] + #------------------------------------------------ + l ^= p0 + + """, i=i, i1=i+1) + + render_encipher(write, indent+1) + + write(indent+1, """\ + p%(i)d, p%(i1)d = l, r = r ^ p17, l + + """, i=i, i1=i+1) + + write(indent+1, """\ + + #------------------------------------------------ + # save changes to original P array + #------------------------------------------------ + P[:] = (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, + p10, p11, p12, p13, p14, p15, p16, p17) + + #============================================================= + # update S + #============================================================= + + for box in S: + j = 0 + while j < 256: + l ^= p0 + + """) + + render_encipher(write, indent+3) + + write(indent+3, """\ + + box[j], box[j+1] = l, r = r ^ p17, l + j += 2 + """) + +#============================================================================= +# main +#============================================================================= + +def main(): + target = os.path.join(os.path.dirname(__file__), "unrolled.py") + fh = file(target, "w") + + def write(indent, msg, **kwds): + literal = kwds.pop("literal", False) + if kwds: + msg %= kwds + if not literal: + msg = textwrap.dedent(msg.rstrip(" ")) + if indent: + msg = indent_block(msg, " " * (indent*4)) + fh.write(msg) + + write(0, """\ + \"""passlib.utils._blowfish.unrolled - unrolled loop implementation of bcrypt, + autogenerated by _gen_files.py + + currently this override the encipher() and expand() methods + with optimized versions, and leaves the other base.py methods alone. 
+ \""" + #================================================================= + # imports + #================================================================= + # pkg + from passlib.utils._blowfish.base import BlowfishEngine as _BlowfishEngine + # local + __all__ = [ + "BlowfishEngine", + ] + #================================================================= + # + #================================================================= + class BlowfishEngine(_BlowfishEngine): + + """) + + write_encipher_function(write, indent=1) + write_expand_function(write, indent=1) + + write(0, """\ + #================================================================= + # eoc + #================================================================= + + #================================================================= + # eof + #================================================================= + """) + +if __name__ == "__main__": + main() + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/utils/_blowfish/base.py b/passlib/utils/_blowfish/base.py new file mode 100644 index 00000000..f62aca24 --- /dev/null +++ b/passlib/utils/_blowfish/base.py @@ -0,0 +1,442 @@ +"""passlib.utils._blowfish.base - unoptimized pure-python blowfish engine""" +#============================================================================= +# imports +#============================================================================= +# core +import struct +# pkg +from passlib.utils.compat import bytes +from passlib.utils import repeat_string +# local +__all__ = [ + "BlowfishEngine", +] + +#============================================================================= +# blowfish constants +#============================================================================= +BLOWFISH_P = BLOWFISH_S = None + +def _init_constants(): + global BLOWFISH_P, BLOWFISH_S + + # NOTE: blowfish's spec states these numbers are the hex representation + # of the fractional portion of PI, in order. 
+ + # Initial contents of key schedule - 18 integers + BLOWFISH_P = [ + 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, + 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, + 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, + 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, + 0x9216d5d9, 0x8979fb1b, + ] + + # all 4 blowfish S boxes in one array - 256 integers per S box + BLOWFISH_S = [ + # sbox 1 + [ + 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, + 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, + 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, + 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, + 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, + 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, + 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, + 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, + 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, + 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, + 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, + 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, + 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, + 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, + 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, + 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, + 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, + 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, + 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, + 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, + 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, + 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, + 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, + 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, + 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, + 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, + 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, + 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, + 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, + 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, + 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, + 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, + 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, + 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, + 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, + 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, + 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, + 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, + 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, + 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, + 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, + 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, + 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, + 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, + 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, + 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, + 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, + 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, + 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, + 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, + 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, + 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, + 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, + 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, + 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, + 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, + 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, + 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, + 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, + 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, + 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, + 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, + 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, + 
0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a, + ], + # sbox 2 + [ + 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, + 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, + 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, + 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, + 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, + 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, + 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, + 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, + 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, + 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, + 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, + 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, + 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, + 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, + 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, + 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, + 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, + 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, + 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, + 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, + 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, + 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, + 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, + 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, + 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, + 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, + 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, + 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, + 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, + 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, + 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, + 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, + 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, + 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, + 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, + 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, + 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, + 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, + 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, + 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, + 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, + 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, + 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, + 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, + 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, + 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, + 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, + 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, + 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, + 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, + 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, + 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, + 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, + 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, + 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, + 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, + 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, + 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, + 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, + 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, + 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, + 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, + 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, + 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7, + ], + # sbox 3 + [ + 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, + 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, + 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, + 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, + 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, + 0xbfbc09ec, 
0x03bd9785, 0x7fac6dd0, 0x31cb8504, + 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, + 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, + 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, + 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, + 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, + 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, + 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, + 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, + 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, + 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, + 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, + 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, + 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, + 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, + 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, + 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, + 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, + 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, + 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, + 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, + 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, + 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, + 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, + 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, + 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, + 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, + 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, + 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, + 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, + 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, + 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, + 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, + 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, + 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, + 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, + 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, + 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, + 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, + 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, + 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, + 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, + 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, + 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, + 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, + 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, + 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, + 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, + 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, + 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, + 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, + 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, + 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, + 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, + 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, + 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, + 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, + 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, + 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0, + ], + # sbox 4 + [ + 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, + 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, + 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, + 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, + 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, + 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, + 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, + 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, + 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, + 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, + 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, + 0xc72fefd3, 0xf752f7da, 0x3f046f69, 
0x77fa0a59, + 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, + 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, + 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, + 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, + 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, + 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, + 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, + 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, + 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, + 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, + 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, + 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, + 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, + 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, + 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, + 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, + 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, + 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, + 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, + 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, + 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, + 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, + 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, + 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, + 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, + 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, + 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, + 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, + 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, + 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, + 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, + 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, + 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, + 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, + 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, + 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, + 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, + 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, + 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, + 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, + 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, + 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, + 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, + 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, + 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, + 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, + 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, + 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, + 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, + 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, + 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, + 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6, + ] + ] + +#============================================================================= +# engine +#============================================================================= +class BlowfishEngine(object): + + def __init__(self): + if BLOWFISH_P is None: + _init_constants() + self.P = list(BLOWFISH_P) + self.S = [ list(box) for box in BLOWFISH_S ] + + #=================================================================== + # common helpers + #=================================================================== + @staticmethod + def key_to_words(data, size=18): + """convert data to tuple of 4-byte integers, repeating or + truncating data as needed to reach specified size""" + assert isinstance(data, bytes) + dlen = len(data) + if not dlen: + # return all zeros - original C code would just read the NUL after + # the password, so mimicing that behavior for this edge case. 
+ return [0]*size + + # repeat data until it fills up 4*size bytes + data = repeat_string(data, size<<2) + + # unpack + return struct.unpack(">%dI" % (size,), data) + + #=================================================================== + # blowfish routines + #=================================================================== + def encipher(self, l, r): + "loop version of blowfish encipher routine" + P, S = self.P, self.S + l ^= P[0] + i = 1 + while i < 17: + # Feistel substitution on left word + r = ((((S[0][l >> 24] + S[1][(l >> 16) & 0xff]) ^ S[2][(l >> 8) & 0xff]) + + S[3][l & 0xff]) & 0xffffffff) ^ P[i] ^ r + # swap vars so even rounds do Feistel substition on right word + l, r = r, l + i += 1 + return r ^ P[17], l + + # NOTE: decipher is same as above, just with reversed(P) instead. + + def expand(self, key_words): + "perform stock Blowfish keyschedule setup" + assert len(key_words) >= 18, "key_words must be at least as large as P" + P, S, encipher = self.P, self.S, self.encipher + + i = 0 + while i < 18: + P[i] ^= key_words[i] + i += 1 + + i = l = r = 0 + while i < 18: + P[i], P[i+1] = l,r = encipher(l,r) + i += 2 + + for box in S: + i = 0 + while i < 256: + box[i], box[i+1] = l,r = encipher(l,r) + i += 2 + + #=================================================================== + # eks-blowfish routines + #=================================================================== + def eks_salted_expand(self, key_words, salt_words): + "perform EKS' salted version of Blowfish keyschedule setup" + # NOTE: this is the same as expand(), except for the addition + # of the operations involving *salt_words*. + + assert len(key_words) >= 18, "key_words must be at least as large as P" + salt_size = len(salt_words) + assert salt_size, "salt_words must not be empty" + assert not salt_size & 1, "salt_words must have even length" + P, S, encipher = self.P, self.S, self.encipher + + i = 0 + while i < 18: + P[i] ^= key_words[i] + i += 1 + + s = i = l = r = 0 + while i < 18: + l ^= salt_words[s] + r ^= salt_words[s+1] + s += 2 + if s == salt_size: + s = 0 + P[i], P[i+1] = l,r = encipher(l,r) # next() + i += 2 + + for box in S: + i = 0 + while i < 256: + l ^= salt_words[s] + r ^= salt_words[s+1] + s += 2 + if s == salt_size: + s = 0 + box[i], box[i+1] = l,r = encipher(l,r) # next() + i += 2 + + def eks_repeated_expand(self, key_words, salt_words, rounds): + "perform rounds stage of EKS keyschedule setup" + expand = self.expand + n = 0 + while n < rounds: + expand(key_words) + expand(salt_words) + n += 1 + + def repeat_encipher(self, l, r, count): + "repeatedly apply encipher operation to a block" + encipher = self.encipher + n = 0 + while n < count: + l, r = encipher(l, r) + n += 1 + return l, r + + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/utils/_blowfish/unrolled.py b/passlib/utils/_blowfish/unrolled.py new file mode 100644 index 00000000..7ce0bcc5 --- /dev/null +++ b/passlib/utils/_blowfish/unrolled.py @@ -0,0 +1,771 @@ +"""passlib.utils._blowfish.unrolled - unrolled loop implementation of bcrypt, +autogenerated by _gen_files.py + +currently this override the encipher() and expand() methods +with optimized versions, and leaves the other base.py methods alone. 
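
Taken together, key_to_words(), expand() and encipher() from base.py above are enough to drive the engine by hand. A minimal sketch, assuming the vendored package is importable; the key and the 64-bit test block are arbitrary:

    from passlib.utils._blowfish.base import BlowfishEngine

    engine = BlowfishEngine()
    key_words = engine.key_to_words(b"16 byte test key")   # repeated/truncated to 72 bytes
    engine.expand(key_words)                               # stock Blowfish key schedule
    left, right = engine.encipher(0x01234567, 0x89abcdef)
    assert 0 <= left < 2**32 and 0 <= right < 2**32
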
+""" +#============================================================================= +# imports +#============================================================================= +# pkg +from passlib.utils._blowfish.base import BlowfishEngine as _BlowfishEngine +# local +__all__ = [ + "BlowfishEngine", +] +#============================================================================= +# +#============================================================================= +class BlowfishEngine(_BlowfishEngine): + + def encipher(self, l, r): + """blowfish encipher a single 64-bit block encoded as two 32-bit ints""" + + (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, + p10, p11, p12, p13, p14, p15, p16, p17) = self.P + S0, S1, S2, S3 = self.S + + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + + return r ^ p17, l + + def expand(self, key_words): + """unrolled version of blowfish key expansion""" + ##assert len(key_words) >= 18, "size of key_words must be >= 18" + + P, S = self.P, self.S + S0, S1, S2, S3 = S + + #============================================================= + 
# integrate key + #============================================================= + p0 = P[0] ^ key_words[0] + p1 = P[1] ^ key_words[1] + p2 = P[2] ^ key_words[2] + p3 = P[3] ^ key_words[3] + p4 = P[4] ^ key_words[4] + p5 = P[5] ^ key_words[5] + p6 = P[6] ^ key_words[6] + p7 = P[7] ^ key_words[7] + p8 = P[8] ^ key_words[8] + p9 = P[9] ^ key_words[9] + p10 = P[10] ^ key_words[10] + p11 = P[11] ^ key_words[11] + p12 = P[12] ^ key_words[12] + p13 = P[13] ^ key_words[13] + p14 = P[14] ^ key_words[14] + p15 = P[15] ^ key_words[15] + p16 = P[16] ^ key_words[16] + p17 = P[17] ^ key_words[17] + + #============================================================= + # update P + #============================================================= + + #------------------------------------------------ + # update P[0] and P[1] + #------------------------------------------------ + l, r = p0, 0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + + p0, p1 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[2] and P[3] + 
#------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p2, p3 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[4] and P[5] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel 
substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p4, p5 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[6] and P[7] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 
0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p6, p7 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[8] and P[9] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + 
p8, p9 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[10] and P[11] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p10, p11 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[12] and P[13] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l 
>> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p12, p13 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[14] and P[15] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 
0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p14, p15 = l, r = r ^ p17, l + + #------------------------------------------------ + # update P[16] and P[17] + #------------------------------------------------ + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) 
+ l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + p16, p17 = l, r = r ^ p17, l + + + #------------------------------------------------ + # save changes to original P array + #------------------------------------------------ + P[:] = (p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, + p10, p11, p12, p13, p14, p15, p16, p17) + + #============================================================= + # update S + #============================================================= + + for box in S: + j = 0 + while j < 256: + l ^= p0 + + # Feistel substitution on left word (round 0) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p1 + + # Feistel substitution on right word (round 1) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p2 + # Feistel substitution on left word (round 2) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p3 + + # Feistel substitution on right word (round 3) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p4 + # Feistel substitution on left word (round 4) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p5 + + # Feistel substitution on right word (round 5) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p6 + # Feistel substitution on left word (round 6) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p7 + + # Feistel substitution on right word (round 7) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p8 + # Feistel substitution on left word (round 8) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p9 + + # Feistel substitution on right word (round 9) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p10 + # Feistel substitution on left word (round 10) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p11 + + # Feistel substitution on right word (round 11) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p12 + # Feistel substitution on left word (round 12) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p13 + + # Feistel substitution on right word (round 13) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p14 + # Feistel substitution on left word (round 14) + r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) + + S3[l & 0xff]) & 0xffffffff) ^ p15 + + # Feistel substitution on right word (round 15) + l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) + + S3[r & 0xff]) & 0xffffffff) ^ p16 + + box[j], box[j+1] = l, r = r ^ p17, l + j += 2 + #=================================================================== + # eoc + #=================================================================== + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/utils/compat.py b/passlib/utils/compat.py new file mode 100644 index 
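The unrolled rounds above all evaluate the same 16-round Blowfish Feistel function; only the pair of P entries being written back changes from block to block, and the unrolling exists purely as a speed optimization for the pure-Python engine. A loop-based sketch of what this key schedule computes, assuming P is the 18-word subkey list, S the four 256-entry S-box lists, and key_words the key already cycled out to 18 32-bit words (the function names here are illustrative, not part of the module):

def _encipher_sketch(l, r, P, S0, S1, S2, S3):
    # one Blowfish encryption of the 64-bit block (l, r): the same 16 Feistel
    # rounds as the unrolled code, then the final swap/whitening with P[17]
    l ^= P[0]
    for i in range(1, 17, 2):
        r ^= ((((S0[l >> 24] + S1[(l >> 16) & 0xff]) ^ S2[(l >> 8) & 0xff]) +
               S3[l & 0xff]) & 0xffffffff) ^ P[i]
        l ^= ((((S0[r >> 24] + S1[(r >> 16) & 0xff]) ^ S2[(r >> 8) & 0xff]) +
               S3[r & 0xff]) & 0xffffffff) ^ P[i + 1]
    return r ^ P[17], l

def _expand_sketch(key_words, P, S):
    # Blowfish key schedule: XOR the key into P, then repeatedly encrypt an
    # all-zero block, writing each ciphertext pair back into P and then into S
    S0, S1, S2, S3 = S
    for i in range(18):
        P[i] ^= key_words[i]
    l = r = 0
    for i in range(0, 18, 2):
        l, r = _encipher_sketch(l, r, P, S0, S1, S2, S3)
        P[i], P[i + 1] = l, r
    for box in S:
        for j in range(0, 256, 2):
            l, r = _encipher_sketch(l, r, P, S0, S1, S2, S3)
            box[j], box[j + 1] = l, r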
00000000..a7bb626c --- /dev/null +++ b/passlib/utils/compat.py @@ -0,0 +1,436 @@ +"""passlib.utils.compat - python 2/3 compatibility helpers""" +#============================================================================= +# figure out what we're running +#============================================================================= + +#------------------------------------------------------------------------ +# python version +#------------------------------------------------------------------------ +import sys +PY2 = sys.version_info < (3,0) +PY3 = sys.version_info >= (3,0) +PY_MAX_25 = sys.version_info < (2,6) # py 2.5 or earlier +PY27 = sys.version_info[:2] == (2,7) # supports last 2.x release +PY_MIN_32 = sys.version_info >= (3,2) # py 3.2 or later + +#------------------------------------------------------------------------ +# python implementation +#------------------------------------------------------------------------ +PYPY = hasattr(sys, "pypy_version_info") +JYTHON = sys.platform.startswith('java') + +#------------------------------------------------------------------------ +# capabilities +#------------------------------------------------------------------------ + +# __dir__() added in py2.6 +SUPPORTS_DIR_METHOD = not PY_MAX_25 and not (PYPY and sys.pypy_version_info < (1,6)) + +#============================================================================= +# common imports +#============================================================================= +import logging; log = logging.getLogger(__name__) +if PY3: + import builtins +else: + import __builtin__ as builtins + +def add_doc(obj, doc): + """add docstring to an object""" + obj.__doc__ = doc + +#============================================================================= +# the default exported vars +#============================================================================= +__all__ = [ + # python versions + 'PY2', 'PY3', 'PY_MAX_25', 'PY27', 'PY_MIN_32', + + # io + 'BytesIO', 'StringIO', 'NativeStringIO', 'SafeConfigParser', + 'print_', + + # type detection +## 'is_mapping', + 'callable', + 'int_types', + 'num_types', + 'base_string_types', + + # unicode/bytes types & helpers + 'u', 'b', + 'unicode', 'bytes', + 'uascii_to_str', 'bascii_to_str', + 'str_to_uascii', 'str_to_bascii', + 'join_unicode', 'join_bytes', + 'join_byte_values', 'join_byte_elems', + 'byte_elem_value', + 'iter_byte_values', + + # iteration helpers + 'irange', #'lrange', + 'imap', 'lmap', + 'iteritems', 'itervalues', + 'next', + + # introspection + 'exc_err', 'get_method_function', 'add_doc', +] + +# begin accumulating mapping of lazy-loaded attrs, +# 'merged' into module at bottom +_lazy_attrs = dict() + +#============================================================================= +# unicode & bytes types +#============================================================================= +if PY3: + unicode = str + bytes = builtins.bytes + + def u(s): + assert isinstance(s, str) + return s + + def b(s): + assert isinstance(s, str) + return s.encode("latin-1") + + base_string_types = (unicode, bytes) + +else: + unicode = builtins.unicode + bytes = str if PY_MAX_25 else builtins.bytes + + def u(s): + assert isinstance(s, str) + return s.decode("unicode_escape") + + def b(s): + assert isinstance(s, str) + return s + + base_string_types = basestring + +#============================================================================= +# unicode & bytes helpers +#============================================================================= +# function to 
join list of unicode strings +join_unicode = u('').join + +# function to join list of byte strings +join_bytes = b('').join + +if PY3: + def uascii_to_str(s): + assert isinstance(s, unicode) + return s + + def bascii_to_str(s): + assert isinstance(s, bytes) + return s.decode("ascii") + + def str_to_uascii(s): + assert isinstance(s, str) + return s + + def str_to_bascii(s): + assert isinstance(s, str) + return s.encode("ascii") + + join_byte_values = join_byte_elems = bytes + + def byte_elem_value(elem): + assert isinstance(elem, int) + return elem + + def iter_byte_values(s): + assert isinstance(s, bytes) + return s + + def iter_byte_chars(s): + assert isinstance(s, bytes) + # FIXME: there has to be a better way to do this + return (bytes([c]) for c in s) + +else: + def uascii_to_str(s): + assert isinstance(s, unicode) + return s.encode("ascii") + + def bascii_to_str(s): + assert isinstance(s, bytes) + return s + + def str_to_uascii(s): + assert isinstance(s, str) + return s.decode("ascii") + + def str_to_bascii(s): + assert isinstance(s, str) + return s + + def join_byte_values(values): + return join_bytes(chr(v) for v in values) + + join_byte_elems = join_bytes + + byte_elem_value = ord + + def iter_byte_values(s): + assert isinstance(s, bytes) + return (ord(c) for c in s) + + def iter_byte_chars(s): + assert isinstance(s, bytes) + return s + +add_doc(uascii_to_str, "helper to convert ascii unicode -> native str") +add_doc(bascii_to_str, "helper to convert ascii bytes -> native str") +add_doc(str_to_uascii, "helper to convert ascii native str -> unicode") +add_doc(str_to_bascii, "helper to convert ascii native str -> bytes") + +# join_byte_values -- function to convert list of ordinal integers to byte string. + +# join_byte_elems -- function to convert list of byte elements to byte string; +# i.e. what's returned by ``b('a')[0]``... +# this is b('a') under PY2, but 97 under PY3. 
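These helpers exist so that byte strings can be handled identically under Python 2 and 3, where indexing a bytes object yields a 1-character str versus an int. An illustrative usage sketch, using only names exported by passlib.utils.compat (the asserted values simply document the expected behaviour):

from passlib.utils.compat import b, join_byte_values, iter_byte_values, byte_elem_value

data = b("abc")
# iteration always yields ints 0-255 on both interpreters
assert list(iter_byte_values(data)) == [97, 98, 99]
# data[0] is "a" under PY2 but 97 under PY3; byte_elem_value normalizes it to an int
assert byte_elem_value(data[0]) == 97
# rebuild a byte string from a list of ordinal values
assert join_byte_values([97, 98, 99]) == data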
+ +# byte_elem_value -- function to convert byte element to integer -- a noop under PY3 + +add_doc(iter_byte_values, "iterate over byte string as sequence of ints 0-255") +add_doc(iter_byte_chars, "iterate over byte string as sequence of 1-byte strings") + +#============================================================================= +# numeric +#============================================================================= +if PY3: + int_types = (int,) + num_types = (int, float) +else: + int_types = (int, long) + num_types = (int, long, float) + +#============================================================================= +# iteration helpers +# +# irange - range iterable / view (xrange under py2, range under py3) +# lrange - range list (range under py2, list(range()) under py3) +# +# imap - map to iterator +# lmap - map to list +#============================================================================= +if PY3: + irange = range + ##def lrange(*a,**k): + ## return list(range(*a,**k)) + + def lmap(*a, **k): + return list(map(*a,**k)) + imap = map + + def iteritems(d): + return d.items() + def itervalues(d): + return d.values() + + next_method_attr = "__next__" + +else: + irange = xrange + ##lrange = range + + lmap = map + from itertools import imap + + def iteritems(d): + return d.iteritems() + def itervalues(d): + return d.itervalues() + + next_method_attr = "next" + +if PY_MAX_25: + _undef = object() + def next(itr, default=_undef): + "compat wrapper for next()" + if default is _undef: + return itr.next() + try: + return itr.next() + except StopIteration: + return default +else: + next = builtins.next + +#============================================================================= +# typing +#============================================================================= +##def is_mapping(obj): +## # non-exhaustive check, enough to distinguish from lists, etc +## return hasattr(obj, "items") + +if (3,0) <= sys.version_info < (3,2): + # callable isn't dead, it's just resting + from collections import Callable + def callable(obj): + return isinstance(obj, Callable) +else: + callable = builtins.callable + +#============================================================================= +# introspection +#============================================================================= +def exc_err(): + "return current error object (to avoid try/except syntax change)" + return sys.exc_info()[1] + +if PY3: + method_function_attr = "__func__" +else: + method_function_attr = "im_func" + +def get_method_function(func): + "given (potential) method, return underlying function" + return getattr(func, method_function_attr, func) + +#============================================================================= +# input/output +#============================================================================= +if PY3: + _lazy_attrs = dict( + BytesIO="io.BytesIO", + UnicodeIO="io.StringIO", + NativeStringIO="io.StringIO", + SafeConfigParser="configparser.SafeConfigParser", + ) + if sys.version_info >= (3,2): + # py32 renamed this, removing old ConfigParser + _lazy_attrs["SafeConfigParser"] = "configparser.ConfigParser" + + print_ = getattr(builtins, "print") + +else: + _lazy_attrs = dict( + BytesIO="cStringIO.StringIO", + UnicodeIO="StringIO.StringIO", + NativeStringIO="cStringIO.StringIO", + SafeConfigParser="ConfigParser.SafeConfigParser", + ) + + def print_(*args, **kwds): + """The new-style print function.""" + # extract kwd args + fp = kwds.pop("file", sys.stdout) + sep = kwds.pop("sep", None) + end = 
kwds.pop("end", None) + if kwds: + raise TypeError("invalid keyword arguments") + + # short-circuit if no target + if fp is None: + return + + # use unicode or bytes ? + want_unicode = isinstance(sep, unicode) or isinstance(end, unicode) or \ + any(isinstance(arg, unicode) for arg in args) + + # pick default end sequence + if end is None: + end = u("\n") if want_unicode else "\n" + elif not isinstance(end, base_string_types): + raise TypeError("end must be None or a string") + + # pick default separator + if sep is None: + sep = u(" ") if want_unicode else " " + elif not isinstance(sep, base_string_types): + raise TypeError("sep must be None or a string") + + # write to buffer + first = True + write = fp.write + for arg in args: + if first: + first = False + else: + write(sep) + if not isinstance(arg, basestring): + arg = str(arg) + write(arg) + write(end) + +#============================================================================= +# lazy overlay module +#============================================================================= +from types import ModuleType + +def _import_object(source): + "helper to import object from module; accept format `path.to.object`" + modname, modattr = source.rsplit(".",1) + mod = __import__(modname, fromlist=[modattr], level=0) + return getattr(mod, modattr) + +class _LazyOverlayModule(ModuleType): + """proxy module which overlays original module, + and lazily imports specified attributes. + + this is mainly used to prevent importing of resources + that are only needed by certain password hashes, + yet allow them to be imported from a single location. + + used by :mod:`passlib.utils`, :mod:`passlib.utils.crypto`, + and :mod:`passlib.utils.compat`. + """ + + @classmethod + def replace_module(cls, name, attrmap): + orig = sys.modules[name] + self = cls(name, attrmap, orig) + sys.modules[name] = self + return self + + def __init__(self, name, attrmap, proxy=None): + ModuleType.__init__(self, name) + self.__attrmap = attrmap + self.__proxy = proxy + self.__log = logging.getLogger(name) + + def __getattr__(self, attr): + proxy = self.__proxy + if proxy and hasattr(proxy, attr): + return getattr(proxy, attr) + attrmap = self.__attrmap + if attr in attrmap: + source = attrmap[attr] + if callable(source): + value = source() + else: + value = _import_object(source) + setattr(self, attr, value) + self.__log.debug("loaded lazy attr %r: %r", attr, value) + return value + raise AttributeError("'module' object has no attribute '%s'" % (attr,)) + + def __repr__(self): + proxy = self.__proxy + if proxy: + return repr(proxy) + else: + return ModuleType.__repr__(self) + + def __dir__(self): + attrs = set(dir(self.__class__)) + attrs.update(self.__dict__) + attrs.update(self.__attrmap) + proxy = self.__proxy + if proxy is not None: + attrs.update(dir(proxy)) + return list(attrs) + +# replace this module with overlay that will lazily import attributes. +_LazyOverlayModule.replace_module(__name__, _lazy_attrs) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/utils/des.py b/passlib/utils/des.py new file mode 100644 index 00000000..def894d3 --- /dev/null +++ b/passlib/utils/des.py @@ -0,0 +1,859 @@ +"""passlib.utils.des -- DES block encryption routines + +History +======= +These routines (which have since been drastically modified for python) +are based on a Java implementation of the des-crypt algorithm, +found at ``_. 
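Stepping back to passlib/utils/compat.py above: the _LazyOverlayModule trick replaces the module in sys.modules with a proxy, so that heavyweight attributes (BytesIO, SafeConfigParser, and friends) are only imported the first time they are read. A standalone sketch of the same idea (the class and names here are illustrative, not passlib's actual code):

from types import ModuleType

class _LazyModuleSketch(ModuleType):
    def __init__(self, name, attrmap, proxy):
        ModuleType.__init__(self, name)
        self._attrmap = attrmap   # attribute name -> "module.object" source string
        self._proxy = proxy       # the original module, consulted first
    def __getattr__(self, attr):
        if self._proxy is not None and hasattr(self._proxy, attr):
            return getattr(self._proxy, attr)
        if attr not in self._attrmap:
            raise AttributeError(attr)
        modname, objname = self._attrmap[attr].rsplit(".", 1)
        value = getattr(__import__(modname, fromlist=[objname]), objname)
        setattr(self, attr, value)  # cache, so later lookups bypass __getattr__
        return value

# usage: sys.modules[name] = _LazyModuleSketch(name, {"BytesIO": "io.BytesIO"}, sys.modules[name])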
+ +The copyright & license for that source is as follows:: + + UnixCrypt.java 0.9 96/11/25 + Copyright (c) 1996 Aki Yoshida. All rights reserved. + Permission to use, copy, modify and distribute this software + for non-commercial or commercial purposes and without fee is + hereby granted provided that this copyright notice appears in + all copies. + + --- + + Unix crypt(3C) utility + @version 0.9, 11/25/96 + @author Aki Yoshida + + --- + + modified April 2001 + by Iris Van den Broeke, Daniel Deville + + --- + Unix Crypt. + Implements the one way cryptography used by Unix systems for + simple password protection. + @version $Id: UnixCrypt2.txt,v 1.1.1.1 2005/09/13 22:20:13 christos Exp $ + @author Greg Wilkins (gregw) + +The netbsd des-crypt implementation has some nice notes on how this all works - + http://fxr.googlebit.com/source/lib/libcrypt/crypt.c?v=NETBSD-CURRENT +""" + +# TODO: could use an accelerated C version of this module to speed up lmhash, +# des-crypt, and ext-des-crypt + +#============================================================================= +# imports +#============================================================================= +# core +import struct +# pkg +from passlib import exc +from passlib.utils.compat import bytes, join_byte_values, byte_elem_value, \ + b, irange, irange, int_types +from passlib.utils import deprecated_function +# local +__all__ = [ + "expand_des_key", + "des_encrypt_block", + "mdes_encrypt_int_block", +] + +#============================================================================= +# constants +#============================================================================= + +# masks/upper limits for various integer sizes +INT_24_MASK = 0xffffff +INT_56_MASK = 0xffffffffffffff +INT_64_MASK = 0xffffffffffffffff + +# mask to clear parity bits from 64-bit key +_KDATA_MASK = 0xfefefefefefefefe +_KPARITY_MASK = 0x0101010101010101 + +# mask used to setup key schedule +_KS_MASK = 0xfcfcfcfcffffffff + +#============================================================================= +# static DES tables +#============================================================================= + +# placeholders filled in by _load_tables() +PCXROT = IE3264 = SPE = CF6464 = None + +def _load_tables(): + "delay loading tables until they are actually needed" + global PCXROT, IE3264, SPE, CF6464 + + #--------------------------------------------------------------- + # Initial key schedule permutation + # PC1ROT - bit reverse, then PC1, then Rotate, then PC2 + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm3264 logic simpler + PC1ROT=( + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000002000, 0x0000000000002000, + 0x0000000000000020, 0x0000000000000020, 0x0000000000002020, 0x0000000000002020, + 0x0000000000000400, 0x0000000000000400, 0x0000000000002400, 0x0000000000002400, + 0x0000000000000420, 0x0000000000000420, 0x0000000000002420, 0x0000000000002420, ), + ( 0x0000000000000000, 0x2000000000000000, 0x0000000400000000, 0x2000000400000000, + 0x0000800000000000, 0x2000800000000000, 0x0000800400000000, 0x2000800400000000, + 0x0008000000000000, 0x2008000000000000, 0x0008000400000000, 0x2008000400000000, + 0x0008800000000000, 0x2008800000000000, 0x0008800400000000, 0x2008800400000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000040, 0x0000000000000040, + 0x0000000020000000, 0x0000000020000000, 0x0000000020000040, 0x0000000020000040, + 0x0000000000200000, 
0x0000000000200000, 0x0000000000200040, 0x0000000000200040, + 0x0000000020200000, 0x0000000020200000, 0x0000000020200040, 0x0000000020200040, ), + ( 0x0000000000000000, 0x0002000000000000, 0x0800000000000000, 0x0802000000000000, + 0x0100000000000000, 0x0102000000000000, 0x0900000000000000, 0x0902000000000000, + 0x4000000000000000, 0x4002000000000000, 0x4800000000000000, 0x4802000000000000, + 0x4100000000000000, 0x4102000000000000, 0x4900000000000000, 0x4902000000000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000040000, 0x0000000000040000, + 0x0000020000000000, 0x0000020000000000, 0x0000020000040000, 0x0000020000040000, + 0x0000000000000004, 0x0000000000000004, 0x0000000000040004, 0x0000000000040004, + 0x0000020000000004, 0x0000020000000004, 0x0000020000040004, 0x0000020000040004, ), + ( 0x0000000000000000, 0x0000400000000000, 0x0200000000000000, 0x0200400000000000, + 0x0080000000000000, 0x0080400000000000, 0x0280000000000000, 0x0280400000000000, + 0x0000008000000000, 0x0000408000000000, 0x0200008000000000, 0x0200408000000000, + 0x0080008000000000, 0x0080408000000000, 0x0280008000000000, 0x0280408000000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000010000000, 0x0000000010000000, + 0x0000000000001000, 0x0000000000001000, 0x0000000010001000, 0x0000000010001000, + 0x0000000040000000, 0x0000000040000000, 0x0000000050000000, 0x0000000050000000, + 0x0000000040001000, 0x0000000040001000, 0x0000000050001000, 0x0000000050001000, ), + ( 0x0000000000000000, 0x0000001000000000, 0x0000080000000000, 0x0000081000000000, + 0x1000000000000000, 0x1000001000000000, 0x1000080000000000, 0x1000081000000000, + 0x0004000000000000, 0x0004001000000000, 0x0004080000000000, 0x0004081000000000, + 0x1004000000000000, 0x1004001000000000, 0x1004080000000000, 0x1004081000000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000080, 0x0000000000000080, + 0x0000000000080000, 0x0000000000080000, 0x0000000000080080, 0x0000000000080080, + 0x0000000000800000, 0x0000000000800000, 0x0000000000800080, 0x0000000000800080, + 0x0000000000880000, 0x0000000000880000, 0x0000000000880080, 0x0000000000880080, ), + ( 0x0000000000000000, 0x0000000008000000, 0x0000002000000000, 0x0000002008000000, + 0x0000100000000000, 0x0000100008000000, 0x0000102000000000, 0x0000102008000000, + 0x0000200000000000, 0x0000200008000000, 0x0000202000000000, 0x0000202008000000, + 0x0000300000000000, 0x0000300008000000, 0x0000302000000000, 0x0000302008000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000400000, 0x0000000000400000, + 0x0000000004000000, 0x0000000004000000, 0x0000000004400000, 0x0000000004400000, + 0x0000000000000800, 0x0000000000000800, 0x0000000000400800, 0x0000000000400800, + 0x0000000004000800, 0x0000000004000800, 0x0000000004400800, 0x0000000004400800, ), + ( 0x0000000000000000, 0x0000000000008000, 0x0040000000000000, 0x0040000000008000, + 0x0000004000000000, 0x0000004000008000, 0x0040004000000000, 0x0040004000008000, + 0x8000000000000000, 0x8000000000008000, 0x8040000000000000, 0x8040000000008000, + 0x8000004000000000, 0x8000004000008000, 0x8040004000000000, 0x8040004000008000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000004000, 0x0000000000004000, + 0x0000000000000008, 0x0000000000000008, 0x0000000000004008, 0x0000000000004008, + 0x0000000000000010, 0x0000000000000010, 0x0000000000004010, 0x0000000000004010, + 0x0000000000000018, 0x0000000000000018, 0x0000000000004018, 0x0000000000004018, ), + ( 0x0000000000000000, 0x0000000200000000, 0x0001000000000000, 
0x0001000200000000, + 0x0400000000000000, 0x0400000200000000, 0x0401000000000000, 0x0401000200000000, + 0x0020000000000000, 0x0020000200000000, 0x0021000000000000, 0x0021000200000000, + 0x0420000000000000, 0x0420000200000000, 0x0421000000000000, 0x0421000200000000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000010000000000, 0x0000010000000000, + 0x0000000100000000, 0x0000000100000000, 0x0000010100000000, 0x0000010100000000, + 0x0000000000100000, 0x0000000000100000, 0x0000010000100000, 0x0000010000100000, + 0x0000000100100000, 0x0000000100100000, 0x0000010100100000, 0x0000010100100000, ), + ( 0x0000000000000000, 0x0000000080000000, 0x0000040000000000, 0x0000040080000000, + 0x0010000000000000, 0x0010000080000000, 0x0010040000000000, 0x0010040080000000, + 0x0000000800000000, 0x0000000880000000, 0x0000040800000000, 0x0000040880000000, + 0x0010000800000000, 0x0010000880000000, 0x0010040800000000, 0x0010040880000000, ), + ) + #--------------------------------------------------------------- + # Subsequent key schedule rotation permutations + # PC2ROT - PC2 inverse, then Rotate, then PC2 + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm3264 logic simpler + PC2ROTA=( + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000200000, 0x0000000000200000, 0x0000000000200000, 0x0000000000200000, + 0x0000000004000000, 0x0000000004000000, 0x0000000004000000, 0x0000000004000000, + 0x0000000004200000, 0x0000000004200000, 0x0000000004200000, 0x0000000004200000, ), + ( 0x0000000000000000, 0x0000000000000800, 0x0000010000000000, 0x0000010000000800, + 0x0000000000002000, 0x0000000000002800, 0x0000010000002000, 0x0000010000002800, + 0x0000000010000000, 0x0000000010000800, 0x0000010010000000, 0x0000010010000800, + 0x0000000010002000, 0x0000000010002800, 0x0000010010002000, 0x0000010010002800, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000100000000, 0x0000000100000000, 0x0000000100000000, 0x0000000100000000, + 0x0000000000800000, 0x0000000000800000, 0x0000000000800000, 0x0000000000800000, + 0x0000000100800000, 0x0000000100800000, 0x0000000100800000, 0x0000000100800000, ), + ( 0x0000000000000000, 0x0000020000000000, 0x0000000080000000, 0x0000020080000000, + 0x0000000000400000, 0x0000020000400000, 0x0000000080400000, 0x0000020080400000, + 0x0000000008000000, 0x0000020008000000, 0x0000000088000000, 0x0000020088000000, + 0x0000000008400000, 0x0000020008400000, 0x0000000088400000, 0x0000020088400000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000040, 0x0000000000000040, 0x0000000000000040, 0x0000000000000040, + 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, + 0x0000000000001040, 0x0000000000001040, 0x0000000000001040, 0x0000000000001040, ), + ( 0x0000000000000000, 0x0000000000000010, 0x0000000000000400, 0x0000000000000410, + 0x0000000000000080, 0x0000000000000090, 0x0000000000000480, 0x0000000000000490, + 0x0000000040000000, 0x0000000040000010, 0x0000000040000400, 0x0000000040000410, + 0x0000000040000080, 0x0000000040000090, 0x0000000040000480, 0x0000000040000490, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, + 0x0000000000100000, 0x0000000000100000, 0x0000000000100000, 0x0000000000100000, + 0x0000000000180000, 0x0000000000180000, 
0x0000000000180000, 0x0000000000180000, ), + ( 0x0000000000000000, 0x0000000000040000, 0x0000000000000020, 0x0000000000040020, + 0x0000000000000004, 0x0000000000040004, 0x0000000000000024, 0x0000000000040024, + 0x0000000200000000, 0x0000000200040000, 0x0000000200000020, 0x0000000200040020, + 0x0000000200000004, 0x0000000200040004, 0x0000000200000024, 0x0000000200040024, ), + ( 0x0000000000000000, 0x0000000000000008, 0x0000000000008000, 0x0000000000008008, + 0x0010000000000000, 0x0010000000000008, 0x0010000000008000, 0x0010000000008008, + 0x0020000000000000, 0x0020000000000008, 0x0020000000008000, 0x0020000000008008, + 0x0030000000000000, 0x0030000000000008, 0x0030000000008000, 0x0030000000008008, ), + ( 0x0000000000000000, 0x0000400000000000, 0x0000080000000000, 0x0000480000000000, + 0x0000100000000000, 0x0000500000000000, 0x0000180000000000, 0x0000580000000000, + 0x4000000000000000, 0x4000400000000000, 0x4000080000000000, 0x4000480000000000, + 0x4000100000000000, 0x4000500000000000, 0x4000180000000000, 0x4000580000000000, ), + ( 0x0000000000000000, 0x0000000000004000, 0x0000000020000000, 0x0000000020004000, + 0x0001000000000000, 0x0001000000004000, 0x0001000020000000, 0x0001000020004000, + 0x0200000000000000, 0x0200000000004000, 0x0200000020000000, 0x0200000020004000, + 0x0201000000000000, 0x0201000000004000, 0x0201000020000000, 0x0201000020004000, ), + ( 0x0000000000000000, 0x1000000000000000, 0x0004000000000000, 0x1004000000000000, + 0x0002000000000000, 0x1002000000000000, 0x0006000000000000, 0x1006000000000000, + 0x0000000800000000, 0x1000000800000000, 0x0004000800000000, 0x1004000800000000, + 0x0002000800000000, 0x1002000800000000, 0x0006000800000000, 0x1006000800000000, ), + ( 0x0000000000000000, 0x0040000000000000, 0x2000000000000000, 0x2040000000000000, + 0x0000008000000000, 0x0040008000000000, 0x2000008000000000, 0x2040008000000000, + 0x0000001000000000, 0x0040001000000000, 0x2000001000000000, 0x2040001000000000, + 0x0000009000000000, 0x0040009000000000, 0x2000009000000000, 0x2040009000000000, ), + ( 0x0000000000000000, 0x0400000000000000, 0x8000000000000000, 0x8400000000000000, + 0x0000002000000000, 0x0400002000000000, 0x8000002000000000, 0x8400002000000000, + 0x0100000000000000, 0x0500000000000000, 0x8100000000000000, 0x8500000000000000, + 0x0100002000000000, 0x0500002000000000, 0x8100002000000000, 0x8500002000000000, ), + ( 0x0000000000000000, 0x0000800000000000, 0x0800000000000000, 0x0800800000000000, + 0x0000004000000000, 0x0000804000000000, 0x0800004000000000, 0x0800804000000000, + 0x0000000400000000, 0x0000800400000000, 0x0800000400000000, 0x0800800400000000, + 0x0000004400000000, 0x0000804400000000, 0x0800004400000000, 0x0800804400000000, ), + ( 0x0000000000000000, 0x0080000000000000, 0x0000040000000000, 0x0080040000000000, + 0x0008000000000000, 0x0088000000000000, 0x0008040000000000, 0x0088040000000000, + 0x0000200000000000, 0x0080200000000000, 0x0000240000000000, 0x0080240000000000, + 0x0008200000000000, 0x0088200000000000, 0x0008240000000000, 0x0088240000000000, ), + ) + + # NOTE: this was reordered from original table to make perm3264 logic simpler + PC2ROTB=( + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000400, 0x0000000000000400, 0x0000000000000400, 0x0000000000000400, + 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, 0x0000000000080000, + 0x0000000000080400, 0x0000000000080400, 0x0000000000080400, 0x0000000000080400, ), + ( 0x0000000000000000, 0x0000000000800000, 0x0000000000004000, 0x0000000000804000, 
+ 0x0000000080000000, 0x0000000080800000, 0x0000000080004000, 0x0000000080804000, + 0x0000000000040000, 0x0000000000840000, 0x0000000000044000, 0x0000000000844000, + 0x0000000080040000, 0x0000000080840000, 0x0000000080044000, 0x0000000080844000, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000008, 0x0000000000000008, 0x0000000000000008, 0x0000000000000008, + 0x0000000040000000, 0x0000000040000000, 0x0000000040000000, 0x0000000040000000, + 0x0000000040000008, 0x0000000040000008, 0x0000000040000008, 0x0000000040000008, ), + ( 0x0000000000000000, 0x0000000020000000, 0x0000000200000000, 0x0000000220000000, + 0x0000000000000080, 0x0000000020000080, 0x0000000200000080, 0x0000000220000080, + 0x0000000000100000, 0x0000000020100000, 0x0000000200100000, 0x0000000220100000, + 0x0000000000100080, 0x0000000020100080, 0x0000000200100080, 0x0000000220100080, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000002000, 0x0000000000002000, 0x0000000000002000, 0x0000000000002000, + 0x0000020000000000, 0x0000020000000000, 0x0000020000000000, 0x0000020000000000, + 0x0000020000002000, 0x0000020000002000, 0x0000020000002000, 0x0000020000002000, ), + ( 0x0000000000000000, 0x0000000000000800, 0x0000000100000000, 0x0000000100000800, + 0x0000000010000000, 0x0000000010000800, 0x0000000110000000, 0x0000000110000800, + 0x0000000000000004, 0x0000000000000804, 0x0000000100000004, 0x0000000100000804, + 0x0000000010000004, 0x0000000010000804, 0x0000000110000004, 0x0000000110000804, ), + ( 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, 0x0000000000001000, + 0x0000000000000010, 0x0000000000000010, 0x0000000000000010, 0x0000000000000010, + 0x0000000000001010, 0x0000000000001010, 0x0000000000001010, 0x0000000000001010, ), + ( 0x0000000000000000, 0x0000000000000040, 0x0000010000000000, 0x0000010000000040, + 0x0000000000200000, 0x0000000000200040, 0x0000010000200000, 0x0000010000200040, + 0x0000000000008000, 0x0000000000008040, 0x0000010000008000, 0x0000010000008040, + 0x0000000000208000, 0x0000000000208040, 0x0000010000208000, 0x0000010000208040, ), + ( 0x0000000000000000, 0x0000000004000000, 0x0000000008000000, 0x000000000c000000, + 0x0400000000000000, 0x0400000004000000, 0x0400000008000000, 0x040000000c000000, + 0x8000000000000000, 0x8000000004000000, 0x8000000008000000, 0x800000000c000000, + 0x8400000000000000, 0x8400000004000000, 0x8400000008000000, 0x840000000c000000, ), + ( 0x0000000000000000, 0x0002000000000000, 0x0200000000000000, 0x0202000000000000, + 0x1000000000000000, 0x1002000000000000, 0x1200000000000000, 0x1202000000000000, + 0x0008000000000000, 0x000a000000000000, 0x0208000000000000, 0x020a000000000000, + 0x1008000000000000, 0x100a000000000000, 0x1208000000000000, 0x120a000000000000, ), + ( 0x0000000000000000, 0x0000000000400000, 0x0000000000000020, 0x0000000000400020, + 0x0040000000000000, 0x0040000000400000, 0x0040000000000020, 0x0040000000400020, + 0x0800000000000000, 0x0800000000400000, 0x0800000000000020, 0x0800000000400020, + 0x0840000000000000, 0x0840000000400000, 0x0840000000000020, 0x0840000000400020, ), + ( 0x0000000000000000, 0x0080000000000000, 0x0000008000000000, 0x0080008000000000, + 0x2000000000000000, 0x2080000000000000, 0x2000008000000000, 0x2080008000000000, + 0x0020000000000000, 0x00a0000000000000, 0x0020008000000000, 0x00a0008000000000, + 0x2020000000000000, 0x20a0000000000000, 
0x2020008000000000, 0x20a0008000000000, ), + ( 0x0000000000000000, 0x0000002000000000, 0x0000040000000000, 0x0000042000000000, + 0x4000000000000000, 0x4000002000000000, 0x4000040000000000, 0x4000042000000000, + 0x0000400000000000, 0x0000402000000000, 0x0000440000000000, 0x0000442000000000, + 0x4000400000000000, 0x4000402000000000, 0x4000440000000000, 0x4000442000000000, ), + ( 0x0000000000000000, 0x0000004000000000, 0x0000200000000000, 0x0000204000000000, + 0x0000080000000000, 0x0000084000000000, 0x0000280000000000, 0x0000284000000000, + 0x0000800000000000, 0x0000804000000000, 0x0000a00000000000, 0x0000a04000000000, + 0x0000880000000000, 0x0000884000000000, 0x0000a80000000000, 0x0000a84000000000, ), + ( 0x0000000000000000, 0x0000000800000000, 0x0000000400000000, 0x0000000c00000000, + 0x0000100000000000, 0x0000100800000000, 0x0000100400000000, 0x0000100c00000000, + 0x0010000000000000, 0x0010000800000000, 0x0010000400000000, 0x0010000c00000000, + 0x0010100000000000, 0x0010100800000000, 0x0010100400000000, 0x0010100c00000000, ), + ( 0x0000000000000000, 0x0100000000000000, 0x0001000000000000, 0x0101000000000000, + 0x0000001000000000, 0x0100001000000000, 0x0001001000000000, 0x0101001000000000, + 0x0004000000000000, 0x0104000000000000, 0x0005000000000000, 0x0105000000000000, + 0x0004001000000000, 0x0104001000000000, 0x0005001000000000, 0x0105001000000000, ), + ) + #--------------------------------------------------------------- + # PCXROT - PC1ROT, PC2ROTA, PC2ROTB listed in order + # of the PC1 rotation schedule, as used by des_setkey + #--------------------------------------------------------------- + ##ROTATES = (1, 1, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 1) + ##PCXROT = ( + ## PC1ROT, PC2ROTA, PC2ROTB, PC2ROTB, + ## PC2ROTB, PC2ROTB, PC2ROTB, PC2ROTB, + ## PC2ROTA, PC2ROTB, PC2ROTB, PC2ROTB, + ## PC2ROTB, PC2ROTB, PC2ROTB, PC2ROTA, + ## ) + + # NOTE: modified PCXROT to contain entrys broken into pairs, + # to help generate them in format best used by encoder. 
+ PCXROT = ( + (PC1ROT, PC2ROTA), (PC2ROTB, PC2ROTB), + (PC2ROTB, PC2ROTB), (PC2ROTB, PC2ROTB), + (PC2ROTA, PC2ROTB), (PC2ROTB, PC2ROTB), + (PC2ROTB, PC2ROTB), (PC2ROTB, PC2ROTA), + ) + + #--------------------------------------------------------------- + # Bit reverse, intial permupation, expantion + # Initial permutation/expansion table + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm3264 logic simpler + IE3264=( + ( 0x0000000000000000, 0x0000000000800800, 0x0000000000008008, 0x0000000000808808, + 0x0000008008000000, 0x0000008008800800, 0x0000008008008008, 0x0000008008808808, + 0x0000000080080000, 0x0000000080880800, 0x0000000080088008, 0x0000000080888808, + 0x0000008088080000, 0x0000008088880800, 0x0000008088088008, 0x0000008088888808, ), + ( 0x0000000000000000, 0x0080080000000000, 0x0000800800000000, 0x0080880800000000, + 0x0800000000000080, 0x0880080000000080, 0x0800800800000080, 0x0880880800000080, + 0x8008000000000000, 0x8088080000000000, 0x8008800800000000, 0x8088880800000000, + 0x8808000000000080, 0x8888080000000080, 0x8808800800000080, 0x8888880800000080, ), + ( 0x0000000000000000, 0x0000000000001000, 0x0000000000000010, 0x0000000000001010, + 0x0000000010000000, 0x0000000010001000, 0x0000000010000010, 0x0000000010001010, + 0x0000000000100000, 0x0000000000101000, 0x0000000000100010, 0x0000000000101010, + 0x0000000010100000, 0x0000000010101000, 0x0000000010100010, 0x0000000010101010, ), + ( 0x0000000000000000, 0x0000100000000000, 0x0000001000000000, 0x0000101000000000, + 0x1000000000000000, 0x1000100000000000, 0x1000001000000000, 0x1000101000000000, + 0x0010000000000000, 0x0010100000000000, 0x0010001000000000, 0x0010101000000000, + 0x1010000000000000, 0x1010100000000000, 0x1010001000000000, 0x1010101000000000, ), + ( 0x0000000000000000, 0x0000000000002000, 0x0000000000000020, 0x0000000000002020, + 0x0000000020000000, 0x0000000020002000, 0x0000000020000020, 0x0000000020002020, + 0x0000000000200000, 0x0000000000202000, 0x0000000000200020, 0x0000000000202020, + 0x0000000020200000, 0x0000000020202000, 0x0000000020200020, 0x0000000020202020, ), + ( 0x0000000000000000, 0x0000200000000000, 0x0000002000000000, 0x0000202000000000, + 0x2000000000000000, 0x2000200000000000, 0x2000002000000000, 0x2000202000000000, + 0x0020000000000000, 0x0020200000000000, 0x0020002000000000, 0x0020202000000000, + 0x2020000000000000, 0x2020200000000000, 0x2020002000000000, 0x2020202000000000, ), + ( 0x0000000000000000, 0x0000000000004004, 0x0400000000000040, 0x0400000000004044, + 0x0000000040040000, 0x0000000040044004, 0x0400000040040040, 0x0400000040044044, + 0x0000000000400400, 0x0000000000404404, 0x0400000000400440, 0x0400000000404444, + 0x0000000040440400, 0x0000000040444404, 0x0400000040440440, 0x0400000040444444, ), + ( 0x0000000000000000, 0x0000400400000000, 0x0000004004000000, 0x0000404404000000, + 0x4004000000000000, 0x4004400400000000, 0x4004004004000000, 0x4004404404000000, + 0x0040040000000000, 0x0040440400000000, 0x0040044004000000, 0x0040444404000000, + 0x4044040000000000, 0x4044440400000000, 0x4044044004000000, 0x4044444404000000, ), + ) + + #--------------------------------------------------------------- + # Table that combines the S, P, and E operations. 
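The key-schedule and initial-permutation tables above (PC1ROT, PC2ROTA, PC2ROTB, IE3264) are indexed by 4-bit nibbles of the input: each row holds the pre-permuted contribution of one nibble, so a full permutation is just an OR over the selected rows. A sketch of how such a table can be applied, assuming a 16-row table for a 64-bit value or an 8-row table for a 32-bit value (the helper name is illustrative):

def _permute_sketch(value, table):
    # OR together the row entry selected by each 4-bit nibble of `value`,
    # consuming nibbles from least to most significant
    out = 0
    for row in table:
        out |= row[value & 0xf]
        value >>= 4
    return out

Building the output this way replaces dozens of single-bit shifts and masks per permutation with one table lookup per nibble, which is the point of storing the tables in this nibble-indexed form.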
+ #--------------------------------------------------------------- + SPE=( + ( 0x0080088008200000, 0x0000008008000000, 0x0000000000200020, 0x0080088008200020, + 0x0000000000200000, 0x0080088008000020, 0x0000008008000020, 0x0000000000200020, + 0x0080088008000020, 0x0080088008200000, 0x0000008008200000, 0x0080080000000020, + 0x0080080000200020, 0x0000000000200000, 0x0000000000000000, 0x0000008008000020, + 0x0000008008000000, 0x0000000000000020, 0x0080080000200000, 0x0080088008000000, + 0x0080088008200020, 0x0000008008200000, 0x0080080000000020, 0x0080080000200000, + 0x0000000000000020, 0x0080080000000000, 0x0080088008000000, 0x0000008008200020, + 0x0080080000000000, 0x0080080000200020, 0x0000008008200020, 0x0000000000000000, + 0x0000000000000000, 0x0080088008200020, 0x0080080000200000, 0x0000008008000020, + 0x0080088008200000, 0x0000008008000000, 0x0080080000000020, 0x0080080000200000, + 0x0000008008200020, 0x0080080000000000, 0x0080088008000000, 0x0000000000200020, + 0x0080088008000020, 0x0000000000000020, 0x0000000000200020, 0x0000008008200000, + 0x0080088008200020, 0x0080088008000000, 0x0000008008200000, 0x0080080000200020, + 0x0000000000200000, 0x0080080000000020, 0x0000008008000020, 0x0000000000000000, + 0x0000008008000000, 0x0000000000200000, 0x0080080000200020, 0x0080088008200000, + 0x0000000000000020, 0x0000008008200020, 0x0080080000000000, 0x0080088008000020, ), + ( 0x1000800810004004, 0x0000000000000000, 0x0000800810000000, 0x0000000010004004, + 0x1000000000004004, 0x1000800800000000, 0x0000800800004004, 0x0000800810000000, + 0x0000800800000000, 0x1000000010004004, 0x1000000000000000, 0x0000800800004004, + 0x1000000010000000, 0x0000800810004004, 0x0000000010004004, 0x1000000000000000, + 0x0000000010000000, 0x1000800800004004, 0x1000000010004004, 0x0000800800000000, + 0x1000800810000000, 0x0000000000004004, 0x0000000000000000, 0x1000000010000000, + 0x1000800800004004, 0x1000800810000000, 0x0000800810004004, 0x1000000000004004, + 0x0000000000004004, 0x0000000010000000, 0x1000800800000000, 0x1000800810004004, + 0x1000000010000000, 0x0000800810004004, 0x0000800800004004, 0x1000800810000000, + 0x1000800810004004, 0x1000000010000000, 0x1000000000004004, 0x0000000000000000, + 0x0000000000004004, 0x1000800800000000, 0x0000000010000000, 0x1000000010004004, + 0x0000800800000000, 0x0000000000004004, 0x1000800810000000, 0x1000800800004004, + 0x0000800810004004, 0x0000800800000000, 0x0000000000000000, 0x1000000000004004, + 0x1000000000000000, 0x1000800810004004, 0x0000800810000000, 0x0000000010004004, + 0x1000000010004004, 0x0000000010000000, 0x1000800800000000, 0x0000800800004004, + 0x1000800800004004, 0x1000000000000000, 0x0000000010004004, 0x0000800810000000, ), + ( 0x0000000000400410, 0x0010004004400400, 0x0010000000000000, 0x0010000000400410, + 0x0000004004000010, 0x0000000000400400, 0x0010000000400410, 0x0010004004000000, + 0x0010000000400400, 0x0000004004000000, 0x0000004004400400, 0x0000000000000010, + 0x0010004004400410, 0x0010000000000010, 0x0000000000000010, 0x0000004004400410, + 0x0000000000000000, 0x0000004004000010, 0x0010004004400400, 0x0010000000000000, + 0x0010000000000010, 0x0010004004400410, 0x0000004004000000, 0x0000000000400410, + 0x0000004004400410, 0x0010000000400400, 0x0010004004000010, 0x0000004004400400, + 0x0010004004000000, 0x0000000000000000, 0x0000000000400400, 0x0010004004000010, + 0x0010004004400400, 0x0010000000000000, 0x0000000000000010, 0x0000004004000000, + 0x0010000000000010, 0x0000004004000010, 0x0000004004400400, 0x0010000000400410, + 0x0000000000000000, 
0x0010004004400400, 0x0010004004000000, 0x0000004004400410, + 0x0000004004000010, 0x0000000000400400, 0x0010004004400410, 0x0000000000000010, + 0x0010004004000010, 0x0000000000400410, 0x0000000000400400, 0x0010004004400410, + 0x0000004004000000, 0x0010000000400400, 0x0010000000400410, 0x0010004004000000, + 0x0010000000400400, 0x0000000000000000, 0x0000004004400410, 0x0010000000000010, + 0x0000000000400410, 0x0010004004000010, 0x0010000000000000, 0x0000004004400400, ), + ( 0x0800100040040080, 0x0000100000001000, 0x0800000000000080, 0x0800100040041080, + 0x0000000000000000, 0x0000000040041000, 0x0800100000001080, 0x0800000040040080, + 0x0000100040041000, 0x0800000000001080, 0x0000000000001000, 0x0800100000000080, + 0x0800000000001080, 0x0800100040040080, 0x0000000040040000, 0x0000000000001000, + 0x0800000040041080, 0x0000100040040000, 0x0000100000000000, 0x0800000000000080, + 0x0000100040040000, 0x0800100000001080, 0x0000000040041000, 0x0000100000000000, + 0x0800100000000080, 0x0000000000000000, 0x0800000040040080, 0x0000100040041000, + 0x0000100000001000, 0x0800000040041080, 0x0800100040041080, 0x0000000040040000, + 0x0800000040041080, 0x0800100000000080, 0x0000000040040000, 0x0800000000001080, + 0x0000100040040000, 0x0000100000001000, 0x0800000000000080, 0x0000000040041000, + 0x0800100000001080, 0x0000000000000000, 0x0000100000000000, 0x0800000040040080, + 0x0000000000000000, 0x0800000040041080, 0x0000100040041000, 0x0000100000000000, + 0x0000000000001000, 0x0800100040041080, 0x0800100040040080, 0x0000000040040000, + 0x0800100040041080, 0x0800000000000080, 0x0000100000001000, 0x0800100040040080, + 0x0800000040040080, 0x0000100040040000, 0x0000000040041000, 0x0800100000001080, + 0x0800100000000080, 0x0000000000001000, 0x0800000000001080, 0x0000100040041000, ), + ( 0x0000000000800800, 0x0000001000000000, 0x0040040000000000, 0x2040041000800800, + 0x2000001000800800, 0x0040040000800800, 0x2040041000000000, 0x0000001000800800, + 0x0000001000000000, 0x2000000000000000, 0x2000000000800800, 0x0040041000000000, + 0x2040040000800800, 0x2000001000800800, 0x0040041000800800, 0x0000000000000000, + 0x0040041000000000, 0x0000000000800800, 0x2000001000000000, 0x2040040000000000, + 0x0040040000800800, 0x2040041000000000, 0x0000000000000000, 0x2000000000800800, + 0x2000000000000000, 0x2040040000800800, 0x2040041000800800, 0x2000001000000000, + 0x0000001000800800, 0x0040040000000000, 0x2040040000000000, 0x0040041000800800, + 0x0040041000800800, 0x2040040000800800, 0x2000001000000000, 0x0000001000800800, + 0x0000001000000000, 0x2000000000000000, 0x2000000000800800, 0x0040040000800800, + 0x0000000000800800, 0x0040041000000000, 0x2040041000800800, 0x0000000000000000, + 0x2040041000000000, 0x0000000000800800, 0x0040040000000000, 0x2000001000000000, + 0x2040040000800800, 0x0040040000000000, 0x0000000000000000, 0x2040041000800800, + 0x2000001000800800, 0x0040041000800800, 0x2040040000000000, 0x0000001000000000, + 0x0040041000000000, 0x2000001000800800, 0x0040040000800800, 0x2040040000000000, + 0x2000000000000000, 0x2040041000000000, 0x0000001000800800, 0x2000000000800800, ), + ( 0x4004000000008008, 0x4004000020000000, 0x0000000000000000, 0x0000200020008008, + 0x4004000020000000, 0x0000200000000000, 0x4004200000008008, 0x0000000020000000, + 0x4004200000000000, 0x4004200020008008, 0x0000200020000000, 0x0000000000008008, + 0x0000200000008008, 0x4004000000008008, 0x0000000020008008, 0x4004200020000000, + 0x0000000020000000, 0x4004200000008008, 0x4004000020008008, 0x0000000000000000, + 0x0000200000000000, 
0x4004000000000000, 0x0000200020008008, 0x4004000020008008, + 0x4004200020008008, 0x0000000020008008, 0x0000000000008008, 0x4004200000000000, + 0x4004000000000000, 0x0000200020000000, 0x4004200020000000, 0x0000200000008008, + 0x4004200000000000, 0x0000000000008008, 0x0000200000008008, 0x4004200020000000, + 0x0000200020008008, 0x4004000020000000, 0x0000000000000000, 0x0000200000008008, + 0x0000000000008008, 0x0000200000000000, 0x4004000020008008, 0x0000000020000000, + 0x4004000020000000, 0x4004200020008008, 0x0000200020000000, 0x4004000000000000, + 0x4004200020008008, 0x0000200020000000, 0x0000000020000000, 0x4004200000008008, + 0x4004000000008008, 0x0000000020008008, 0x4004200020000000, 0x0000000000000000, + 0x0000200000000000, 0x4004000000008008, 0x4004200000008008, 0x0000200020008008, + 0x0000000020008008, 0x4004200000000000, 0x4004000000000000, 0x4004000020008008, ), + ( 0x0000400400000000, 0x0020000000000000, 0x0020000000100000, 0x0400000000100040, + 0x0420400400100040, 0x0400400400000040, 0x0020400400000000, 0x0000000000000000, + 0x0000000000100000, 0x0420000000100040, 0x0420000000000040, 0x0000400400100000, + 0x0400000000000040, 0x0020400400100000, 0x0000400400100000, 0x0420000000000040, + 0x0420000000100040, 0x0000400400000000, 0x0400400400000040, 0x0420400400100040, + 0x0000000000000000, 0x0020000000100000, 0x0400000000100040, 0x0020400400000000, + 0x0400400400100040, 0x0420400400000040, 0x0020400400100000, 0x0400000000000040, + 0x0420400400000040, 0x0400400400100040, 0x0020000000000000, 0x0000000000100000, + 0x0420400400000040, 0x0000400400100000, 0x0400400400100040, 0x0420000000000040, + 0x0000400400000000, 0x0020000000000000, 0x0000000000100000, 0x0400400400100040, + 0x0420000000100040, 0x0420400400000040, 0x0020400400000000, 0x0000000000000000, + 0x0020000000000000, 0x0400000000100040, 0x0400000000000040, 0x0020000000100000, + 0x0000000000000000, 0x0420000000100040, 0x0020000000100000, 0x0020400400000000, + 0x0420000000000040, 0x0000400400000000, 0x0420400400100040, 0x0000000000100000, + 0x0020400400100000, 0x0400000000000040, 0x0400400400000040, 0x0420400400100040, + 0x0400000000100040, 0x0020400400100000, 0x0000400400100000, 0x0400400400000040, ), + ( 0x8008000080082000, 0x0000002080082000, 0x8008002000000000, 0x0000000000000000, + 0x0000002000002000, 0x8008000080080000, 0x0000000080082000, 0x8008002080082000, + 0x8008000000000000, 0x0000000000002000, 0x0000002080080000, 0x8008002000000000, + 0x8008002080080000, 0x8008002000002000, 0x8008000000002000, 0x0000000080082000, + 0x0000002000000000, 0x8008002080080000, 0x8008000080080000, 0x0000002000002000, + 0x8008002080082000, 0x8008000000002000, 0x0000000000000000, 0x0000002080080000, + 0x0000000000002000, 0x0000000080080000, 0x8008002000002000, 0x8008000080082000, + 0x0000000080080000, 0x0000002000000000, 0x0000002080082000, 0x8008000000000000, + 0x0000000080080000, 0x0000002000000000, 0x8008000000002000, 0x8008002080082000, + 0x8008002000000000, 0x0000000000002000, 0x0000000000000000, 0x0000002080080000, + 0x8008000080082000, 0x8008002000002000, 0x0000002000002000, 0x8008000080080000, + 0x0000002080082000, 0x8008000000000000, 0x8008000080080000, 0x0000002000002000, + 0x8008002080082000, 0x0000000080080000, 0x0000000080082000, 0x8008000000002000, + 0x0000002080080000, 0x8008002000000000, 0x8008002000002000, 0x0000000080082000, + 0x8008000000000000, 0x0000002080082000, 0x8008002080080000, 0x0000000000000000, + 0x0000000000002000, 0x8008000080082000, 0x0000002000000000, 0x8008002080080000, ), + ) + + 
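+    # NOTE: each SPE[i] gives the combined S-box/P/E result for the i'th 6-bit
+    #       group of the keyed round value B; des_encrypt_int_block() XORs the
+    #       eight unrolled lookups together, roughly equivalent to:
+    ##          spe_out = 0
+    ##          for i, tbl in enumerate(SPE):
+    ##              spe_out ^= tbl[(B >> (58 - 8*i)) & 0x3f]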
#--------------------------------------------------------------- + # compressed/interleaved => final permutation table + # Compression, final permutation, bit reverse + #--------------------------------------------------------------- + # NOTE: this was reordered from original table to make perm6464 logic simpler + CF6464=( + ( 0x0000000000000000, 0x0000002000000000, 0x0000200000000000, 0x0000202000000000, + 0x0020000000000000, 0x0020002000000000, 0x0020200000000000, 0x0020202000000000, + 0x2000000000000000, 0x2000002000000000, 0x2000200000000000, 0x2000202000000000, + 0x2020000000000000, 0x2020002000000000, 0x2020200000000000, 0x2020202000000000, ), + ( 0x0000000000000000, 0x0000000200000000, 0x0000020000000000, 0x0000020200000000, + 0x0002000000000000, 0x0002000200000000, 0x0002020000000000, 0x0002020200000000, + 0x0200000000000000, 0x0200000200000000, 0x0200020000000000, 0x0200020200000000, + 0x0202000000000000, 0x0202000200000000, 0x0202020000000000, 0x0202020200000000, ), + ( 0x0000000000000000, 0x0000000000000020, 0x0000000000002000, 0x0000000000002020, + 0x0000000000200000, 0x0000000000200020, 0x0000000000202000, 0x0000000000202020, + 0x0000000020000000, 0x0000000020000020, 0x0000000020002000, 0x0000000020002020, + 0x0000000020200000, 0x0000000020200020, 0x0000000020202000, 0x0000000020202020, ), + ( 0x0000000000000000, 0x0000000000000002, 0x0000000000000200, 0x0000000000000202, + 0x0000000000020000, 0x0000000000020002, 0x0000000000020200, 0x0000000000020202, + 0x0000000002000000, 0x0000000002000002, 0x0000000002000200, 0x0000000002000202, + 0x0000000002020000, 0x0000000002020002, 0x0000000002020200, 0x0000000002020202, ), + ( 0x0000000000000000, 0x0000008000000000, 0x0000800000000000, 0x0000808000000000, + 0x0080000000000000, 0x0080008000000000, 0x0080800000000000, 0x0080808000000000, + 0x8000000000000000, 0x8000008000000000, 0x8000800000000000, 0x8000808000000000, + 0x8080000000000000, 0x8080008000000000, 0x8080800000000000, 0x8080808000000000, ), + ( 0x0000000000000000, 0x0000000800000000, 0x0000080000000000, 0x0000080800000000, + 0x0008000000000000, 0x0008000800000000, 0x0008080000000000, 0x0008080800000000, + 0x0800000000000000, 0x0800000800000000, 0x0800080000000000, 0x0800080800000000, + 0x0808000000000000, 0x0808000800000000, 0x0808080000000000, 0x0808080800000000, ), + ( 0x0000000000000000, 0x0000000000000080, 0x0000000000008000, 0x0000000000008080, + 0x0000000000800000, 0x0000000000800080, 0x0000000000808000, 0x0000000000808080, + 0x0000000080000000, 0x0000000080000080, 0x0000000080008000, 0x0000000080008080, + 0x0000000080800000, 0x0000000080800080, 0x0000000080808000, 0x0000000080808080, ), + ( 0x0000000000000000, 0x0000000000000008, 0x0000000000000800, 0x0000000000000808, + 0x0000000000080000, 0x0000000000080008, 0x0000000000080800, 0x0000000000080808, + 0x0000000008000000, 0x0000000008000008, 0x0000000008000800, 0x0000000008000808, + 0x0000000008080000, 0x0000000008080008, 0x0000000008080800, 0x0000000008080808, ), + ( 0x0000000000000000, 0x0000001000000000, 0x0000100000000000, 0x0000101000000000, + 0x0010000000000000, 0x0010001000000000, 0x0010100000000000, 0x0010101000000000, + 0x1000000000000000, 0x1000001000000000, 0x1000100000000000, 0x1000101000000000, + 0x1010000000000000, 0x1010001000000000, 0x1010100000000000, 0x1010101000000000, ), + ( 0x0000000000000000, 0x0000000100000000, 0x0000010000000000, 0x0000010100000000, + 0x0001000000000000, 0x0001000100000000, 0x0001010000000000, 0x0001010100000000, + 0x0100000000000000, 0x0100000100000000, 0x0100010000000000, 
0x0100010100000000, + 0x0101000000000000, 0x0101000100000000, 0x0101010000000000, 0x0101010100000000, ), + ( 0x0000000000000000, 0x0000000000000010, 0x0000000000001000, 0x0000000000001010, + 0x0000000000100000, 0x0000000000100010, 0x0000000000101000, 0x0000000000101010, + 0x0000000010000000, 0x0000000010000010, 0x0000000010001000, 0x0000000010001010, + 0x0000000010100000, 0x0000000010100010, 0x0000000010101000, 0x0000000010101010, ), + ( 0x0000000000000000, 0x0000000000000001, 0x0000000000000100, 0x0000000000000101, + 0x0000000000010000, 0x0000000000010001, 0x0000000000010100, 0x0000000000010101, + 0x0000000001000000, 0x0000000001000001, 0x0000000001000100, 0x0000000001000101, + 0x0000000001010000, 0x0000000001010001, 0x0000000001010100, 0x0000000001010101, ), + ( 0x0000000000000000, 0x0000004000000000, 0x0000400000000000, 0x0000404000000000, + 0x0040000000000000, 0x0040004000000000, 0x0040400000000000, 0x0040404000000000, + 0x4000000000000000, 0x4000004000000000, 0x4000400000000000, 0x4000404000000000, + 0x4040000000000000, 0x4040004000000000, 0x4040400000000000, 0x4040404000000000, ), + ( 0x0000000000000000, 0x0000000400000000, 0x0000040000000000, 0x0000040400000000, + 0x0004000000000000, 0x0004000400000000, 0x0004040000000000, 0x0004040400000000, + 0x0400000000000000, 0x0400000400000000, 0x0400040000000000, 0x0400040400000000, + 0x0404000000000000, 0x0404000400000000, 0x0404040000000000, 0x0404040400000000, ), + ( 0x0000000000000000, 0x0000000000000040, 0x0000000000004000, 0x0000000000004040, + 0x0000000000400000, 0x0000000000400040, 0x0000000000404000, 0x0000000000404040, + 0x0000000040000000, 0x0000000040000040, 0x0000000040004000, 0x0000000040004040, + 0x0000000040400000, 0x0000000040400040, 0x0000000040404000, 0x0000000040404040, ), + ( 0x0000000000000000, 0x0000000000000004, 0x0000000000000400, 0x0000000000000404, + 0x0000000000040000, 0x0000000000040004, 0x0000000000040400, 0x0000000000040404, + 0x0000000004000000, 0x0000000004000004, 0x0000000004000400, 0x0000000004000404, + 0x0000000004040000, 0x0000000004040004, 0x0000000004040400, 0x0000000004040404, ), + ) + #=================================================================== + # eof _load_tables() + #=================================================================== + +#============================================================================= +# support +#============================================================================= + +def _permute(c, p): + """Returns the permutation of the given 32-bit or 64-bit code with + the specified permutation table.""" + # NOTE: only difference between 32 & 64 bit permutations + # is that len(p)==8 for 32 bit, and len(p)==16 for 64 bit. 
+    out = 0
+    for r in p:
+        out |= r[c&0xf]
+        c >>= 4
+    return out
+
+#=============================================================================
+# packing & unpacking
+#=============================================================================
+_uint64_struct = struct.Struct(">Q")
+
+_BNULL = b('\x00')
+
+def _pack64(value):
+    return _uint64_struct.pack(value)
+
+def _unpack64(value):
+    return _uint64_struct.unpack(value)[0]
+
+def _pack56(value):
+    return _uint64_struct.pack(value)[1:]
+
+def _unpack56(value):
+    return _uint64_struct.unpack(_BNULL+value)[0]
+
+#=============================================================================
+# 56->64 key manipulation
+#=============================================================================
+
+##def expand_7bit(value):
+##    "expand 7-bit integer => 7-bits + 1 odd-parity bit"
+##    # parity calc adapted from 32-bit even parity alg found at
+##    # http://graphics.stanford.edu/~seander/bithacks.html#ParityParallel
+##    assert 0 <= value < 0x80, "value out of range"
+##    return (value<<1) | (0x9669 >> ((value ^ (value >> 4)) & 0xf)) & 1
+
+_EXPAND_ITER = irange(49,-7,-7)
+
+def expand_des_key(key):
+    "convert DES key from 7 bytes to 8 bytes (by inserting empty parity bits)"
+    if isinstance(key, bytes):
+        if len(key) != 7:
+            raise ValueError("key must be 7 bytes in size")
+    elif isinstance(key, int_types):
+        if key < 0 or key > INT_56_MASK:
+            raise ValueError("key must be 56-bit non-negative integer")
+        return _unpack64(expand_des_key(_pack56(key)))
+    else:
+        raise exc.ExpectedTypeError(key, "bytes or int", "key")
+    key = _unpack56(key)
+    # NOTE: the following would insert correctly-valued parity bits in each key,
+    #       but the parity bit would just be ignored in des_encrypt_block(),
+    #       so not bothering to use it.
+    ##return join_byte_values(expand_7bit((key >> shift) & 0x7f)
+    ##                        for shift in _EXPAND_ITER)
+    return join_byte_values(((key>>shift) & 0x7f)<<1 for shift in _EXPAND_ITER)
+
+def shrink_des_key(key):
+    "convert DES key from 8 bytes to 7 bytes (by discarding the parity bits)"
+    if isinstance(key, bytes):
+        if len(key) != 8:
+            raise ValueError("key must be 8 bytes in size")
+        return _pack56(shrink_des_key(_unpack64(key)))
+    elif isinstance(key, int_types):
+        if key < 0 or key > INT_64_MASK:
+            raise ValueError("key must be 64-bit non-negative integer")
+    else:
+        raise exc.ExpectedTypeError(key, "bytes or int", "key")
+    key >>= 1
+    result = 0
+    offset = 0
+    while offset < 56:
+        result |= (key & 0x7f)<<offset
+        key >>= 8
+        offset += 7
+    assert not (result & ~INT_64_MASK)
+    return result
+
+#=============================================================================
+# des encryption
+#=============================================================================
+def des_encrypt_block(key, input, salt=0, rounds=1):
+    """encrypt single block of data using DES, operates on 8-byte strings.
+
+    :arg key:
+        DES key as 7 byte string, or 8 byte string with parity bits
+        (parity bit values are ignored).
+
+    :arg input:
+        plaintext block to encrypt, as 8 byte string.
+
+    :arg salt:
+        Optional 24-bit integer used to mutate the base DES algorithm in a
+        manner specific to :class:`~passlib.hash.des_crypt` and its variants.
+        The default value ``0`` provides the normal (unsalted) DES behavior.
+        The salt functions as follows:
+        if the ``i``'th bit of ``salt`` is set,
+        bits ``i`` and ``i+24`` are swapped in the DES E-box output.
+
+    :arg rounds:
+        Optional number of rounds to apply the DES key schedule.
+ the default (``rounds=1``) provides the normal DES behavior, + but :class:`~passlib.hash.des_crypt` and it's variants use + alternate rounds values. + + :raises TypeError: if any of the provided args are of the wrong type. + :raises ValueError: + if any of the input blocks are the wrong size, + or the salt/rounds values are out of range. + + :returns: + resulting 8-byte ciphertext block. + """ + # validate & unpack key + if isinstance(key, bytes): + if len(key) == 7: + key = expand_des_key(key) + elif len(key) != 8: + raise ValueError("key must be 7 or 8 bytes") + key = _unpack64(key) + else: + raise exc.ExpectedTypeError(key, "bytes", "key") + + # validate & unpack input + if isinstance(input, bytes): + if len(input) != 8: + raise ValueError("input block must be 8 bytes") + input = _unpack64(input) + else: + raise exc.ExpectedTypeError(input, "bytes", "input") + + # hand things off to other func + result = des_encrypt_int_block(key, input, salt, rounds) + + # repack result + return _pack64(result) + +def des_encrypt_int_block(key, input, salt=0, rounds=1): + """encrypt single block of data using DES, operates on 64-bit integers. + + this function is essentially the same as :func:`des_encrypt_block`, + except that it operates on integers, and will NOT automatically + expand 56-bit keys if provided (since there's no way to detect them). + + :arg key: + DES key as 64-bit integer (the parity bits are ignored). + + :arg input: + input block as 64-bit integer + + :arg salt: + optional 24-bit integer used to mutate the base DES algorithm. + defaults to ``0`` (no mutation applied). + + :arg rounds: + optional number of rounds of to apply the DES key schedule. + defaults to ``1``. + + :raises TypeError: if any of the provided args are of the wrong type. + :raises ValueError: + if any of the input blocks are the wrong size, + or the salt/rounds values are out of range. + + :returns: + resulting ciphertext as 64-bit integer. + """ + #--------------------------------------------------------------- + # input validation + #--------------------------------------------------------------- + + # validate salt, rounds + if rounds < 1: + raise ValueError("rounds must be positive integer") + if salt < 0 or salt > INT_24_MASK: + raise ValueError("salt must be 24-bit non-negative integer") + + # validate & unpack key + if not isinstance(key, int_types): + raise exc.ExpectedTypeError(key, "int", "key") + elif key < 0 or key > INT_64_MASK: + raise ValueError("key must be 64-bit non-negative integer") + + # validate & unpack input + if not isinstance(input, int_types): + raise exc.ExpectedTypeError(input, "int", "input") + elif input < 0 or input > INT_64_MASK: + raise ValueError("input must be 64-bit non-negative integer") + + #--------------------------------------------------------------- + # DES setup + #--------------------------------------------------------------- + # load tables if not already done + global SPE, PCXROT, IE3264, CF6464 + if PCXROT is None: + _load_tables() + + # load SPE into local vars to speed things up and remove an array access call + SPE0, SPE1, SPE2, SPE3, SPE4, SPE5, SPE6, SPE7 = SPE + + # NOTE: parity bits are ignored completely + # (UTs do fuzz testing to ensure this) + + # generate key schedule + # NOTE: generation was modified to output two elements at a time, + # so that per-round loop could do two passes at once. 
+ def _iter_key_schedule(ks_odd): + "given 64-bit key, iterates over the 8 (even,odd) key schedule pairs" + for p_even, p_odd in PCXROT: + ks_even = _permute(ks_odd, p_even) + ks_odd = _permute(ks_even, p_odd) + yield ks_even & _KS_MASK, ks_odd & _KS_MASK + ks_list = list(_iter_key_schedule(key)) + + # expand 24 bit salt -> 32 bit per des_crypt & bsdi_crypt + salt = ( + ((salt & 0x00003f) << 26) | + ((salt & 0x000fc0) << 12) | + ((salt & 0x03f000) >> 2) | + ((salt & 0xfc0000) >> 16) + ) + + # init L & R + if input == 0: + L = R = 0 + else: + L = ((input >> 31) & 0xaaaaaaaa) | (input & 0x55555555) + L = _permute(L, IE3264) + + R = ((input >> 32) & 0xaaaaaaaa) | ((input >> 1) & 0x55555555) + R = _permute(R, IE3264) + + #--------------------------------------------------------------- + # main DES loop - run for specified number of rounds + #--------------------------------------------------------------- + while rounds: + rounds -= 1 + + # run over each part of the schedule, 2 parts at a time + for ks_even, ks_odd in ks_list: + k = ((R>>32) ^ R) & salt # use the salt to flip specific bits + B = (k<<32) ^ k ^ R ^ ks_even + + L ^= (SPE0[(B>>58)&0x3f] ^ SPE1[(B>>50)&0x3f] ^ + SPE2[(B>>42)&0x3f] ^ SPE3[(B>>34)&0x3f] ^ + SPE4[(B>>26)&0x3f] ^ SPE5[(B>>18)&0x3f] ^ + SPE6[(B>>10)&0x3f] ^ SPE7[(B>>2)&0x3f]) + + k = ((L>>32) ^ L) & salt # use the salt to flip specific bits + B = (k<<32) ^ k ^ L ^ ks_odd + + R ^= (SPE0[(B>>58)&0x3f] ^ SPE1[(B>>50)&0x3f] ^ + SPE2[(B>>42)&0x3f] ^ SPE3[(B>>34)&0x3f] ^ + SPE4[(B>>26)&0x3f] ^ SPE5[(B>>18)&0x3f] ^ + SPE6[(B>>10)&0x3f] ^ SPE7[(B>>2)&0x3f]) + + # swap L and R + L, R = R, L + + #--------------------------------------------------------------- + # return final result + #--------------------------------------------------------------- + C = ( + ((L>>3) & 0x0f0f0f0f00000000) + | + ((L<<33) & 0xf0f0f0f000000000) + | + ((R>>35) & 0x000000000f0f0f0f) + | + ((R<<1) & 0x00000000f0f0f0f0) + ) + return _permute(C, CF6464) + +@deprecated_function(deprecated="1.6", removed="1.8", + replacement="des_encrypt_int_block()") +def mdes_encrypt_int_block(key, input, salt=0, rounds=1): # pragma: no cover -- deprecated & unused + if isinstance(key, bytes): + if len(key) == 7: + key = expand_des_key(key) + key = _unpack64(key) + return des_encrypt_int_block(key, input, salt, rounds) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/utils/handlers.py b/passlib/utils/handlers.py new file mode 100644 index 00000000..4d03b3b2 --- /dev/null +++ b/passlib/utils/handlers.py @@ -0,0 +1,1664 @@ +"""passlib.handler - code for implementing handlers, and global registry for handlers""" +#============================================================================= +# imports +#============================================================================= +from __future__ import with_statement +# core +import inspect +import re +import hashlib +import logging; log = logging.getLogger(__name__) +import time +import os +from warnings import warn +# site +# pkg +import passlib.exc as exc +from passlib.exc import MissingBackendError, PasslibConfigWarning, \ + PasslibHashWarning +from passlib.ifc import PasswordHash +from passlib.registry import get_crypt_handler +from passlib.utils import classproperty, consteq, getrandstr, getrandbytes,\ + BASE64_CHARS, HASH64_CHARS, rng, to_native_str, \ + is_crypt_handler, to_unicode, \ + MAX_PASSWORD_SIZE +from 
passlib.utils.compat import b, join_byte_values, bytes, irange, u, \ + uascii_to_str, join_unicode, unicode, str_to_uascii, \ + join_unicode, base_string_types, PY2, int_types +# local +__all__ = [ + # helpers for implementing MCF handlers + 'parse_mc2', + 'parse_mc3', + 'render_mc2', + 'render_mc3', + + # framework for implementing handlers + 'GenericHandler', + 'StaticHandler', + 'HasUserContext', + 'HasRawChecksum', + 'HasManyIdents', + 'HasSalt', + 'HasRawSalt', + 'HasRounds', + 'HasManyBackends', + + # other helpers + 'PrefixWrapper', +] + +#============================================================================= +# constants +#============================================================================= + +# common salt_chars & checksum_chars values +# (BASE64_CHARS, HASH64_CHARS imported above) +PADDED_BASE64_CHARS = BASE64_CHARS + u("=") +HEX_CHARS = u("0123456789abcdefABCDEF") +UPPER_HEX_CHARS = u("0123456789ABCDEF") +LOWER_HEX_CHARS = u("0123456789abcdef") + +# special byte string containing all possible byte values +# XXX: treated as singleton by some of the code for efficiency. +ALL_BYTE_VALUES = join_byte_values(irange(256)) + +# deprecated aliases - will be removed after passlib 1.8 +H64_CHARS = HASH64_CHARS +B64_CHARS = BASE64_CHARS +PADDED_B64_CHARS = PADDED_BASE64_CHARS +UC_HEX_CHARS = UPPER_HEX_CHARS +LC_HEX_CHARS = LOWER_HEX_CHARS + +#============================================================================= +# support functions +#============================================================================= +def _bitsize(count, chars): + """helper for bitsize() methods""" + if chars and count: + import math + return int(count * math.log(len(chars), 2)) + else: + return 0 + +#============================================================================= +# parsing helpers +#============================================================================= +_UDOLLAR = u("$") +_UZERO = u("0") + +def validate_secret(secret): + "ensure secret has correct type & size" + if not isinstance(secret, base_string_types): + raise exc.ExpectedStringError(secret, "secret") + if len(secret) > MAX_PASSWORD_SIZE: + raise exc.PasswordSizeError() + +def to_unicode_for_identify(hash): + "convert hash to unicode for identify method" + if isinstance(hash, unicode): + return hash + elif isinstance(hash, bytes): + # try as utf-8, but if it fails, use foolproof latin-1, + # since we don't really care about non-ascii chars + # when running identify. + try: + return hash.decode("utf-8") + except UnicodeDecodeError: + return hash.decode("latin-1") + else: + raise exc.ExpectedStringError(hash, "hash") + +def parse_mc2(hash, prefix, sep=_UDOLLAR, handler=None): + """parse hash using 2-part modular crypt format. + + this expects a hash of the format :samp:`{prefix}{salt}[${checksum}]`, + such as md5_crypt, and parses it into salt / checksum portions. + + :arg hash: the hash to parse (bytes or unicode) + :arg prefix: the identifying prefix (unicode) + :param sep: field separator (unicode, defaults to ``$``). + :param handler: handler class to pass to error constructors. + + :returns: + a ``(salt, chk | None)`` tuple. 
+ """ + # detect prefix + hash = to_unicode(hash, "ascii", "hash") + assert isinstance(prefix, unicode) + if not hash.startswith(prefix): + raise exc.InvalidHashError(handler) + + # parse 2-part hash or 1-part config string + assert isinstance(sep, unicode) + parts = hash[len(prefix):].split(sep) + if len(parts) == 2: + salt, chk = parts + return salt, chk or None + elif len(parts) == 1: + return parts[0], None + else: + raise exc.MalformedHashError(handler) + +def parse_mc3(hash, prefix, sep=_UDOLLAR, rounds_base=10, + default_rounds=None, handler=None): + """parse hash using 3-part modular crypt format. + + this expects a hash of the format :samp:`{prefix}[{rounds}]${salt}[${checksum}]`, + such as sha1_crypt, and parses it into rounds / salt / checksum portions. + tries to convert the rounds to an integer, + and throws error if it has zero-padding. + + :arg hash: the hash to parse (bytes or unicode) + :arg prefix: the identifying prefix (unicode) + :param sep: field separator (unicode, defaults to ``$``). + :param rounds_base: + the numeric base the rounds are encoded in (defaults to base 10). + :param default_rounds: + the default rounds value to return if the rounds field was omitted. + if this is ``None`` (the default), the rounds field is *required*. + :param handler: handler class to pass to error constructors. + + :returns: + a ``(rounds : int, salt, chk | None)`` tuple. + """ + # detect prefix + hash = to_unicode(hash, "ascii", "hash") + assert isinstance(prefix, unicode) + if not hash.startswith(prefix): + raise exc.InvalidHashError(handler) + + # parse 3-part hash or 2-part config string + assert isinstance(sep, unicode) + parts = hash[len(prefix):].split(sep) + if len(parts) == 3: + rounds, salt, chk = parts + elif len(parts) == 2: + rounds, salt = parts + chk = None + else: + raise exc.MalformedHashError(handler) + + # validate & parse rounds portion + if rounds.startswith(_UZERO) and rounds != _UZERO: + raise exc.ZeroPaddedRoundsError(handler) + elif rounds: + rounds = int(rounds, rounds_base) + elif default_rounds is None: + raise exc.MalformedHashError(handler, "empty rounds field") + else: + rounds = default_rounds + + # return result + return rounds, salt, chk or None + +#============================================================================= +# formatting helpers +#============================================================================= +def render_mc2(ident, salt, checksum, sep=u("$")): + """format hash using 2-part modular crypt format; inverse of parse_mc2() + + returns native string with format :samp:`{ident}{salt}[${checksum}]`, + such as used by md5_crypt. + + :arg ident: identifier prefix (unicode) + :arg salt: encoded salt (unicode) + :arg checksum: encoded checksum (unicode or None) + :param sep: separator char (unicode, defaults to ``$``) + + :returns: + config or hash (native str) + """ + if checksum: + parts = [ident, salt, sep, checksum] + else: + parts = [ident, salt] + return uascii_to_str(join_unicode(parts)) + +def render_mc3(ident, rounds, salt, checksum, sep=u("$"), rounds_base=10): + """format hash using 3-part modular crypt format; inverse of parse_mc3() + + returns native string with format :samp:`{ident}[{rounds}$]{salt}[${checksum}]`, + such as used by sha1_crypt. 
+ + :arg ident: identifier prefix (unicode) + :arg rounds: rounds field (int or None) + :arg salt: encoded salt (unicode) + :arg checksum: encoded checksum (unicode or None) + :param sep: separator char (unicode, defaults to ``$``) + :param rounds_base: base to encode rounds value (defaults to base 10) + + :returns: + config or hash (native str) + """ + if rounds is None: + rounds = u('') + elif rounds_base == 16: + rounds = u("%x") % rounds + else: + assert rounds_base == 10 + rounds = unicode(rounds) + if checksum: + parts = [ident, rounds, sep, salt, sep, checksum] + else: + parts = [ident, rounds, sep, salt] + return uascii_to_str(join_unicode(parts)) + +#============================================================================= +# GenericHandler +#============================================================================= +class GenericHandler(PasswordHash): + """helper class for implementing hash handlers. + + GenericHandler-derived classes will have (at least) the following + constructor options, though others may be added by mixins + and by the class itself: + + :param checksum: + this should contain the digest portion of a + parsed hash (mainly provided when the constructor is called + by :meth:`from_string()`). + defaults to ``None``. + + :param use_defaults: + If ``False`` (the default), a :exc:`TypeError` should be thrown + if any settings required by the handler were not explicitly provided. + + If ``True``, the handler should attempt to provide a default for any + missing values. This means generate missing salts, fill in default + cost parameters, etc. + + This is typically only set to ``True`` when the constructor + is called by :meth:`encrypt`, allowing user-provided values + to be handled in a more permissive manner. + + :param relaxed: + If ``False`` (the default), a :exc:`ValueError` should be thrown + if any settings are out of bounds or otherwise invalid. + + If ``True``, they should be corrected if possible, and a warning + issue. If not possible, only then should an error be raised. + (e.g. under ``relaxed=True``, rounds values will be clamped + to min/max rounds). + + This is mainly used when parsing the config strings of certain + hashes, whose specifications implementations to be tolerant + of incorrect values in salt strings. + + Class Attributes + ================ + + .. attribute:: ident + + [optional] + If this attribute is filled in, the default :meth:`identify` method will use + it as a identifying prefix that can be used to recognize instances of this handler's + hash. Filling this out is recommended for speed. + + This should be a unicode str. + + .. attribute:: _hash_regex + + [optional] + If this attribute is filled in, the default :meth:`identify` method + will use it to recognize instances of the hash. If :attr:`ident` + is specified, this will be ignored. + + This should be a unique regex object. + + .. attribute:: checksum_size + + [optional] + Specifies the number of characters that should be expected in the checksum string. + If omitted, no check will be performed. + + .. attribute:: checksum_chars + + [optional] + A string listing all the characters allowed in the checksum string. + If omitted, no check will be performed. + + This should be a unicode str. + + .. attribute:: _stub_checksum + + [optional] + If specified, hashes with this checksum will have their checksum + normalized to ``None``, treating it like a config string. 
+ This is mainly used by hash formats which don't have a concept + of a config string, so a unlikely-to-occur checksum (e.g. all zeros) + is used by some implementations. + + This should be a string of the same datatype as :attr:`checksum`, + or ``None``. + + Instance Attributes + =================== + .. attribute:: checksum + + The checksum string provided to the constructor (after passing it + through :meth:`_norm_checksum`). + + Required Subclass Methods + ========================= + The following methods must be provided by handler subclass: + + .. automethod:: from_string + .. automethod:: to_string + .. automethod:: _calc_checksum + + Default Methods + =============== + The following methods have default implementations that should work for + most cases, though they may be overridden if the hash subclass needs to: + + .. automethod:: _norm_checksum + + .. automethod:: genconfig + .. automethod:: genhash + .. automethod:: identify + .. automethod:: encrypt + .. automethod:: verify + """ + + #=================================================================== + # class attr + #=================================================================== + # this must be provided by the actual class. + setting_kwds = None + + # providing default since most classes don't use this at all. + context_kwds = () + + # optional prefix that uniquely identifies hash + ident = None + + # optional regexp for recognizing hashes, + # used by default identify() if .ident isn't specified. + _hash_regex = None + + # if specified, _norm_checksum will require this length + checksum_size = None + + # if specified, _norm_checksum() will validate this + checksum_chars = None + + # if specified, hashes with this checksum will be treated + # as if no checksum was specified. + _stub_checksum = None + + # private flag used by HasRawChecksum + _checksum_is_bytes = False + + #=================================================================== + # instance attrs + #=================================================================== + checksum = None # stores checksum +# use_defaults = False # whether _norm_xxx() funcs should fill in defaults. +# relaxed = False # when _norm_xxx() funcs should be strict about inputs + + #=================================================================== + # init + #=================================================================== + def __init__(self, checksum=None, use_defaults=False, relaxed=False, + **kwds): + self.use_defaults = use_defaults + self.relaxed = relaxed + super(GenericHandler, self).__init__(**kwds) + self.checksum = self._norm_checksum(checksum) + + def _norm_checksum(self, checksum): + """validates checksum keyword against class requirements, + returns normalized version of checksum. + """ + # NOTE: by default this code assumes checksum should be unicode. + # For classes where the checksum is raw bytes, the HasRawChecksum sets + # the _checksum_is_bytes flag which alters various code paths below. 
+ if checksum is None: + return None + + # normalize to bytes / unicode + raw = self._checksum_is_bytes + if raw: + # NOTE: no clear route to reasonbly convert unicode -> raw bytes, + # so relaxed does nothing here + if not isinstance(checksum, bytes): + raise exc.ExpectedTypeError(checksum, "bytes", "checksum") + + elif not isinstance(checksum, unicode): + if isinstance(checksum, bytes) and self.relaxed: + warn("checksum should be unicode, not bytes", + PasslibHashWarning) + checksum = checksum.decode("ascii") + else: + raise exc.ExpectedTypeError(checksum, "unicode", "checksum") + + # handle stub + if checksum == self._stub_checksum: + return None + + # check size + cc = self.checksum_size + if cc and len(checksum) != cc: + raise exc.ChecksumSizeError(self, raw=raw) + + # check charset + if not raw: + cs = self.checksum_chars + if cs and any(c not in cs for c in checksum): + raise ValueError("invalid characters in %s checksum" % + (self.name,)) + + return checksum + + #=================================================================== + # password hash api - formatting interface + #=================================================================== + @classmethod + def identify(cls, hash): + # NOTE: subclasses may wish to use faster / simpler identify, + # and raise value errors only when an invalid (but identifiable) + # string is parsed + hash = to_unicode_for_identify(hash) + if not hash: + return False + + # does class specify a known unique prefix to look for? + ident = cls.ident + if ident is not None: + return hash.startswith(ident) + + # does class provide a regexp to use? + pat = cls._hash_regex + if pat is not None: + return pat.match(hash) is not None + + # as fallback, try to parse hash, and see if we succeed. + # inefficient, but works for most cases. + try: + cls.from_string(hash) + return True + except ValueError: + return False + + @classmethod + def from_string(cls, hash, **context): # pragma: no cover + """return parsed instance from hash/configuration string + + :param \*\*context: + context keywords to pass to constructor (if applicable). + + :raises ValueError: if hash is incorrectly formatted + + :returns: + hash parsed into components, + for formatting / calculating checksum. + """ + raise NotImplementedError("%s must implement from_string()" % (cls,)) + + def to_string(self): # pragma: no cover + """render instance to hash or configuration string + + :returns: + if :attr:`checksum` is set, should return full hash string. + if not, should either return abbreviated configuration string, + or fill in a stub checksum. 
+ + should return native string type (ascii-bytes under python 2, + unicode under python 3) + """ + # NOTE: documenting some non-standardized but common kwd flags + # that passlib to_string() method may have: + # + # withchk=True -- if false, omit checksum portion of hash + # + raise NotImplementedError("%s must implement from_string()" % + (self.__class__,)) + + ##def to_config_string(self): + ## "helper for generating configuration string (ignoring hash)" + ## orig = self.checksum + ## try: + ## self.checksum = None + ## return self.to_string() + ## finally: + ## self.checksum = orig + + #=================================================================== + #'crypt-style' interface (default implementation) + #=================================================================== + @classmethod + def genconfig(cls, **settings): + return cls(use_defaults=True, **settings).to_string() + + @classmethod + def genhash(cls, secret, config, **context): + validate_secret(secret) + self = cls.from_string(config, **context) + self.checksum = self._calc_checksum(secret) + return self.to_string() + + def _calc_checksum(self, secret): # pragma: no cover + """given secret; calcuate and return encoded checksum portion of hash + string, taking config from object state + + calc checksum implementations may assume secret is always + either unicode or bytes, checks are performed by verify/etc. + """ + raise NotImplementedError("%s must implement _calc_checksum()" % + (self.__class__,)) + + #=================================================================== + #'application' interface (default implementation) + #=================================================================== + @classmethod + def encrypt(cls, secret, **kwds): + validate_secret(secret) + self = cls(use_defaults=True, **kwds) + self.checksum = self._calc_checksum(secret) + return self.to_string() + + @classmethod + def verify(cls, secret, hash, **context): + # NOTE: classes with multiple checksum encodings should either + # override this method, or ensure that from_string() / _norm_checksum() + # ensures .checksum always uses a single canonical representation. + validate_secret(secret) + self = cls.from_string(hash, **context) + chk = self.checksum + if chk is None: + raise exc.MissingDigestError(cls) + return consteq(self._calc_checksum(secret), chk) + + #=================================================================== + # experimental - the following methods are not finished or tested, + # but way work correctly for some hashes + #=================================================================== + _unparsed_settings = ("salt_size", "relaxed") + _unsafe_settings = ("salt", "checksum") + + @classproperty + def _parsed_settings(cls): + return (key for key in cls.setting_kwds + if key not in cls._unparsed_settings) + + @staticmethod + def _sanitize(value, char=u("*")): + "default method to obscure sensitive fields" + if value is None: + return None + if isinstance(value, bytes): + from passlib.utils import ab64_encode + value = ab64_encode(value).decode("ascii") + elif not isinstance(value, unicode): + value = unicode(value) + size = len(value) + clip = min(4, size//8) + return value[:clip] + char * (size-clip) + + @classmethod + def parsehash(cls, hash, checksum=True, sanitize=False): + """[experimental method] parse hash into dictionary of settings. 
+ + this essentially acts as the inverse of :meth:`encrypt`: for most + cases, if ``hash = cls.encrypt(secret, **opts)``, then + ``cls.parsehash(hash)`` will return a dict matching the original options + (with the extra keyword *checksum*). + + this method may not work correctly for all hashes, + and may not be available on some few. it's interface may + change in future releases, if it's kept around at all. + + :arg hash: hash to parse + :param checksum: include checksum keyword? (defaults to True) + :param sanitize: mask data for sensitive fields? (defaults to False) + """ + # FIXME: this may not work for hashes with non-standard settings. + # XXX: how should this handle checksum/salt encoding? + # need to work that out for encrypt anyways. + self = cls.from_string(hash) + # XXX: could split next few lines out as self._parsehash() for subclassing + # XXX: could try to resolve ident/variant to publically suitable alias. + UNSET = object() + kwds = dict((key, getattr(self, key)) for key in self._parsed_settings + if getattr(self, key) != getattr(cls, key, UNSET)) + if checksum and self.checksum is not None: + kwds['checksum'] = self.checksum + if sanitize: + if sanitize is True: + sanitize = cls._sanitize + for key in cls._unsafe_settings: + if key in kwds: + kwds[key] = sanitize(kwds[key]) + return kwds + + @classmethod + def bitsize(cls, **kwds): + "[experimental method] return info about bitsizes of hash" + try: + info = super(GenericHandler, cls).bitsize(**kwds) + except AttributeError: + info = {} + cc = ALL_BYTE_VALUES if cls._checksum_is_bytes else cls.checksum_chars + if cls.checksum_size and cc: + # FIXME: this may overestimate size due to padding bits (e.g. bcrypt) + # FIXME: this will be off by 1 for case-insensitive hashes. + info['checksum'] = _bitsize(cls.checksum_size, cc) + return info + + #=================================================================== + # eoc + #=================================================================== + +class StaticHandler(GenericHandler): + """GenericHandler mixin for classes which have no settings. + + This mixin assumes the entirety of the hash ise stored in the + :attr:`checksum` attribute; that the hash has no rounds, salt, + etc. This class provides the following: + + * a default :meth:`genconfig` that always returns None. + * a default :meth:`from_string` and :meth:`to_string` + that store the entire hash within :attr:`checksum`, + after optionally stripping a constant prefix. + + All that is required by subclasses is an implementation of + the :meth:`_calc_checksum` method. + """ + # TODO: document _norm_hash() + + setting_kwds = () + + # optional constant prefix subclasses can specify + _hash_prefix = u("") + + @classmethod + def from_string(cls, hash, **context): + # default from_string() which strips optional prefix, + # and passes rest unchanged as checksum value. 
+ hash = to_unicode(hash, "ascii", "hash") + hash = cls._norm_hash(hash) + # could enable this for extra strictness + ##pat = cls._hash_regex + ##if pat and pat.match(hash) is None: + ## raise ValueError("not a valid %s hash" % (cls.name,)) + prefix = cls._hash_prefix + if prefix: + if hash.startswith(prefix): + hash = hash[len(prefix):] + else: + raise exc.InvalidHashError(cls) + return cls(checksum=hash, **context) + + @classmethod + def _norm_hash(cls, hash): + "helper for subclasses to normalize case if needed" + return hash + + def to_string(self): + assert self.checksum is not None + return uascii_to_str(self._hash_prefix + self.checksum) + + @classmethod + def genconfig(cls): + # since it has no settings, there's no need for a config string. + return None + + @classmethod + def genhash(cls, secret, config, **context): + # since it has no settings, just verify config, and call encrypt() + if config is not None and not cls.identify(config): + raise exc.InvalidHashError(cls) + return cls.encrypt(secret, **context) + + # per-subclass: stores dynamically created subclass used by _calc_checksum() stub + __cc_compat_hack = None + + def _calc_checksum(self, secret): + """given secret; calcuate and return encoded checksum portion of hash + string, taking config from object state + """ + # NOTE: prior to 1.6, StaticHandler required classes implement genhash + # instead of this method. so if we reach here, we try calling genhash. + # if that succeeds, we issue deprecation warning. if it fails, + # we'll just recurse back to here, but in a different instance. + # so before we call genhash, we create a subclass which handles + # throwing the NotImplementedError. + cls = self.__class__ + assert cls.__module__ != __name__ + wrapper_cls = cls.__cc_compat_hack + if wrapper_cls is None: + def inner(self, secret): + raise NotImplementedError("%s must implement _calc_checksum()" % + (cls,)) + wrapper_cls = cls.__cc_compat_hack = type(cls.__name__ + "_wrapper", + (cls,), dict(_calc_checksum=inner, __module__=cls.__module__)) + context = dict((k,getattr(self,k)) for k in self.context_kwds) + hash = wrapper_cls.genhash(secret, None, **context) + warn("%r should be updated to implement StaticHandler._calc_checksum() " + "instead of StaticHandler.genhash(), support for the latter " + "style will be removed in Passlib 1.8" % (cls), + DeprecationWarning) + return str_to_uascii(hash) + +#============================================================================= +# GenericHandler mixin classes +#============================================================================= +class HasEncodingContext(GenericHandler): + """helper for classes which require knowledge of the encoding used""" + context_kwds = ("encoding",) + default_encoding = "utf-8" + + def __init__(self, encoding=None, **kwds): + super(HasEncodingContext, self).__init__(**kwds) + self.encoding = encoding or self.default_encoding + +class HasUserContext(GenericHandler): + """helper for classes which require a user context keyword""" + context_kwds = ("user",) + + def __init__(self, user=None, **kwds): + super(HasUserContext, self).__init__(**kwds) + self.user = user + + # XXX: would like to validate user input here, but calls to from_string() + # which lack context keywords would then fail; so leaving code per-handler. + + # wrap funcs to accept 'user' as positional arg for ease of use. 
+ @classmethod + def encrypt(cls, secret, user=None, **context): + return super(HasUserContext, cls).encrypt(secret, user=user, **context) + + @classmethod + def verify(cls, secret, hash, user=None, **context): + return super(HasUserContext, cls).verify(secret, hash, user=user, **context) + + @classmethod + def genhash(cls, secret, config, user=None, **context): + return super(HasUserContext, cls).genhash(secret, config, user=user, **context) + + # XXX: how to guess the entropy of a username? + # most of these hashes are for a system (e.g. Oracle) + # which has a few *very common* names and thus really low entropy; + # while the rest are slightly less predictable. + # need to find good reference about this. + ##@classmethod + ##def bitsize(cls, **kwds): + ## info = super(HasUserContext, cls).bitsize(**kwds) + ## info['user'] = xxx + ## return info + +#------------------------------------------------------------------------ +# checksum mixins +#------------------------------------------------------------------------ +class HasRawChecksum(GenericHandler): + """mixin for classes which work with decoded checksum bytes + + .. todo:: + + document this class's usage + """ + # NOTE: GenericHandler.checksum_chars is ignored by this implementation. + + # NOTE: all HasRawChecksum code is currently part of GenericHandler, + # using private '_checksum_is_bytes' flag. + # this arrangement may be changed in the future. + _checksum_is_bytes = True + +#------------------------------------------------------------------------ +# ident mixins +#------------------------------------------------------------------------ +class HasManyIdents(GenericHandler): + """mixin for hashes which use multiple prefix identifiers + + For the hashes which may use multiple identifier prefixes, + this mixin adds an ``ident`` keyword to constructor. + Any value provided is passed through the :meth:`norm_idents` method, + which takes care of validating the identifier, + as well as allowing aliases for easier specification + of the identifiers by the user. + + .. todo:: + + document this class's usage + """ + + #=================================================================== + # class attrs + #=================================================================== + default_ident = None # should be unicode + ident_values = None # should be list of unicode strings + ident_aliases = None # should be dict of unicode -> unicode + # NOTE: any aliases provided to norm_ident() as bytes + # will have been converted to unicode before + # comparing against this dictionary. + + # NOTE: relying on test_06_HasManyIdents() to verify + # these are configured correctly. 
+ + #=================================================================== + # instance attrs + #=================================================================== + ident = None + + #=================================================================== + # init + #=================================================================== + def __init__(self, ident=None, **kwds): + super(HasManyIdents, self).__init__(**kwds) + self.ident = self._norm_ident(ident) + + def _norm_ident(self, ident): + # fill in default identifier + if ident is None: + if not self.use_defaults: + raise TypeError("no ident specified") + ident = self.default_ident + assert ident is not None, "class must define default_ident" + + # handle unicode + if isinstance(ident, bytes): + ident = ident.decode('ascii') + + # check if identifier is valid + iv = self.ident_values + if ident in iv: + return ident + + # resolve aliases, and recheck against ident_values + ia = self.ident_aliases + if ia: + try: + value = ia[ident] + except KeyError: + pass + else: + if value in iv: + return value + + # failure! + raise ValueError("invalid ident: %r" % (ident,)) + + #=================================================================== + # password hash api + #=================================================================== + @classmethod + def identify(cls, hash): + hash = to_unicode_for_identify(hash) + return any(hash.startswith(ident) for ident in cls.ident_values) + + @classmethod + def _parse_ident(cls, hash): + """extract ident prefix from hash, helper for subclasses' from_string()""" + hash = to_unicode(hash, "ascii", "hash") + for ident in cls.ident_values: + if hash.startswith(ident): + return ident, hash[len(ident):] + raise exc.InvalidHashError(cls) + + #=================================================================== + # eoc + #=================================================================== + +#------------------------------------------------------------------------ +# salt mixins +#------------------------------------------------------------------------ +class HasSalt(GenericHandler): + """mixin for validating salts. + + This :class:`GenericHandler` mixin adds a ``salt`` keyword to the class constuctor; + any value provided is passed through the :meth:`_norm_salt` method, + which takes care of validating salt length and content, + as well as generating new salts if one it not provided. + + :param salt: + optional salt string + + :param salt_size: + optional size of salt (only used if no salt provided); + defaults to :attr:`default_salt_size`. + + Class Attributes + ================ + In order for :meth:`!_norm_salt` to do it's job, the following + attributes should be provided by the handler subclass: + + .. attribute:: min_salt_size + + The minimum number of characters allowed in a salt string. + An :exc:`ValueError` will be throw if the provided salt is too small. + Defaults to ``None``, for no minimum. + + .. attribute:: max_salt_size + + The maximum number of characters allowed in a salt string. + By default an :exc:`ValueError` will be throw if the provided salt is + too large; but if ``relaxed=True``, it will be clipped and a warning + issued instead. Defaults to ``None``, for no maximum. + + .. attribute:: default_salt_size + + [required] + If no salt is provided, this should specify the size of the salt + that will be generated by :meth:`_generate_salt`. By default + this will fall back to :attr:`max_salt_size`. + + .. 
attribute:: salt_chars + + A string containing all the characters which are allowed in the salt + string. An :exc:`ValueError` will be throw if any other characters + are encountered. May be set to ``None`` to skip this check (but see + in :attr:`default_salt_chars`). + + .. attribute:: default_salt_chars + + [required] + This attribute controls the set of characters use to generate + *new* salt strings. By default, it mirrors :attr:`salt_chars`. + If :attr:`!salt_chars` is ``None``, this attribute must be specified + in order to generate new salts. Aside from that purpose, + the main use of this attribute is for hashes which wish to generate + salts from a restricted subset of :attr:`!salt_chars`; such as + accepting all characters, but only using a-z. + + Instance Attributes + =================== + .. attribute:: salt + + This instance attribute will be filled in with the salt provided + to the constructor (as adapted by :meth:`_norm_salt`) + + Subclassable Methods + ==================== + .. automethod:: _norm_salt + .. automethod:: _generate_salt + """ + # TODO: document _truncate_salt() + # XXX: allow providing raw salt to this class, and encoding it? + + #=================================================================== + # class attrs + #=================================================================== + + min_salt_size = None + max_salt_size = None + salt_chars = None + + @classproperty + def default_salt_size(cls): + "default salt size (defaults to *max_salt_size*)" + return cls.max_salt_size + + @classproperty + def default_salt_chars(cls): + "charset used to generate new salt strings (defaults to *salt_chars*)" + return cls.salt_chars + + # private helpers for HasRawSalt, shouldn't be used by subclasses + _salt_is_bytes = False + _salt_unit = "chars" + + #=================================================================== + # instance attrs + #=================================================================== + salt = None + + #=================================================================== + # init + #=================================================================== + def __init__(self, salt=None, salt_size=None, **kwds): + super(HasSalt, self).__init__(**kwds) + self.salt = self._norm_salt(salt, salt_size=salt_size) + + def _norm_salt(self, salt, salt_size=None): + """helper to normalize & validate user-provided salt string + + If no salt provided, a random salt is generated + using :attr:`default_salt_size` and :attr:`default_salt_chars`. + + :arg salt: salt string or ``None`` + :param salt_size: optionally specified size of autogenerated salt + + :raises TypeError: + If salt not provided and ``use_defaults=False``. + + :raises ValueError: + + * if salt contains chars that aren't in :attr:`salt_chars`. + * if salt contains less than :attr:`min_salt_size` characters. + * if ``relaxed=False`` and salt has more than :attr:`max_salt_size` + characters (if ``relaxed=True``, the salt is truncated + and a warning is issued instead). + + :returns: + normalized or generated salt + """ + # generate new salt if none provided + if salt is None: + if not self.use_defaults: + raise TypeError("no salt specified") + if salt_size is None: + salt_size = self.default_salt_size + salt = self._generate_salt(salt_size) + + # check type + if self._salt_is_bytes: + if not isinstance(salt, bytes): + raise exc.ExpectedTypeError(salt, "bytes", "salt") + else: + if not isinstance(salt, unicode): + # NOTE: allowing bytes under py2 so salt can be native str. 
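# under py2 this quietly accepts a native-str salt and decodes it as ASCII;
# under py3 a bytes salt is only accepted when relaxed=True, otherwise the
# ExpectedTypeError below is raised.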
+ if isinstance(salt, bytes) and (PY2 or self.relaxed): + salt = salt.decode("ascii") + else: + raise exc.ExpectedTypeError(salt, "unicode", "salt") + + # check charset + sc = self.salt_chars + if sc is not None and any(c not in sc for c in salt): + raise ValueError("invalid characters in %s salt" % self.name) + + # check min size + mn = self.min_salt_size + if mn and len(salt) < mn: + msg = "salt too small (%s requires %s %d %s)" % (self.name, + "exactly" if mn == self.max_salt_size else ">=", mn, + self._salt_unit) + raise ValueError(msg) + + # check max size + mx = self.max_salt_size + if mx and len(salt) > mx: + msg = "salt too large (%s requires %s %d %s)" % (self.name, + "exactly" if mx == mn else "<=", mx, self._salt_unit) + if self.relaxed: + warn(msg, PasslibHashWarning) + salt = self._truncate_salt(salt, mx) + else: + raise ValueError(msg) + + return salt + + @staticmethod + def _truncate_salt(salt, mx): + # NOTE: some hashes (e.g. bcrypt) has structure within their + # salt string. this provides a method to overide to perform + # the truncation properly + return salt[:mx] + + def _generate_salt(self, salt_size): + """helper method for _norm_salt(); generates a new random salt string. + + :arg salt_size: salt size to generate + """ + return getrandstr(rng, self.default_salt_chars, salt_size) + + @classmethod + def bitsize(cls, salt_size=None, **kwds): + "[experimental method] return info about bitsizes of hash" + info = super(HasSalt, cls).bitsize(**kwds) + if salt_size is None: + salt_size = cls.default_salt_size + # FIXME: this may overestimate size due to padding bits + # FIXME: this will be off by 1 for case-insensitive hashes. + info['salt'] = _bitsize(salt_size, cls.default_salt_chars) + return info + + #=================================================================== + # eoc + #=================================================================== + +class HasRawSalt(HasSalt): + """mixin for classes which use decoded salt parameter + + A variant of :class:`!HasSalt` which takes in decoded bytes instead of an encoded string. + + .. todo:: + + document this class's usage + """ + + salt_chars = ALL_BYTE_VALUES + + # NOTE: all HasRawSalt code is currently part of HasSalt, using private + # '_salt_is_bytes' flag. this arrangement may be changed in the future. + _salt_is_bytes = True + _salt_unit = "bytes" + + def _generate_salt(self, salt_size): + assert self.salt_chars in [None, ALL_BYTE_VALUES] + return getrandbytes(rng, salt_size) + +#------------------------------------------------------------------------ +# rounds mixin +#------------------------------------------------------------------------ +class HasRounds(GenericHandler): + """mixin for validating rounds parameter + + This :class:`GenericHandler` mixin adds a ``rounds`` keyword to the class + constuctor; any value provided is passed through the :meth:`_norm_rounds` + method, which takes care of validating the number of rounds. + + :param rounds: optional number of rounds hash should use + + Class Attributes + ================ + In order for :meth:`!_norm_rounds` to do it's job, the following + attributes must be provided by the handler subclass: + + .. attribute:: min_rounds + + The minimum number of rounds allowed. A :exc:`ValueError` will be + thrown if the rounds value is too small. Defaults to ``0``. + + .. attribute:: max_rounds + + The maximum number of rounds allowed. A :exc:`ValueError` will be + thrown if the rounds value is larger than this. 
Defaults to ``None`` + which indicates no limit to the rounds value. + + .. attribute:: default_rounds + + If no rounds value is provided to constructor, this value will be used. + If this is not specified, a rounds value *must* be specified by the + application. + + .. attribute:: rounds_cost + + [required] + The ``rounds`` parameter typically encodes a cpu-time cost + for calculating a hash. This should be set to ``"linear"`` + (the default) or ``"log2"``, depending on how the rounds value relates + to the actual amount of time that will be required. + + Instance Attributes + =================== + .. attribute:: rounds + + This instance attribute will be filled in with the rounds value provided + to the constructor (as adapted by :meth:`_norm_rounds`) + + Subclassable Methods + ==================== + .. automethod:: _norm_rounds + """ + #=================================================================== + # class attrs + #=================================================================== + min_rounds = 0 + max_rounds = None + default_rounds = None + rounds_cost = "linear" # default to the common case + + #=================================================================== + # instance attrs + #=================================================================== + rounds = None + + #=================================================================== + # init + #=================================================================== + def __init__(self, rounds=None, **kwds): + super(HasRounds, self).__init__(**kwds) + self.rounds = self._norm_rounds(rounds) + + def _norm_rounds(self, rounds): + """helper routine for normalizing rounds + + :arg rounds: ``None``, or integer cost parameter. + + + :raises TypeError: + * if ``use_defaults=False`` and no rounds is specified + * if rounds is not an integer. + + :raises ValueError: + + * if rounds is ``None`` and class does not specify a value for + :attr:`default_rounds`. + * if ``relaxed=False`` and rounds is outside bounds of + :attr:`min_rounds` and :attr:`max_rounds` (if ``relaxed=True``, + the rounds value will be clamped, and a warning issued). + + :returns: + normalized rounds value + """ + # fill in default + if rounds is None: + if not self.use_defaults: + raise TypeError("no rounds specified") + rounds = self.default_rounds + if rounds is None: + raise TypeError("%s rounds value must be specified explicitly" + % (self.name,)) + + # check type + if not isinstance(rounds, int_types): + raise exc.ExpectedTypeError(rounds, "integer", "rounds") + + # check bounds + mn = self.min_rounds + if rounds < mn: + msg = "rounds too low (%s requires >= %d rounds)" % (self.name, mn) + if self.relaxed: + warn(msg, PasslibHashWarning) + rounds = mn + else: + raise ValueError(msg) + + mx = self.max_rounds + if mx and rounds > mx: + msg = "rounds too high (%s requires <= %d rounds)" % (self.name, mx) + if self.relaxed: + warn(msg, PasslibHashWarning) + rounds = mx + else: + raise ValueError(msg) + + return rounds + + @classmethod + def bitsize(cls, rounds=None, vary_rounds=.1, **kwds): + "[experimental method] return info about bitsizes of hash" + info = super(HasRounds, cls).bitsize(**kwds) + # NOTE: this essentially estimates how many bits of "salt" + # can be added by varying the rounds value just a little bit. + if cls.rounds_cost != "log2": + # assume rounds can be randomized within the range + # rounds*(1-vary_rounds) ... 
rounds*(1+vary_rounds) + # then this can be used to encode + # log2(rounds*(1+vary_rounds)-rounds*(1-vary_rounds)) + # worth of salt-like bits. this works out to + # 1+log2(rounds*vary_rounds) + import math + if rounds is None: + rounds = cls.default_rounds + info['rounds'] = max(0, int(1+math.log(rounds*vary_rounds,2))) + ## else: # log2 rounds + # all bits of the rounds value are critical to choosing + # the time-cost, and can't be randomized. + return info + + #=================================================================== + # eoc + #=================================================================== + +#------------------------------------------------------------------------ +# backend mixin & helpers +#------------------------------------------------------------------------ +##def _clear_backend(cls): +## "restore HasManyBackend subclass to unloaded state - used by unittests" +## assert issubclass(cls, HasManyBackends) and cls is not HasManyBackends +## if cls._backend: +## del cls._backend +## del cls._calc_checksum + +class HasManyBackends(GenericHandler): + """GenericHandler mixin which provides selecting from multiple backends. + + .. todo:: + + finish documenting this class's usage + + For hashes which need to select from multiple backends, + depending on the host environment, this class + offers a way to specify alternate :meth:`_calc_checksum` methods, + and will dynamically chose the best one at runtime. + + Backend Methods + --------------- + + .. automethod:: get_backend + .. automethod:: set_backend + .. automethod:: has_backend + + Subclass Hooks + -------------- + The following attributes and methods should be filled in by the subclass + which is using :class:`HasManyBackends` as a mixin: + + .. attribute:: backends + + This attribute should be a tuple containing the names of the backends + which are supported. Two common names are ``"os_crypt"`` (if backend + uses :mod:`crypt`), and ``"builtin"`` (if the backend is a pure-python + fallback). + + .. attribute:: _has_backend_{name} + + private class attribute checked by :meth:`has_backend` to see if a + specific backend is available, it should be either ``True`` + or ``False``. One of these should be provided by + the subclass for each backend listed in :attr:`backends`. + + .. classmethod:: _calc_checksum_{name} + + private class method that should implement :meth:`_calc_checksum` + for a given backend. it will only be called if the backend has + been selected by :meth:`set_backend`. One of these should be provided + by the subclass for each backend listed in :attr:`backends`. + """ + + # NOTE: + # subclass must provide: + # * attr 'backends' containing list of known backends (top priority backend first) + # * attr '_has_backend_xxx' for each backend 'xxx', indicating if backend is available on system + # * attr '_calc_checksum_xxx' for each backend 'xxx', containing calc_checksum implementation using that backend + + backends = None # list of backend names, provided by subclass. + + _backend = None # holds currently loaded backend (if any) or None + + @classmethod + def get_backend(cls): + """return name of currently active backend. + + if no backend has been loaded, + loads and returns name of default backend. + + :raises passlib.exc.MissingBackendError: if no backends are available. 
+ + :returns: name of active backend + """ + name = cls._backend + if not name: + cls.set_backend() + name = cls._backend + assert name, "set_backend() didn't load any backends" + return name + + @classmethod + def has_backend(cls, name="any"): + """check if support is currently available for specified backend. + + :arg name: + name of backend to check for. + defaults to ``"any"``, + but can be any string accepted by :meth:`set_backend`. + + :raises ValueError: if backend name is unknown + + :returns: + ``True`` if backend is currently supported, else ``False``. + """ + if name in ("any", "default"): + if name == "any" and cls._backend: + return True + return any(getattr(cls, "_has_backend_" + name) + for name in cls.backends) + elif name in cls.backends: + return getattr(cls, "_has_backend_" + name) + else: + raise ValueError("unknown backend: %r" % (name,)) + + @classmethod + def _no_backends_msg(cls): + return "no %s backends available" % (cls.name,) + + @classmethod + def set_backend(cls, name="any"): + """load specified backend to be used for future _calc_checksum() calls + + this method replaces :meth:`_calc_checksum` with a method + which uses the specified backend. + + :arg name: + name of backend to load, defaults to ``"any"``. + this can be any of the following values: + + * any string in :attr:`backends`, + indicating the specific backend to use. + + * the special string ``"default"``, which means to use + the preferred backend on the given host + (this is generally the first backend in :attr:`backends` + which can be loaded). + + * the special string ``"any"``, which means to use + the current backend if one has been loaded, + else acts like ``"default"``. + + :raises passlib.exc.MissingBackendError: + * ... if a specific backend was requested, + but is not currently available. + + * ... if ``"any"`` or ``"default"`` was specified, + and *no* backends are currently available. + + :returns: + + The return value of this function should be ignored. + """ + if name == "any": + name = cls._backend + if name: + return name + name = "default" + if name == "default": + for name in cls.backends: + if cls.has_backend(name): + break + else: + raise exc.MissingBackendError(cls._no_backends_msg()) + elif not cls.has_backend(name): + raise exc.MissingBackendError("%s backend not available: %r" % + (cls.name, name)) + cls._calc_checksum_backend = getattr(cls, "_calc_checksum_" + name) + cls._backend = name + return name + + def _calc_checksum_backend(self, secret): + "stub for _calc_checksum_backend(), default backend will be selected first time stub is called" + # if we got here, no backend has been loaded; so load default backend + assert not self._backend, "set_backend() failed to replace lazy loader" + self.set_backend() + assert self._backend, "set_backend() failed to load a default backend" + + # this should now invoke the backend-specific version, so call it again. + return self._calc_checksum_backend(secret) + + def _calc_checksum(self, secret): + "wrapper for backend, for common code""" + return self._calc_checksum_backend(secret) + +#============================================================================= +# wrappers +#============================================================================= +class PrefixWrapper(object): + """wraps another handler, adding a constant prefix. + + instances of this class wrap another password hash handler, + altering the constant prefix that's prepended to the wrapped + handlers' hashes. 
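For example, a ``{CRYPT}``-style wrapper behaves roughly as follows
(an illustrative sketch added by the editor, not part of the original docstring)::

    ldap_md5_crypt = PrefixWrapper("ldap_md5_crypt", "md5_crypt",
                                   prefix=u("{CRYPT}"))
    hash = ldap_md5_crypt.encrypt("password")    # -> u'{CRYPT}$1$...'
    ldap_md5_crypt.verify("password", hash)      # -> True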
+ + this is used mainly by the :doc:`ldap crypt ` handlers; + such as :class:`~passlib.hash.ldap_md5_crypt` which wraps :class:`~passlib.hash.md5_crypt` and adds a ``{CRYPT}`` prefix. + + usage:: + + myhandler = PrefixWrapper("myhandler", "md5_crypt", prefix="$mh$", orig_prefix="$1$") + + :param name: name to assign to handler + :param wrapped: handler object or name of registered handler + :param prefix: identifying prefix to prepend to all hashes + :param orig_prefix: prefix to strip (defaults to ''). + :param lazy: if True and wrapped handler is specified by name, don't look it up until needed. + """ + + def __init__(self, name, wrapped, prefix=u(''), orig_prefix=u(''), lazy=False, + doc=None, ident=None): + self.name = name + if isinstance(prefix, bytes): + prefix = prefix.decode("ascii") + self.prefix = prefix + if isinstance(orig_prefix, bytes): + orig_prefix = orig_prefix.decode("ascii") + self.orig_prefix = orig_prefix + if doc: + self.__doc__ = doc + if hasattr(wrapped, "name"): + self._check_handler(wrapped) + self._wrapped_handler = wrapped + else: + self._wrapped_name = wrapped + if not lazy: + self._get_wrapped() + + if ident is not None: + if ident is True: + # signal that prefix is identifiable in itself. + if prefix: + ident = prefix + else: + raise ValueError("no prefix specified") + if isinstance(ident, bytes): + ident = ident.decode("ascii") + # XXX: what if ident includes parts of wrapped hash's ident? + if ident[:len(prefix)] != prefix[:len(ident)]: + raise ValueError("ident must agree with prefix") + self._ident = ident + + _wrapped_name = None + _wrapped_handler = None + + def _check_handler(self, handler): + if 'ident' in handler.setting_kwds and self.orig_prefix: + # TODO: look into way to fix the issues. + warn("PrefixWrapper: 'orig_prefix' option may not work correctly " + "for handlers which have multiple identifiers: %r" % + (handler.name,), exc.PasslibRuntimeWarning) + + def _get_wrapped(self): + handler = self._wrapped_handler + if handler is None: + handler = get_crypt_handler(self._wrapped_name) + self._check_handler(handler) + self._wrapped_handler = handler + return handler + + wrapped = property(_get_wrapped) + + _ident = False + + @property + def ident(self): + value = self._ident + if value is False: + value = None + # XXX: how will this interact with orig_prefix ? + # not exposing attrs for now if orig_prefix is set. + if not self.orig_prefix: + wrapped = self.wrapped + ident = getattr(wrapped, "ident", None) + if ident is not None: + value = self._wrap_hash(ident) + self._ident = value + return value + + _ident_values = False + + @property + def ident_values(self): + value = self._ident_values + if value is False: + value = None + # XXX: how will this interact with orig_prefix ? + # not exposing attrs for now if orig_prefix is set. 
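# (deriving them would mean rewriting the wrapped handler's idents through
# _wrap_hash(), and how that should interact with a non-empty orig_prefix is
# exactly the open question noted above.)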
+ if not self.orig_prefix: + wrapped = self.wrapped + idents = getattr(wrapped, "ident_values", None) + if idents: + value = [ self._wrap_hash(ident) for ident in idents ] + ##else: + ## ident = self.ident + ## if ident is not None: + ## value = [ident] + self._ident_values = value + return value + + # attrs that should be proxied + _proxy_attrs = ( + "setting_kwds", "context_kwds", + "default_rounds", "min_rounds", "max_rounds", "rounds_cost", + "default_salt_size", "min_salt_size", "max_salt_size", + "salt_chars", "default_salt_chars", + "backends", "has_backend", "get_backend", "set_backend", + ) + + def __repr__(self): + args = [ repr(self._wrapped_name or self._wrapped_handler) ] + if self.prefix: + args.append("prefix=%r" % self.prefix) + if self.orig_prefix: + args.append("orig_prefix=%r" % self.orig_prefix) + args = ", ".join(args) + return 'PrefixWrapper(%r, %s)' % (self.name, args) + + def __dir__(self): + attrs = set(dir(self.__class__)) + attrs.update(self.__dict__) + wrapped = self.wrapped + attrs.update( + attr for attr in self._proxy_attrs + if hasattr(wrapped, attr) + ) + return list(attrs) + + def __getattr__(self, attr): + "proxy most attributes from wrapped class (e.g. rounds, salt size, etc)" + if attr in self._proxy_attrs: + return getattr(self.wrapped, attr) + raise AttributeError("missing attribute: %r" % (attr,)) + + def _unwrap_hash(self, hash): + "given hash belonging to wrapper, return orig version" + # NOTE: assumes hash has been validated as unicode already + prefix = self.prefix + if not hash.startswith(prefix): + raise exc.InvalidHashError(self) + # NOTE: always passing to handler as unicode, to save reconversion + return self.orig_prefix + hash[len(prefix):] + + def _wrap_hash(self, hash): + "given orig hash; return one belonging to wrapper" + # NOTE: should usually be native string. + # (which does mean extra work under py2, but not py3) + if isinstance(hash, bytes): + hash = hash.decode("ascii") + orig_prefix = self.orig_prefix + if not hash.startswith(orig_prefix): + raise exc.InvalidHashError(self.wrapped) + wrapped = self.prefix + hash[len(orig_prefix):] + return uascii_to_str(wrapped) + + def identify(self, hash): + hash = to_unicode_for_identify(hash) + if not hash.startswith(self.prefix): + return False + hash = self._unwrap_hash(hash) + return self.wrapped.identify(hash) + + def genconfig(self, **kwds): + config = self.wrapped.genconfig(**kwds) + if config is None: + return None + else: + return self._wrap_hash(config) + + def genhash(self, secret, config, **kwds): + if config is not None: + config = to_unicode(config, "ascii", "config/hash") + config = self._unwrap_hash(config) + return self._wrap_hash(self.wrapped.genhash(secret, config, **kwds)) + + def encrypt(self, secret, **kwds): + return self._wrap_hash(self.wrapped.encrypt(secret, **kwds)) + + def verify(self, secret, hash, **kwds): + hash = to_unicode(hash, "ascii", "hash") + hash = self._unwrap_hash(hash) + return self.wrapped.verify(secret, hash, **kwds) + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/utils/md4.py b/passlib/utils/md4.py new file mode 100644 index 00000000..cdc14939 --- /dev/null +++ b/passlib/utils/md4.py @@ -0,0 +1,266 @@ +""" +helper implementing insecure and obsolete md4 algorithm. 
+used for NTHASH format, which is also insecure and broken, +since it's just md4(password) + +implementated based on rfc at http://www.faqs.org/rfcs/rfc1320.html + +""" + +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify +import struct +from warnings import warn +# site +from passlib.utils.compat import b, bytes, bascii_to_str, irange, PY3 +# local +__all__ = [ "md4" ] +#============================================================================= +# utils +#============================================================================= +def F(x,y,z): + return (x&y) | ((~x) & z) + +def G(x,y,z): + return (x&y) | (x&z) | (y&z) + +##def H(x,y,z): +## return x ^ y ^ z + +MASK_32 = 2**32-1 + +#============================================================================= +# main class +#============================================================================= +class md4(object): + """pep-247 compatible implementation of MD4 hash algorithm + + .. attribute:: digest_size + + size of md4 digest in bytes (16 bytes) + + .. method:: update + + update digest by appending additional content + + .. method:: copy + + create clone of digest object, including current state + + .. method:: digest + + return bytes representing md4 digest of current content + + .. method:: hexdigest + + return hexdecimal version of digest + """ + # FIXME: make this follow hash object PEP better. + # FIXME: this isn't threadsafe + # XXX: should we monkeypatch ourselves into hashlib for general use? probably wouldn't be nice. + + name = "md4" + digest_size = digestsize = 16 + + _count = 0 # number of 64-byte blocks processed so far (not including _buf) + _state = None # list of [a,b,c,d] 32 bit ints used as internal register + _buf = None # data processed in 64 byte blocks, this holds leftover from last update + + def __init__(self, content=None): + self._count = 0 + self._state = [0x67452301, 0xefcdab89, 0x98badcfe, 0x10325476] + self._buf = b('') + if content: + self.update(content) + + # round 1 table - [abcd k s] + _round1 = [ + [0,1,2,3, 0,3], + [3,0,1,2, 1,7], + [2,3,0,1, 2,11], + [1,2,3,0, 3,19], + + [0,1,2,3, 4,3], + [3,0,1,2, 5,7], + [2,3,0,1, 6,11], + [1,2,3,0, 7,19], + + [0,1,2,3, 8,3], + [3,0,1,2, 9,7], + [2,3,0,1, 10,11], + [1,2,3,0, 11,19], + + [0,1,2,3, 12,3], + [3,0,1,2, 13,7], + [2,3,0,1, 14,11], + [1,2,3,0, 15,19], + ] + + # round 2 table - [abcd k s] + _round2 = [ + [0,1,2,3, 0,3], + [3,0,1,2, 4,5], + [2,3,0,1, 8,9], + [1,2,3,0, 12,13], + + [0,1,2,3, 1,3], + [3,0,1,2, 5,5], + [2,3,0,1, 9,9], + [1,2,3,0, 13,13], + + [0,1,2,3, 2,3], + [3,0,1,2, 6,5], + [2,3,0,1, 10,9], + [1,2,3,0, 14,13], + + [0,1,2,3, 3,3], + [3,0,1,2, 7,5], + [2,3,0,1, 11,9], + [1,2,3,0, 15,13], + ] + + # round 3 table - [abcd k s] + _round3 = [ + [0,1,2,3, 0,3], + [3,0,1,2, 8,9], + [2,3,0,1, 4,11], + [1,2,3,0, 12,15], + + [0,1,2,3, 2,3], + [3,0,1,2, 10,9], + [2,3,0,1, 6,11], + [1,2,3,0, 14,15], + + [0,1,2,3, 1,3], + [3,0,1,2, 9,9], + [2,3,0,1, 5,11], + [1,2,3,0, 13,15], + + [0,1,2,3, 3,3], + [3,0,1,2, 11,9], + [2,3,0,1, 7,11], + [1,2,3,0, 15,15], + ] + + def _process(self, block): + "process 64 byte block" + # unpack block into 16 32-bit ints + X = struct.unpack("<16I", block) + + # clone state + orig = self._state + state = list(orig) + + # round 1 - F function - (x&y)|(~x & z) + for a,b,c,d,k,s in self._round1: + t = (state[a] + F(state[b],state[c],state[d]) + X[k]) & MASK_32 + 
state[a] = ((t<<s) & MASK_32) | (t>>(32-s))
+
+        # round 2 - G function
+        for a,b,c,d,k,s in self._round2:
+            t = (state[a] + G(state[b],state[c],state[d]) + X[k] + 0x5a827999) & MASK_32
+            state[a] = ((t<<s) & MASK_32) | (t>>(32-s))
+
+        # round 3 - H function - x ^ y ^ z
+        for a,b,c,d,k,s in self._round3:
+            t = (state[a] + (state[b] ^ state[c] ^ state[d]) + X[k] + 0x6ed9eba1) & MASK_32
+            state[a] = ((t<<s) & MASK_32) | (t>>(32-s))
+
+        # add back into original state
+        for i in irange(4):
+            orig[i] = (orig[i]+state[i]) & MASK_32
+
+    def update(self, content):
+        if not isinstance(content, bytes):
+            raise TypeError("expected bytes")
+        buf = self._buf
+        if buf:
+            content = buf + content
+        idx = 0
+        end = len(content)
+        while True:
+            next = idx + 64
+            if next <= end:
+                self._process(content[idx:next])
+                self._count += 1
+                idx = next
+            else:
+                self._buf = content[idx:]
+                return
+
+    def copy(self):
+        other = _builtin_md4()
+        other._count = self._count
+        other._state = list(self._state)
+        other._buf = self._buf
+        return other
+
+    def digest(self):
+        # NOTE: backing up state so we can restore it after _process is called,
+        # in case object is updated again (this is only attr altered by this method)
+        orig = list(self._state)
+
+        # final block: buf + 0x80,
+        # then 0x00 padding until congruent w/ 56 mod 64 bytes
+        # then last 8 bytes = msg length in bits
+        buf = self._buf
+        msglen = self._count*512 + len(buf)*8
+        block = buf + b('\x80') + b('\x00') * ((119-len(buf)) % 64) + \
+            struct.pack("<2I", msglen & MASK_32, (msglen>>32) & MASK_32)
+        if len(block) == 128:
+            self._process(block[:64])
+            self._process(block[64:])
+        else:
+            assert len(block) == 64
+            self._process(block)
+
+        # render digest & restore un-finalized state
+        out = struct.pack("<4I", *self._state)
+        self._state = orig
+        return out
+
+    def hexdigest(self):
+        return bascii_to_str(hexlify(self.digest()))
+
+    #===================================================================
+    # eoc
+    #===================================================================
+
+# keep ref around for unittest, 'md4' usually replaced by ssl wrapper, below.
+_builtin_md4 = md4
+
+#=============================================================================
+# check if hashlib provides accelarated md4
+#=============================================================================
+import hashlib
+from passlib.utils import PYPY
+
+def _has_native_md4(): # pragma: no cover -- runtime detection
+    try:
+        h = hashlib.new("md4")
+    except ValueError:
+        # not supported - ssl probably missing (e.g. ironpython)
+        return False
+    result = h.hexdigest()
+    if result == '31d6cfe0d16ae931b73c59d7e0c089c0':
+        return True
+    if PYPY and result == '':
+        # workaround for https://bugs.pypy.org/issue957, fixed in PyPy 1.8
+        return False
+    # anything else and we should alert user
+    from passlib.exc import PasslibRuntimeWarning
+    warn("native md4 support disabled, sanity check failed!", PasslibRuntimeWarning)
+    return False
+
+if _has_native_md4():
+    # overwrite md4 class w/ hashlib wrapper
+    def md4(content=None):
+        "wrapper for hashlib.new('md4')"
+        return hashlib.new('md4', content or b(''))
+
+#=============================================================================
+# eof
+#=============================================================================
diff --git a/passlib/utils/pbkdf2.py b/passlib/utils/pbkdf2.py
new file mode 100644
index 00000000..1cd0d8f1
--- /dev/null
+++ b/passlib/utils/pbkdf2.py
@@ -0,0 +1,415 @@
+"""passlib.pbkdf2 - PBKDF2 support
+
+this module is getting increasingly poorly named.
+maybe rename to "kdf" since it's getting more key derivation functions added. +""" +#============================================================================= +# imports +#============================================================================= +# core +import hashlib +import logging; log = logging.getLogger(__name__) +import re +from struct import pack +from warnings import warn +# site +try: + from M2Crypto import EVP as _EVP +except ImportError: + _EVP = None +# pkg +from passlib.exc import PasslibRuntimeWarning, ExpectedTypeError +from passlib.utils import join_bytes, to_native_str, bytes_to_int, int_to_bytes, join_byte_values +from passlib.utils.compat import b, bytes, BytesIO, irange, callable, int_types +# local +__all__ = [ + "get_prf", + "pbkdf1", + "pbkdf2", +] + +#============================================================================= +# hash helpers +#============================================================================= + +# known hash names +_nhn_formats = dict(hashlib=0, iana=1) +_nhn_hash_names = [ + # (hashlib/ssl name, iana name or standin, ... other known aliases) + + # hashes with official IANA-assigned names + # (as of 2012-03 - http://www.iana.org/assignments/hash-function-text-names) + ("md2", "md2"), + ("md5", "md5"), + ("sha1", "sha-1"), + ("sha224", "sha-224", "sha2-224"), + ("sha256", "sha-256", "sha2-256"), + ("sha384", "sha-384", "sha2-384"), + ("sha512", "sha-512", "sha2-512"), + + # hashlib/ssl-supported hashes without official IANA names, + # hopefully compatible stand-ins have been chosen. + ("md4", "md4"), + ("sha", "sha-0", "sha0"), + ("ripemd", "ripemd"), + ("ripemd160", "ripemd-160"), +] + +# cache for norm_hash_name() +_nhn_cache = {} + +def norm_hash_name(name, format="hashlib"): + """Normalize hash function name + + :arg name: + Original hash function name. + + This name can be a Python :mod:`~hashlib` digest name, + a SCRAM mechanism name, IANA assigned hash name, etc. + Case is ignored, and underscores are converted to hyphens. + + :param format: + Naming convention to normalize to. + Possible values are: + + * ``"hashlib"`` (the default) - normalizes name to be compatible + with Python's :mod:`!hashlib`. + + * ``"iana"`` - normalizes name to IANA-assigned hash function name. + for hashes which IANA hasn't assigned a name for, issues a warning, + and then uses a heuristic to give a "best guess". + + :returns: + Hash name, returned as native :class:`!str`. 
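    A couple of illustrative conversions based on the alias table above
    (an editor's sketch, not part of the original docstring)::

        >>> from passlib.utils.pbkdf2 import norm_hash_name
        >>> norm_hash_name("SHA256")            # hashlib spelling
        'sha256'
        >>> norm_hash_name("SHA256", "iana")    # IANA spelling
        'sha-256'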
+    """
+    # check cache
+    try:
+        idx = _nhn_formats[format]
+    except KeyError:
+        raise ValueError("unknown format: %r" % (format,))
+    try:
+        return _nhn_cache[name][idx]
+    except KeyError:
+        pass
+    orig = name
+
+    # normalize input
+    if not isinstance(name, str):
+        name = to_native_str(name, 'utf-8', 'hash name')
+    name = re.sub("[_ /]", "-", name.strip().lower())
+    if name.startswith("scram-"):
+        name = name[6:]
+        if name.endswith("-plus"):
+            name = name[:-5]
+
+    # look through standard names and known aliases
+    def check_table(name):
+        for row in _nhn_hash_names:
+            if name in row:
+                _nhn_cache[orig] = row
+                return row[idx]
+    result = check_table(name)
+    if result:
+        return result
+
+    # try to clean name up, and recheck table
+    m = re.match("^(?P<name>[a-z]+)-?(?P<rev>\d)?-?(?P<size>\d{3,4})?$", name)
+    if m:
+        name, rev, size = m.group("name", "rev", "size")
+        if rev:
+            name += rev
+        if size:
+            name += "-" + size
+        result = check_table(name)
+        if result:
+            return result
+
+    # else we've done what we can
+    warn("norm_hash_name(): unknown hash: %r" % (orig,), PasslibRuntimeWarning)
+    name2 = name.replace("-", "")
+    row = _nhn_cache[orig] = (name2, name)
+    return row[idx]
+
+# TODO: get_hash() func which wraps norm_hash_name(), hashlib., and hashlib.new
+
+#=============================================================================
+# general prf lookup
+#=============================================================================
+_BNULL = b('\x00')
+_XY_DIGEST = b(',\x1cb\xe0H\xa5\x82M\xfb>\xd6\x98\xef\x8e\xf9oQ\x85\xa3i')
+
+_trans_5C = join_byte_values((x ^ 0x5C) for x in irange(256))
+_trans_36 = join_byte_values((x ^ 0x36) for x in irange(256))
+
+def _get_hmac_prf(digest):
+    "helper to return HMAC prf for specific digest"
+    def tag_wrapper(prf):
+        prf.__name__ = "hmac_" + digest
+        prf.__doc__ = ("hmac_%s(key, msg) -> digest;"
+                       " generated by passlib.utils.pbkdf2.get_prf()" %
+                       digest)
+
+    if _EVP and digest == "sha1":
+        # use m2crypto function directly for sha1, since that's it's default digest
+        try:
+            result = _EVP.hmac(b('x'),b('y'))
+        except ValueError: # pragma: no cover
+            pass
+        else:
+            if result == _XY_DIGEST:
+                return _EVP.hmac, 20
+        # don't expect to ever get here, but will fall back to pure-python if we do.
+        warn("M2Crypto.EVP.HMAC() returned unexpected result " # pragma: no cover -- sanity check
+             "during Passlib self-test!", PasslibRuntimeWarning)
+    elif _EVP:
+        # use m2crypto if it's present and supports requested digest
+        try:
+            result = _EVP.hmac(b('x'), b('y'), digest)
+        except ValueError:
+            pass
+        else:
+            # it does.
so use M2Crypto's hmac & digest code + hmac_const = _EVP.hmac + def prf(key, msg): + return hmac_const(key, msg, digest) + digest_size = len(result) + tag_wrapper(prf) + return prf, digest_size + + # fall back to hashlib-based implementation + digest_const = getattr(hashlib, digest, None) + if not digest_const: + raise ValueError("unknown hash algorithm: %r" % (digest,)) + tmp = digest_const() + block_size = tmp.block_size + assert block_size >= 16, "unacceptably low block size" + digest_size = tmp.digest_size + del tmp + def prf(key, msg): + # simplified version of stdlib's hmac module + if len(key) > block_size: + key = digest_const(key).digest() + key += _BNULL * (block_size - len(key)) + tmp = digest_const(key.translate(_trans_36) + msg).digest() + return digest_const(key.translate(_trans_5C) + tmp).digest() + tag_wrapper(prf) + return prf, digest_size + +# cache mapping prf name/func -> (func, digest_size) +_prf_cache = {} + +def _clear_prf_cache(): + "helper for unit tests" + _prf_cache.clear() + +def get_prf(name): + """lookup pseudo-random family (prf) by name. + + :arg name: + this must be the name of a recognized prf. + currently this only recognizes names with the format + :samp:`hmac-{digest}`, where :samp:`{digest}` + is the name of a hash function such as + ``md5``, ``sha256``, etc. + + this can also be a callable with the signature + ``prf(secret, message) -> digest``, + in which case it will be returned unchanged. + + :raises ValueError: if the name is not known + :raises TypeError: if the name is not a callable or string + + :returns: + a tuple of :samp:`({func}, {digest_size})`. + + * :samp:`{func}` is a function implementing + the specified prf, and has the signature + ``func(secret, message) -> digest``. + + * :samp:`{digest_size}` is an integer indicating + the number of bytes the function returns. + + usage example:: + + >>> from passlib.utils.pbkdf2 import get_prf + >>> hmac_sha256, dsize = get_prf("hmac-sha256") + >>> hmac_sha256 + + >>> dsize + 32 + >>> digest = hmac_sha256('password', 'message') + + this function will attempt to return the fastest implementation + it can find; if M2Crypto is present, and supports the specified prf, + :func:`M2Crypto.EVP.hmac` will be used behind the scenes. + """ + global _prf_cache + if name in _prf_cache: + return _prf_cache[name] + if isinstance(name, str): + if name.startswith("hmac-") or name.startswith("hmac_"): + retval = _get_hmac_prf(name[5:]) + else: + raise ValueError("unknown prf algorithm: %r" % (name,)) + elif callable(name): + # assume it's a callable, use it directly + digest_size = len(name(b('x'),b('y'))) + retval = (name, digest_size) + else: + raise ExpectedTypeError(name, "str or callable", "prf name") + _prf_cache[name] = retval + return retval + +#============================================================================= +# pbkdf1 support +#============================================================================= +def pbkdf1(secret, salt, rounds, keylen=None, hash="sha1"): + """pkcs#5 password-based key derivation v1.5 + + :arg secret: passphrase to use to generate key + :arg salt: salt string to use when generating key + :param rounds: number of rounds to use to generate key + :arg keylen: number of bytes to generate (if ``None``, uses digest's native size) + :param hash: + hash function to use. must be name of a hash recognized by hashlib. + + :returns: + raw bytes of generated key + + .. note:: + + This algorithm has been deprecated, new code should use PBKDF2. 
+ Among other limitations, ``keylen`` cannot be larger + than the digest size of the specified hash. + + """ + # validate secret & salt + if not isinstance(secret, bytes): + raise ExpectedTypeError(secret, "bytes", "secret") + if not isinstance(salt, bytes): + raise ExpectedTypeError(salt, "bytes", "salt") + + # validate rounds + if not isinstance(rounds, int_types): + raise ExpectedTypeError(rounds, "int", "rounds") + if rounds < 1: + raise ValueError("rounds must be at least 1") + + # resolve hash + try: + hash_const = getattr(hashlib, hash) + except AttributeError: + # check for ssl hash + # NOTE: if hash unknown, new() will throw ValueError, which we'd just + # reraise anyways; so instead of checking, we just let it get + # thrown during first use, below + # TODO: use builtin md4 class if hashlib doesn't have it. + def hash_const(msg): + return hashlib.new(hash, msg) + + # prime pbkdf1 loop, get block size + block = hash_const(secret + salt).digest() + + # validate keylen + if keylen is None: + keylen = len(block) + elif not isinstance(keylen, int_types): + raise ExpectedTypeError(keylen, "int or None", "keylen") + elif keylen < 0: + raise ValueError("keylen must be at least 0") + elif keylen > len(block): + raise ValueError("keylength too large for digest: %r > %r" % + (keylen, len(block))) + + # main pbkdf1 loop + for _ in irange(rounds-1): + block = hash_const(block).digest() + return block[:keylen] + +#============================================================================= +# pbkdf2 +#============================================================================= +MAX_BLOCKS = 0xffffffff # 2**32-1 +MAX_HMAC_SHA1_KEYLEN = MAX_BLOCKS*20 +# NOTE: the pbkdf2 spec does not specify a maximum number of rounds. +# however, many of the hashes in passlib are currently clamped +# at the 32-bit limit, just for sanity. once realistic pbkdf2 rounds +# start approaching 24 bits, this limit will be raised. + +def pbkdf2(secret, salt, rounds, keylen=None, prf="hmac-sha1"): + """pkcs#5 password-based key derivation v2.0 + + :arg secret: passphrase to use to generate key + :arg salt: salt string to use when generating key + :param rounds: number of rounds to use to generate key + :arg keylen: + number of bytes to generate. + if set to ``None``, will use digest size of selected prf. + :param prf: + psuedo-random family to use for key strengthening. + this can be any string or callable accepted by :func:`get_prf`. 
+ this defaults to ``"hmac-sha1"`` (the only prf explicitly listed in + the PBKDF2 specification) + + :returns: + raw bytes of generated key + """ + # validate secret & salt + if not isinstance(secret, bytes): + raise ExpectedTypeError(secret, "bytes", "secret") + if not isinstance(salt, bytes): + raise ExpectedTypeError(salt, "bytes", "salt") + + # validate rounds + if not isinstance(rounds, int_types): + raise ExpectedTypeError(rounds, "int", "rounds") + if rounds < 1: + raise ValueError("rounds must be at least 1") + + # validate keylen + if keylen is not None: + if not isinstance(keylen, int_types): + raise ExpectedTypeError(keylen, "int or None", "keylen") + elif keylen < 0: + raise ValueError("keylen must be at least 0") + + # special case for m2crypto + hmac-sha1 + if prf == "hmac-sha1" and _EVP: + if keylen is None: + keylen = 20 + # NOTE: doing check here, because M2crypto won't take 'long' instances + # (which this is when running under 32bit) + if keylen > MAX_HMAC_SHA1_KEYLEN: + raise ValueError("key length too long for digest") + + # NOTE: as of 2012-4-4, m2crypto has buffer overflow issue + # which may cause segfaults if keylen > 32 (EVP_MAX_KEY_LENGTH). + # therefore we're avoiding m2crypto for large keys until that's fixed. + # see https://bugzilla.osafoundation.org/show_bug.cgi?id=13052 + if keylen < 32: + return _EVP.pbkdf2(secret, salt, rounds, keylen) + + # resolve prf + prf_func, digest_size = get_prf(prf) + if keylen is None: + keylen = digest_size + + # figure out how many blocks we'll need + block_count = (keylen+digest_size-1)//digest_size + if block_count >= MAX_BLOCKS: + raise ValueError("key length too long for digest") + + # build up result from blocks + def gen(): + for i in irange(block_count): + digest = prf_func(secret, salt + pack(">L", i+1)) + accum = bytes_to_int(digest) + for _ in irange(rounds-1): + digest = prf_func(secret, digest) + accum ^= bytes_to_int(digest) + yield int_to_bytes(accum, digest_size) + return join_bytes(gen())[:keylen] + +#============================================================================= +# eof +#============================================================================= diff --git a/passlib/win32.py b/passlib/win32.py new file mode 100644 index 00000000..78155976 --- /dev/null +++ b/passlib/win32.py @@ -0,0 +1,68 @@ +"""passlib.win32 - MS Windows support - DEPRECATED, WILL BE REMOVED IN 1.8 + +the LMHASH and NTHASH algorithms are used in various windows related contexts, +but generally not in a manner compatible with how passlib is structured. + +in particular, they have no identifying marks, both being +32 bytes of binary data. thus, they can't be easily identified +in a context with other hashes, so a CryptHandler hasn't been defined for them. + +this module provided two functions to aid in any use-cases which exist. + +.. warning:: + + these functions should not be used for new code unless an existing + system requires them, they are both known broken, + and are beyond insecure on their own. + +.. autofunction:: raw_lmhash +.. autofunction:: raw_nthash + +See also :mod:`passlib.hash.nthash`. 
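A rough usage sketch (an editor's illustration, not part of the original module docs)::

    from passlib.win32 import raw_lmhash

    # returns 16 raw bytes by default, or a 32-char unicode hex string
    # with hex=True; the empty password yields the well-known value
    # u'aad3b435b51404eeaad3b435b51404ee'.
    digest = raw_lmhash(u"secret", hex=True)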
+""" + +from warnings import warn +warn("the 'passlib.win32' module is deprecated, and will be removed in " + "passlib 1.8; please use the 'passlib.hash.nthash' and " + "'passlib.hash.lmhash' classes instead.", + DeprecationWarning) + +#============================================================================= +# imports +#============================================================================= +# core +from binascii import hexlify +# site +# pkg +from passlib.utils.compat import b, unicode +from passlib.utils.des import des_encrypt_block +from passlib.hash import nthash +# local +__all__ = [ + "nthash", + "raw_lmhash", + "raw_nthash", +] +#============================================================================= +# helpers +#============================================================================= +LM_MAGIC = b("KGS!@#$%") + +raw_nthash = nthash.raw_nthash + +def raw_lmhash(secret, encoding="ascii", hex=False): + "encode password using des-based LMHASH algorithm; returns string of raw bytes, or unicode hex" + # NOTE: various references say LMHASH uses the OEM codepage of the host + # for it's encoding. until a clear reference is found, + # as well as a path for getting the encoding, + # letting this default to "ascii" to prevent incorrect hashes + # from being made w/o user explicitly choosing an encoding. + if isinstance(secret, unicode): + secret = secret.encode(encoding) + ns = secret.upper()[:14] + b("\x00") * (14-len(secret)) + out = des_encrypt_block(ns[:7], LM_MAGIC) + des_encrypt_block(ns[7:], LM_MAGIC) + return hexlify(out).decode("ascii") if hex else out + +#============================================================================= +# eoc +#=============================================================================
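As a closing illustration of how the newly vendored key-derivation helpers are
called (a minimal sketch; the secret, salt, and round count below are arbitrary
values chosen for the example, not taken from the patch):

    from passlib.utils.pbkdf2 import get_prf, pbkdf2

    # derive a 32-byte key using 10000 rounds of HMAC-SHA256
    key = pbkdf2(b"correct horse", b"battery staple", 10000,
                 keylen=32, prf="hmac-sha256")
    assert len(key) == 32

    # get_prf() exposes the underlying (func, digest_size) pair
    hmac_sha256, digest_size = get_prf("hmac-sha256")
    assert digest_size == 32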