Refactor token and map parsing

The aim of this change is to make it possible to verify nested EATs.
This requires finer-grained control over how the token structure is
parsed, as CBOR envelopes can now be present inside the tree.

So this change makes the parsing of the token and the map a recursive
operation, calling the necessary methods of the objects at each level.

Change-Id: I4c1e29deae7b238f2d82a73bd95c533f89492d40
Signed-off-by: Mate Toth-Pal <mate.toth-pal@arm.com>
diff --git a/iat-verifier/dev_scripts/generate-sample-iat.py b/iat-verifier/dev_scripts/generate-sample-iat.py
index 7350575..29bfa47 100755
--- a/iat-verifier/dev_scripts/generate-sample-iat.py
+++ b/iat-verifier/dev_scripts/generate-sample-iat.py
@@ -6,20 +6,24 @@
 #
 # -----------------------------------------------------------------------------
 
-import base64
+"""
+Generate a sample token, signing it with the specified key, and writing
+the output to the specified file.
+
+This script is deprecated - use ``compile_token`` (see above) instead.
+"""
 import struct
 
-import cbor2
-from ecdsa import SigningKey
-from pycose.sign1message import Sign1Message
-
-from iatverifier.util import sign_eat
-
-from iatverifier.psa_iot_profile1_token_claims import InstanceIdClaim, ImplementationIdClaim, ChallengeClaim
-from iatverifier.psa_iot_profile1_token_claims import ClientIdClaim, SecurityLifecycleClaim, ProfileIdClaim
-from iatverifier.psa_iot_profile1_token_claims import BootSeedClaim, SWComponentsClaim, SWComponentTypeClaim
-from iatverifier.psa_iot_profile1_token_claims import SignerIdClaim, SwComponentVersionClaim
-from iatverifier.psa_iot_profile1_token_claims import MeasurementValueClaim, MeasurementDescriptionClaim
+from iatverifier.util import convert_map_to_token, read_keyfile
+from iatverifier.attest_token_verifier import AttestationTokenVerifier
+from iatverifier.psa_iot_profile1_token_claims import InstanceIdClaim, ImplementationIdClaim
+from iatverifier.psa_iot_profile1_token_claims import ChallengeClaim, ClientIdClaim
+from iatverifier.psa_iot_profile1_token_claims import SecurityLifecycleClaim, ProfileIdClaim
+from iatverifier.psa_iot_profile1_token_claims import BootSeedClaim, SWComponentsClaim
+from iatverifier.psa_iot_profile1_token_claims import SWComponentTypeClaim, SignerIdClaim
+from iatverifier.psa_iot_profile1_token_claims import SwComponentVersionClaim
+from iatverifier.psa_iot_profile1_token_claims import MeasurementValueClaim
+from iatverifier.psa_iot_profile1_token_claims import MeasurementDescriptionClaim
 from iatverifier.psa_iot_profile1_token_verifier import PSAIoTProfile1TokenVerifier
 
 # First byte indicates "GUID"
@@ -81,15 +85,17 @@
 if __name__ == '__main__':
     import sys
     if len(sys.argv) != 3:
-        print('Usage: {} KEYFILE OUTFILE'.format(sys.argv[0]))
+        print(f'Usage: {sys.argv[0]} KEYFILE OUTFILE')
         sys.exit(1)
     keyfile = sys.argv[1]
     outfile = sys.argv[2]
 
-    sk = SigningKey.from_pem(open(keyfile, 'rb').read())
-    token = cbor2.dumps(token_map)
-    verifier = PSAIoTProfile1TokenVerifier.get_verifier()
-    signed_token = sign_eat(token, verifier, add_p_header=False, key=sk)
-
+    key = read_keyfile(keyfile,
+                       method=AttestationTokenVerifier.SIGN_METHOD_SIGN1)
+    verifier = PSAIoTProfile1TokenVerifier(signing_key=key,
+                                           method=AttestationTokenVerifier.SIGN_METHOD_SIGN1,
+                                           cose_alg=AttestationTokenVerifier.COSE_ALG_ES256,
+                                           configuration=None)
     with open(outfile, 'wb') as wfh:
-        wfh.write(signed_token)
+        convert_map_to_token(token_map, verifier, wfh, add_p_header=False,
+            name_as_key=False, parse_raw_value=False)
diff --git a/iat-verifier/iatverifier/attest_token_verifier.py b/iat-verifier/iatverifier/attest_token_verifier.py
index 2ee2bba..bd93441 100644
--- a/iat-verifier/iatverifier/attest_token_verifier.py
+++ b/iat-verifier/iatverifier/attest_token_verifier.py
@@ -5,13 +5,29 @@
 #
 # -----------------------------------------------------------------------------
 
+"""
+Class definitions to use as base for claim and verifier classes.
+"""
+
+
 import logging
 from abc import ABC, abstractmethod
+from dataclasses import dataclass
+from io import BytesIO
+
+from pycose.attributes import CoseAttrs
+from pycose.sign1message import Sign1Message
+from pycose.mac0message import Mac0Message
 
 import cbor2
+from cbor2 import CBOREncoder
 
 logger = logging.getLogger('iat-verifiers')
 
+_CBOR_MAJOR_TYPE_ARRAY = 4
+_CBOR_MAJOR_TYPE_MAP = 5
+_CBOR_MAJOR_TYPE_SEMANTIC_TAG = 6
+
 class AttestationClaim(ABC):
     """
     This class represents a claim.
@@ -33,13 +49,16 @@
     RECOMMENDED = 1
     OPTIONAL = 2
 
-    def __init__(self, verifier, *, necessity=MANDATORY):
+    def __init__(self, *, verifier, necessity=MANDATORY):
         self.config = verifier.config
         self.verifier = verifier
         self.necessity = necessity
         self.verify_count = 0
+        self.cross_claim_requirement_checker = None
 
+    #
     # Abstract methods
+    #
 
     @abstractmethod
     def verify(self, value):
@@ -64,42 +83,91 @@
         calling this method with or without an instance as well."""
         raise NotImplementedError
 
+    #
     # Default methods that a derived class might override
-
-    def get_contained_claim_key_list(self):
-        """Return a dictionary of the claims that can be present in this claim
-
-        Return a dictionary where keys are the claim keys (the same that is
-        returned by get_claim_key), and the values are the claim classes for
-        that key.
-        """
-        return {}
+    #
 
     def decode(self, value):
         """
         Decode the value of the claim if the value is an UTF-8 string
         """
-        if self.is_utf_8():
+        if type(self).is_utf_8():
             try:
                 return value.decode()
-            except UnicodeDecodeError as e:
+            except UnicodeDecodeError as exc:
                 msg = 'Error decodeing value for "{}": {}'
-                self.verifier.error(msg.format(self.get_claim_name(), e))
+                self.verifier.error(msg.format(self.get_claim_name(), exc))
                 return str(value)[2:-1]
         else:  # not a UTF-8 value, i.e. a bytestring
             return value
 
-    def add_value_to_dict(self, token, value):
-        """Add 'value' to the dict 'token'"""
-        entry_name = self.get_claim_name()
-        if isinstance(value, bytes):
-            value = self.decode(value)
-        token[entry_name] = value
-
     def claim_found(self):
         """Return true if verify was called on tis claim instance"""
         return self.verify_count>0
 
+    @classmethod
+    def is_utf_8(cls):
+        """Returns whether the value of this claim should be UTF-8"""
+        return False
+
+    def convert_map_to_token(self,
+                             token_encoder,
+                             token_map,
+                             *, add_p_header,
+                             name_as_key,
+                             parse_raw_value):
+        """Encode a map in cbor format using the 'token_encoder'"""
+        # pylint: disable=unused-argument
+        value = token_map
+        if parse_raw_value:
+            value = type(self).parse_raw(value)
+        return token_encoder.encode(value)
+
+    def parse_token(self, *, token, verify, check_p_header, lower_case_key):
+        """Parse a token into a map
+
+        This function is recursive for composite claims and for token verifiers.
+        A big difference is that the parameter token should be a map for claim
+        objects, and a 'bytes' object for verifiers. The entry point to this
+        function is calling the parse_token function of a verifier.
+
+        From some aspects it would be cleaner to have different functions for
+        this in verifiers and claims, but that would require to do a type check
+        in every recursive step to see which method to call. So instead the
+        method name is the same, and the 'token' parameter is interpreted
+        differently."""
+        # pylint: disable=unused-argument
+        if verify:
+            self.verify(token)
+
+        formatted = type(self).get_formatted_value(token)
+
+        # If the formatted value is still a bytestring then try to decode
+        if isinstance(formatted, bytes):
+            formatted = self.decode(formatted)
+        return formatted
+
+    @classmethod
+    def parse_raw(cls, raw_value):
+        """Parse a raw value
+
+        Takes a string, as it appears in a yaml file, and converts it to a
+        numeric value according to the claim's definition.
+        """
+        return raw_value
+
+    @classmethod
+    def get_formatted_value(cls, value):
+        """Format the value according to this claim"""
+        if cls.is_utf_8():
+            # this is an UTF-8 value, force string type
+            return f'{value}'
+        return value
+
+    #
+    # Helper functions to be called from derived classes
+    #
+
     def _check_type(self, name, value, expected_type):
         """Check that a value's type is as expected"""
         if not isinstance(value, expected_type):
@@ -109,7 +177,7 @@
         return True
 
     def _validate_bytestring_length_equals(self, value, name, expected_len):
-        """Check that a bytestreams length is as expected"""
+        """Check that a bytestring length is as expected"""
         self._check_type(name, value, bytes)
 
         value_len = len(value)
@@ -117,8 +185,26 @@
             msg = 'Invalid {} length: must be exactly {} bytes, found {} bytes'
             self.verifier.error(msg.format(name, expected_len, value_len))
 
+    def _validate_bytestring_length_one_of(self, value, name, possible_lens):
+        """Check that a bytestring length is one of the expected values"""
+        self._check_type(name, value, bytes)
+
+        value_len = len(value)
+        if value_len not in possible_lens:
+            msg = 'Invalid {} length: must be one of {} bytes, found {} bytes'
+            self.verifier.error(msg.format(name, possible_lens, value_len))
+
+    def _validate_bytestring_length_between(self, value, name, min_len, max_len):
+        """Check that a bytestring length is within the expected range"""
+        self._check_type(name, value, bytes)
+
+        value_len = len(value)
+        if value_len < min_len or value_len > max_len:
+            msg = 'Invalid {} length: must be between {} and {} bytes, found {} bytes'
+            self.verifier.error(msg.format(name, min_len, max_len, value_len))
+
     def _validate_bytestring_length_is_at_least(self, value, name, minimal_length):
-        """Check that a bytestream has a minimum length"""
+        """Check that a bytestring has a minimum length"""
         self._check_type(name, value, bytes)
 
         value_len = len(value)
@@ -126,28 +212,11 @@
             msg = 'Invalid {} length: must be at least {} bytes, found {} bytes'
             self.verifier.error(msg.format(name, minimal_length, value_len))
 
-    @staticmethod
-    def parse_raw(raw_value):
-        """Parse a raw value
-
-        As it appears in a yaml file
-        """
-        return raw_value
-
-    @staticmethod
-    def get_formatted_value(value):
-        """Format the value according to this claim"""
-        return value
-
-    def is_utf_8(self):
-        """Returns whether the value of this claim should be UTF-8"""
-        return False
-
-    def check_cross_claim_requirements(self):
-        """Check whether the claims inside this claim satisfy requirements"""
-
 
 class NonVerifiedClaim(AttestationClaim):
+    """An abstract claim type for which verify() always passes.
+
+    Can be used for claims for which no verification is implemented."""
     def verify(self, value):
         self.verify_count += 1
 
@@ -161,7 +230,12 @@
     claim.
     """
 
-    def __init__(self, verifier, *, claims, is_list, necessity=AttestationClaim.MANDATORY):
+    def __init__(self,
+                 *, verifier,
+                 claims,
+                 is_list,
+                 cross_claim_requirement_checker,
+                 necessity=AttestationClaim.MANDATORY):
         """ Initialise a composite claim.
 
         In case 'is_list' is False, the expected type of value is a dictionary,
@@ -170,106 +244,186 @@
         containing a number of dictionaries, each one containing the necessary
         claims determined by the 'claims' list.
         """
-        super().__init__(verifier, necessity=necessity)
+        super().__init__(verifier=verifier, necessity=necessity)
         self.is_list = is_list
         self.claims = claims
+        self.cross_claim_requirement_checker = cross_claim_requirement_checker
 
     def _get_contained_claims(self):
-        return [claim(self.verifier, **args) for claim, args in self.claims]
+        claims = []
+        for claim, args in self.claims:
+            try:
+                claims.append(claim(**args))
+            except TypeError as exc:
+                raise TypeError(f"Failed to instantiate '{claim}' with args '{args}' in token " +
+                                f"{type(self.verifier)}\nSee error in exception above.") from exc
+        return claims
 
-    def get_contained_claim_key_list(self):
+
+    def verify(self, value):
+        self.verify_count += 1
+
+    def _parse_token_dict(self, *, entry_number, token, verify, check_p_header, lower_case_key):
         ret = {}
-        for claim in self._get_contained_claims():
-            ret[claim.get_claim_key()] = claim.__class__
-        return ret
 
-    def _verify_dict(self, entry_number, value):
-        if not self._check_type(self.get_claim_name(), value, dict):
-            return
+        if verify:
+            self.verify(token)
+            if not self._check_type(self.get_claim_name(), token, dict):
+                return None
+        else:
+            if not isinstance(token, dict):
+                return token
 
-        claims = {v.get_claim_key(): v for v in self._get_contained_claims()}
-        for k, v in value.items():
-            if k not in claims.keys():
-                if self.config.strict:
+        claims = {val.get_claim_key(): val for val in self._get_contained_claims()}
+        for key, val in token.items():
+            if key not in claims.keys():
+                if verify and self.config.strict:
                     msg = 'Unexpected {} claim: {}'
-                    self.verifier.error(msg.format(self.get_claim_name(), k))
+                    self.verifier.error(msg.format(self.get_claim_name(), key))
                 else:
                     continue
             try:
-                claims[k].verify(v)
+                claim = claims[key]
+                name = claim.get_claim_name()
+                if lower_case_key:
+                    name = name.lower()
+                ret[name] = claim.parse_token(
+                    token=val,
+                    verify=verify,
+                    check_p_header=check_p_header,
+                    lower_case_key=lower_case_key)
             except Exception:
                 if not self.config.keep_going:
                     raise
 
-        # Check claims' necessity
+        if verify:
+            self._check_claims_necessity(entry_number, claims)
+            if self.cross_claim_requirement_checker is not None:
+                self.cross_claim_requirement_checker(self.verifier, claims)
+
+        return ret
+
+    def _check_claims_necessity(self, entry_number, claims):
         for claim in claims.values():
             if not claim.claim_found():
                 if claim.necessity==AttestationClaim.MANDATORY:
-                    msg = ('Invalid IAT: missing MANDATORY claim "{}" '
-                        'from {}').format(claim.get_claim_name(),
-                                    self.get_claim_name())
+                    msg = (f'Invalid IAT: missing MANDATORY claim "{claim.get_claim_name()}" '
+                        f'from {self.get_claim_name()}')
                     if entry_number is not None:
-                        msg += ' at index {}'.format(entry_number)
+                        msg += f' at index {entry_number}'
                     self.verifier.error(msg)
                 elif claim.necessity==AttestationClaim.RECOMMENDED:
-                    msg = ('Missing RECOMMENDED claim "{}" '
-                        'from {}').format(claim.get_claim_name(),
-                                    self.get_claim_name())
+                    msg = (f'Missing RECOMMENDED claim "{claim.get_claim_name()}" '
+                        f'from {self.get_claim_name()}')
                     if entry_number is not None:
-                        msg += ' at index {}'.format(entry_number)
+                        msg += f' at index {entry_number}'
                     self.verifier.warning(msg)
 
-    def verify(self, value):
-        """
-        Verify a composite claim.
-        """
+    def parse_token(self, *, token, verify, check_p_header, lower_case_key):
+        """This expects a raw token map as 'token'"""
+
         if self.is_list:
-            if not self._check_type(self.get_claim_name(), value, list):
-                return
+            ret = []
+            if verify:
+                if not self._check_type(self.get_claim_name(), token, list):
+                    return None
+            else:
+                if not isinstance(token, list):
+                    return token
+            for entry_number, entry in enumerate(token):
+                ret.append(self._parse_token_dict(
+                    entry_number=entry_number,
+                    check_p_header=check_p_header,
+                    token=entry,
+                    verify=verify,
+                    lower_case_key=lower_case_key))
+            return ret
+        return self._parse_token_dict(
+            entry_number=None,
+            check_p_header=check_p_header,
+            token=token,
+            verify=verify,
+            lower_case_key=lower_case_key)
 
-            for entry_number, entry in enumerate(value):
-                self._verify_dict(entry_number, entry)
+
+    def _encode_dict(self, token_encoder, token_map, *, add_p_header, name_as_key, parse_raw_value):
+        token_encoder.encode_length(_CBOR_MAJOR_TYPE_MAP, len(token_map))
+        if name_as_key:
+            claims = {claim.get_claim_name().lower():
+                claim for claim in self._get_contained_claims()}
         else:
-            self._verify_dict(None, value)
-
-        self.verify_count += 1
-
-    def _decode_dict(self, raw_dict):
-        decoded_dict = {}
-        names = {claim.get_claim_key(): claim.get_claim_name() for claim in self._get_contained_claims()}
-        for k, v in raw_dict.items():
-            if isinstance(v, bytes):
-                v = self.decode(v)
+            claims = {claim.get_claim_key(): claim for claim in self._get_contained_claims()}
+        for key, val in token_map.items():
             try:
-                decoded_dict[names[k]] = v
+                claim = claims[key]
+                key = claim.get_claim_key()
+                token_encoder.encode(key)
+                claim.convert_map_to_token(
+                    token_encoder,
+                    val,
+                    add_p_header=add_p_header,
+                    name_as_key=name_as_key,
+                    parse_raw_value=parse_raw_value)
             except KeyError:
                 if self.config.strict:
                     if not self.config.keep_going:
                         raise
                 else:
-                    decoded_dict[k] = v
-        return decoded_dict
+                    token_encoder.encode(key)
+                    token_encoder.encode(val)
 
-    def add_value_to_dict(self, token, value):
-        entry_name = self.get_claim_name()
-        try:
-            token[entry_name] = []
-            for raw_dict in value:
-                decoded_dict = self._decode_dict(raw_dict)
-                token[entry_name].append(decoded_dict)
-        except TypeError:
-            self.verifier.error('Invalid {} value: {}'.format(self.get_claim_name(), value))
+    def convert_map_to_token(
+            self,
+            token_encoder,
+            token_map,
+            *, add_p_header,
+            name_as_key,
+            parse_raw_value):
+        if self.is_list:
+            token_encoder.encode_length(_CBOR_MAJOR_TYPE_ARRAY, len(token_map))
+            for item in token_map:
+                self._encode_dict(
+                    token_encoder,
+                    item,
+                    add_p_header=add_p_header,
+                    name_as_key=name_as_key,
+                    parse_raw_value=parse_raw_value)
+        else:
+            self._encode_dict(
+                token_encoder,
+                token_map,
+                add_p_header=add_p_header,
+                name_as_key=name_as_key,
+                parse_raw_value=parse_raw_value)
 
 
-
+@dataclass
 class VerifierConfiguration:
-    def __init__(self, keep_going=False, strict=False):
-        self.keep_going=keep_going
-        self.strict=strict
+    """A class storing the configuration of the verifier.
 
-class AttestationTokenVerifier:
+    At the moment this determines what should happen if a problem is found
+    during verification.
+    """
+    keep_going: bool = False
+    strict: bool = False
 
-    all_known_claims = {}
+class AttestTokenRootClaims(CompositeAttestClaim):
+    """A claim type that is used to represent the claims in a token.
+
+    It is instantiated by AttestationTokenVerifier, and shouldn't be used
+    outside this module."""
+    def get_claim_key(self=None):
+        return None
+
+    def get_claim_name(self=None):
+        return None
+
+# This class inherits from NonVerifiedClaim. The actual claims in the token are
+# checked by the AttestTokenRootClaims object owned by this verifier. The
+# verify() function of the AttestTokenRootClaims object is called during
+# traversing the claim tree.
+class AttestationTokenVerifier(NonVerifiedClaim):
+    """Abstract base class for attestation token verifiers"""
 
     SIGN_METHOD_SIGN1 = "sign"
     SIGN_METHOD_MAC0 = "mac"
@@ -283,78 +437,221 @@
     COSE_ALG_HS384="HS384"
     COSE_ALG_HS512="HS512"
 
-    def __init__(self, method, cose_alg, configuration=None):
-        self.method = method
-        self.cose_alg = cose_alg
-        self.config = configuration if configuration is not None else VerifierConfiguration()
-        self.claims = []
+    @abstractmethod
+    def _get_p_header(self):
+        """Return the protected header for this Token
 
-        self.seen_errors = False
+        Return a dictionary if p_header should be present, and None if the token
+        doesn't define a protected header.
+        """
+        raise NotImplementedError
 
-    def add_claims(self, claims):
-        for claim in claims:
-            key = claim.get_claim_key()
-            if key not in AttestationTokenVerifier.all_known_claims:
-                AttestationTokenVerifier.all_known_claims[key] = claim.__class__
-
-            AttestationTokenVerifier.all_known_claims.update(claim.get_contained_claim_key_list())
-        self.claims.extend(claims)
-
-    def check_cross_claim_requirements(self):
-        pass
-
-    def decode_and_validate_iat(self, encoded_iat):
-        try:
-            raw_token = cbor2.loads(encoded_iat)
-        except Exception as e:
-            msg = 'Invalid CBOR: {}'
-            raise ValueError(msg.format(e))
-
-        claims = {v.get_claim_key(): v for v in self.claims}
-
-        token = {}
-        while not hasattr(raw_token, 'items'):
-            # TODO: token map is not a map. We are assuming that it is a tag
-            raw_token = raw_token.value
-        for entry in raw_token.keys():
-            value = raw_token[entry]
-
-            try:
-                claim = claims[entry]
-            except KeyError:
-                if self.config.strict:
-                    self.error('Invalid IAT claim: {}'.format(entry))
-                token[entry] = value
-                continue
-
-            claim.verify(value)
-            claim.add_value_to_dict(token, value)
-
-        # Check claims' necessity
-        for claim in claims.values():
-            if not claim.claim_found():
-                if claim.necessity==AttestationClaim.MANDATORY:
-                    msg = 'Invalid IAT: missing MANDATORY claim "{}"'
-                    self.error(msg.format(claim.get_claim_name()))
-                elif claim.necessity==AttestationClaim.RECOMMENDED:
-                    msg = 'Missing RECOMMENDED claim "{}"'
-                    self.warning(msg.format(claim.get_claim_name()))
-
-            claim.check_cross_claim_requirements()
-
-        self.check_cross_claim_requirements()
-
-        return token
-
-
-    def get_wrapping_tag(self=None):
+    @abstractmethod
+    def _get_wrapping_tag(self):
         """The value of the tag that the token is wrapped in.
 
         The function should return None if the token is not wrapped.
         """
         return None
 
+    @abstractmethod
+    def _parse_p_header(self, msg):
+        """Throw exception in case of error"""
+
+    @staticmethod
+    @abstractmethod
+    def check_cross_claim_requirements(verifier, claims):
+        """Throw exception in case of error"""
+
+    def _get_cose_alg(self):
+        return self.cose_alg
+
+    def _get_method(self):
+        return self.method
+
+    def _get_signing_key(self):
+        return self.signing_key
+
+    def __init__(
+            self,
+            *, method,
+            cose_alg,
+            signing_key,
+            claims,
+            configuration=None,
+            necessity=AttestationClaim.MANDATORY):
+        self.method = method
+        self.cose_alg = cose_alg
+        self.signing_key=signing_key
+        self.config = configuration if configuration is not None else VerifierConfiguration()
+        self.seen_errors = False
+        self.claims = AttestTokenRootClaims(
+                verifier=self,
+                claims=claims,
+                is_list=False,
+                cross_claim_requirement_checker=type(self).check_cross_claim_requirements,
+                necessity=necessity)
+
+        super().__init__(verifier=self, necessity=necessity)
+
+    def _sign_token(self, token, add_p_header):
+        """Signs a token"""
+        if self._get_method() == AttestationTokenVerifier.SIGN_METHOD_RAW:
+            return token
+        if self._get_method() == AttestationTokenVerifier.SIGN_METHOD_SIGN1:
+            return self._sign_eat(token, add_p_header)
+        if self._get_method() == AttestationTokenVerifier.SIGN_METHOD_MAC0:
+            return self._hmac_eat(token, add_p_header)
+        err_msg = 'Unexpected method "{}"; must be one of: raw, sign, mac'
+        raise ValueError(err_msg.format(self.method))
+
+    def _sign_eat(self, token, add_p_header):
+        protected_header = CoseAttrs()
+        p_header=self._get_p_header()
+        key=self._get_signing_key()
+        if add_p_header and p_header is not None and key:
+            protected_header.update(p_header)
+        signed_msg = Sign1Message(p_header=protected_header)
+        signed_msg.payload = token
+        if key:
+            signed_msg.key = key
+            signed_msg.signature = signed_msg.compute_signature(alg=self._get_cose_alg())
+        return signed_msg.encode()
+
+
+    def _hmac_eat(self, token, add_p_header):
+        protected_header = CoseAttrs()
+        p_header=self._get_p_header()
+        key=self._get_signing_key()
+        if add_p_header and p_header is not None and key:
+            protected_header.update(p_header)
+        hmac_msg = Mac0Message(payload=token, key=key, p_header=protected_header)
+        hmac_msg.compute_auth_tag(alg=self.cose_alg)
+        return hmac_msg.encode()
+
+
+    def _get_cose_sign1_payload(self, cose, *, check_p_header, verify_signature):
+        msg = Sign1Message.decode(cose)
+        if verify_signature:
+            key = self._get_signing_key()
+            if check_p_header:
+                self._parse_p_header(msg)
+            msg.key = key
+            msg.signature = msg.signers
+            try:
+                msg.verify_signature(alg=self._get_cose_alg())
+            except Exception as exc:
+                raise ValueError(f'Bad signature ({exc})') from exc
+        return msg.payload
+
+
+    def _get_cose_mac0_payload(self, cose, *, check_p_header, verify_signature):
+        msg = Mac0Message.decode(cose)
+        if verify_signature:
+            key = self._get_signing_key()
+            if check_p_header:
+                self._parse_p_header(msg)
+            msg.key = key
+            try:
+                msg.verify_auth_tag(alg=self._get_cose_alg())
+            except Exception as exc:
+                raise ValueError(f'Bad signature ({exc})') from exc
+        return msg.payload
+
+
+    def _get_cose_payload(self, cose, *, check_p_header, verify_signature):
+        """Return the payload of a COSE envelope"""
+        if self._get_method() == AttestationTokenVerifier.SIGN_METHOD_SIGN1:
+            return self._get_cose_sign1_payload(
+                cose,
+                check_p_header=check_p_header,
+                verify_signature=verify_signature)
+        if self._get_method() == AttestationTokenVerifier.SIGN_METHOD_MAC0:
+            return self._get_cose_mac0_payload(
+                cose,
+                check_p_header=check_p_header,
+                verify_signature=verify_signature)
+        err_msg = f'Unexpected method "{self._get_method()}"; must be one of: sign, mac'
+        raise ValueError(err_msg)
+
+
+    def convert_map_to_token(
+            self,
+            token_encoder,
+            token_map,
+            *, add_p_header,
+            name_as_key,
+            parse_raw_value,
+            root=False):
+        with BytesIO() as b_io:
+            # Create a new encoder instance
+            encoder = CBOREncoder(b_io)
+
+            # Add tag if necessary
+            wrapping_tag = self._get_wrapping_tag()
+            if wrapping_tag is not None:
+                # TODO: this doesn't save the string references used up to the
+                # point that this tag is added (see encode_semantic(...) in cbor2's
+                # encoder.py). This is not a problem as long as the tokens don't
+                # use string references (which is the case for now).
+                encoder.encode_length(_CBOR_MAJOR_TYPE_SEMANTIC_TAG, wrapping_tag)
+
+            # Encode the token payload
+            self.claims.convert_map_to_token(
+                encoder,
+                token_map,
+                add_p_header=add_p_header,
+                name_as_key=name_as_key,
+                parse_raw_value=parse_raw_value)
+
+            token = b_io.getvalue()
+
+            # Sign and pack in a COSE envelope if necessary
+            signed_token = self._sign_token(token, add_p_header=add_p_header)
+
+            # Pack as a bstr if necessary
+            if root:
+                token_encoder.write(signed_token)
+            else:
+                token_encoder.encode_bytestring(signed_token)
+
+    def parse_token(self, *, token, verify, check_p_header, lower_case_key):
+        if self._get_method() == AttestationTokenVerifier.SIGN_METHOD_RAW:
+            payload = token
+        else:
+            try:
+                payload = self._get_cose_payload(
+                    token,
+                    check_p_header=check_p_header,
+                    verify_signature=(verify and self._get_signing_key() is not None))
+            except Exception as exc:
+                msg = f'Bad COSE: {exc}'
+                raise ValueError(msg) from exc
+
+        try:
+            raw_map = cbor2.loads(payload)
+        except Exception as exc:
+            msg = f'Invalid CBOR: {exc}'
+            raise ValueError(msg) from exc
+
+        wrapping_tag = self._get_wrapping_tag()
+        if wrapping_tag is not None:
+            if verify and wrapping_tag != raw_map.tag:
+                msg = 'Invalid token: token is wrapped in tag {} instead of {}'
+                raise ValueError(msg.format(raw_map.tag, wrapping_tag))
+            raw_map = raw_map.value
+
+        if verify:
+            self.verify(token)
+
+        return self.claims.parse_token(
+            token=raw_map,
+            check_p_header=check_p_header,
+            verify=verify,
+            lower_case_key=lower_case_key)
+
     def error(self, message):
+        """Act on an error depending on the configuration of this verifier"""
         self.seen_errors = True
         if self.config.keep_going:
             logger.error(message)
@@ -362,4 +659,5 @@
             raise ValueError(message)
 
     def warning(self, message):
+        """Print a warning with the logger of this verifier"""
         logger.warning(message)
diff --git a/iat-verifier/iatverifier/psa_iot_profile1_token_claims.py b/iat-verifier/iatverifier/psa_iot_profile1_token_claims.py
index d8a0828..99e29a1 100644
--- a/iat-verifier/iatverifier/psa_iot_profile1_token_claims.py
+++ b/iat-verifier/iatverifier/psa_iot_profile1_token_claims.py
@@ -5,6 +5,10 @@
 #
 # -----------------------------------------------------------------------------
 
+"""
+This module contains classes that represent claims for PSA IoT Profile1 Attestation token.
+"""
+
 import string
 
 from iatverifier.attest_token_verifier import AttestationClaim, NonVerifiedClaim
@@ -17,8 +21,9 @@
 SW_COMPONENT_RANGE = 0
 
 class InstanceIdClaim(AttestationClaim):
-    def __init__(self, verifier, *, expected_len, necessity=AttestationClaim.MANDATORY):
-        super().__init__(verifier, necessity=necessity)
+    """Class representing a PSA Attestation Token Instance ID claim"""
+    def __init__(self, *, verifier, expected_len, necessity=AttestationClaim.MANDATORY):
+        super().__init__(verifier=verifier, necessity=necessity)
         self.expected_len = expected_len
 
     def get_claim_key(self=None):
@@ -36,7 +41,7 @@
 
 
 class ChallengeClaim(AttestationClaim):
-
+    """Class representing a PSA Attestation Token Challenge claim"""
     HASH_SIZES = [32, 48, 64]
 
     def get_claim_key(self=None):
@@ -56,6 +61,7 @@
 
 
 class ImplementationIdClaim(NonVerifiedClaim):
+    """Class representing a PSA Attestation Token Implementation ID claim"""
     def get_claim_key(self=None):
         return ARM_RANGE - 3
 
@@ -64,6 +70,7 @@
 
 
 class HardwareVersionClaim(AttestationClaim):
+    """Class representing a PSA Attestation Token Hardware version claim"""
     def verify(self, value):
         self._check_type('HARDWARE_VERSION', value, str)
 
@@ -85,12 +92,13 @@
     def get_claim_name(self=None):
         return 'HARDWARE_VERSION'
 
-    def is_utf_8(self):
+    @classmethod
+    def is_utf_8(cls):
         return True
 
 
 class SWComponentsClaim(CompositeAttestClaim):
-
+    """Class representing a PSA Attestation Token Software Components claim"""
     def get_claim_key(self=None):
         return ARM_RANGE - 6
 
@@ -98,17 +106,20 @@
         return 'SW_COMPONENTS'
 
 class SWComponentTypeClaim(NonVerifiedClaim):
+    """Class representing a PSA Attestation Token Software Component Measurement Type claim"""
     def get_claim_key(self=None):
         return SW_COMPONENT_RANGE + 1
 
     def get_claim_name(self=None):
         return 'SW_COMPONENT_TYPE'
 
-    def is_utf_8(self):
+    @classmethod
+    def is_utf_8(cls):
         return True
 
 
 class NoMeasurementsClaim(NonVerifiedClaim):
+    """Class representing a PSA Attestation Token No Software Measurements claim"""
     def get_claim_key(self=None):
         return ARM_RANGE - 7
 
@@ -117,6 +128,7 @@
 
 
 class ClientIdClaim(AttestationClaim):
+    """Class representing a PSA Attestation Token Client ID claim"""
     def get_claim_key(self=None):
         return ARM_RANGE - 1
 
@@ -128,7 +140,7 @@
         self.verify_count += 1
 
 class SecurityLifecycleClaim(AttestationClaim):
-
+    """Class representing a PSA Attestation Token Security Lifecycle claim"""
     SL_SHIFT = 12
 
     SL_NAMES = [
@@ -158,25 +170,18 @@
         self._check_type('SECURITY_LIFECYCLE', value, int)
         self.verify_count += 1
 
-    def add_value_to_dict(self, token, value):
-        entry_name = self.get_claim_name()
-        try:
-            name_idx = (value >> SecurityLifecycleClaim.SL_SHIFT) - 1
-            token[entry_name] = SecurityLifecycleClaim.SL_NAMES[name_idx]
-        except IndexError:
-            token[entry_name] = 'CUSTOM({})'.format(value)
+    @classmethod
+    def parse_raw(cls, raw_value):
+        name_idx = cls.SL_NAMES.index(raw_value.upper())
+        return (name_idx + 1) << cls.SL_SHIFT
 
-    @staticmethod
-    def parse_raw(raw_value):
-        name_idx = SecurityLifecycleClaim.SL_NAMES.index(raw_value.upper())
-        return (name_idx + 1) << SecurityLifecycleClaim.SL_SHIFT
-
-    @staticmethod
-    def get_formatted_value(value):
-        return SecurityLifecycleClaim.SL_NAMES[(value >> SecurityLifecycleClaim.SL_SHIFT) - 1]
+    @classmethod
+    def get_formatted_value(cls, value):
+        return cls.SL_NAMES[(value >> cls.SL_SHIFT) - 1]
 
 
 class ProfileIdClaim(AttestationClaim):
+    """Class representing a PSA Attestation Token Profile Definition claim"""
     def get_claim_key(self=None):
         return ARM_RANGE
 
@@ -187,11 +192,13 @@
         self._check_type('PROFILE_ID', value, str)
         self.verify_count += 1
 
-    def is_utf_8(self):
+    @classmethod
+    def is_utf_8(cls):
         return True
 
 
 class BootSeedClaim(AttestationClaim):
+    """Class representing a PSA Attestation Token Boot Seed claim"""
     def get_claim_key(self=None):
         return ARM_RANGE - 4
 
@@ -204,17 +211,20 @@
 
 
 class VerificationServiceClaim(NonVerifiedClaim):
+    """Class representing a PSA Attestation Token Verification Service Indicator claim"""
     def get_claim_key(self=None):
         return ARM_RANGE - 10 # originator
 
     def get_claim_name(self=None):
         return 'VERIFICATION_SERVICE'
 
-    def is_utf_8(self):
+    @classmethod
+    def is_utf_8(cls):
         return True
 
 
 class SignerIdClaim(AttestationClaim):
+    """Class representing a PSA Attestation Token Software Component Signer ID claim"""
     def get_claim_key(self=None):
         return SW_COMPONENT_RANGE + 5
 
@@ -227,17 +237,20 @@
 
 
 class SwComponentVersionClaim(NonVerifiedClaim):
+    """Class representing a PSA Attestation Token Software Component Version claim"""
     def get_claim_key(self=None):
         return SW_COMPONENT_RANGE + 4
 
     def get_claim_name(self=None):
         return 'SW_COMPONENT_VERSION'
 
-    def is_utf_8(self):
+    @classmethod
+    def is_utf_8(cls):
         return True
 
 
 class MeasurementValueClaim(AttestationClaim):
+    """Class representing a PSA Attestation Token Software Component Measurement value claim"""
     def get_claim_key(self=None):
         return SW_COMPONENT_RANGE + 2
 
@@ -250,11 +263,13 @@
 
 
 class MeasurementDescriptionClaim(NonVerifiedClaim):
+    """Class representing PSA Attestation Token Software Component Measurement description claim"""
     def get_claim_key(self=None):
         return SW_COMPONENT_RANGE + 6
 
     def get_claim_name(self=None):
         return 'MEASUREMENT_DESCRIPTION'
 
-    def is_utf_8(self):
+    @classmethod
+    def is_utf_8(cls):
         return True
diff --git a/iat-verifier/iatverifier/psa_iot_profile1_token_verifier.py b/iat-verifier/iatverifier/psa_iot_profile1_token_verifier.py
index e5700e1..569a924 100644
--- a/iat-verifier/iatverifier/psa_iot_profile1_token_verifier.py
+++ b/iat-verifier/iatverifier/psa_iot_profile1_token_verifier.py
@@ -5,49 +5,86 @@
 #
 # -----------------------------------------------------------------------------
 
+"""Contains class for verifying PSA Attestation Token profile PSA_IOT_PROFILE_1"""
+
 from iatverifier.attest_token_verifier import AttestationTokenVerifier as Verifier
 from iatverifier.attest_token_verifier import AttestationClaim as Claim
-from iatverifier.psa_iot_profile1_token_claims import ProfileIdClaim, ClientIdClaim, SecurityLifecycleClaim
-from iatverifier.psa_iot_profile1_token_claims import ImplementationIdClaim, BootSeedClaim, HardwareVersionClaim
+from iatverifier.psa_iot_profile1_token_claims import ProfileIdClaim, ClientIdClaim
+from iatverifier.psa_iot_profile1_token_claims import SecurityLifecycleClaim, ImplementationIdClaim
+from iatverifier.psa_iot_profile1_token_claims import BootSeedClaim, HardwareVersionClaim
 from iatverifier.psa_iot_profile1_token_claims import NoMeasurementsClaim, ChallengeClaim
-from iatverifier.psa_iot_profile1_token_claims import InstanceIdClaim, VerificationServiceClaim, SWComponentsClaim
-from iatverifier.psa_iot_profile1_token_claims import SWComponentTypeClaim, SwComponentVersionClaim
-from iatverifier.psa_iot_profile1_token_claims import MeasurementValueClaim, MeasurementDescriptionClaim, SignerIdClaim
+from iatverifier.psa_iot_profile1_token_claims import InstanceIdClaim, VerificationServiceClaim
+from iatverifier.psa_iot_profile1_token_claims import SWComponentsClaim, SWComponentTypeClaim
+from iatverifier.psa_iot_profile1_token_claims import SwComponentVersionClaim, MeasurementValueClaim
+from iatverifier.psa_iot_profile1_token_claims import MeasurementDescriptionClaim, SignerIdClaim
 
 class PSAIoTProfile1TokenVerifier(Verifier):
-    @staticmethod
-    def get_verifier(configuration=None):
-        verifier = PSAIoTProfile1TokenVerifier(
-            method=Verifier.SIGN_METHOD_SIGN1,
-            cose_alg=Verifier.COSE_ALG_ES256,
-            configuration=configuration)
+    """Verifier class for PSA Attestation Token profile PSA_IOT_PROFILE_1"""
 
+    def get_claim_key(self=None):
+        return 0xb5a101bc  #TODO: some made up claim. Change claim indexing to use name
+                           #      and this should return None
+
+    def get_claim_name(self=None):
+        return 'PSA_IOT_PROFILE1_TOKEN'
+
+    def _get_p_header(self):
+        return {'alg': self._get_cose_alg()}
+
+    def _get_wrapping_tag(self):
+        return None
+
+    def _parse_p_header(self, msg):
+        alg = self._get_cose_alg()
+        try:
+            msg_alg = msg.protected_header['alg']
+        except KeyError as exc:
+            raise ValueError(f'Missing algorithm from protected header (expected {alg})') from exc
+        if alg != msg_alg:
+            raise ValueError(f'Unexpected algorithm in protected header (expected {alg} ' +
+                f'instead of {msg_alg})')
+
+    def __init__(self, *, method, cose_alg, signing_key, configuration):
+
+        # First prepare the claim hierarchy for this token
         sw_component_claims = [
-            (SWComponentTypeClaim, {'necessity':Claim.OPTIONAL}),
-            (SwComponentVersionClaim, {'necessity':Claim.OPTIONAL}),
-            (MeasurementValueClaim, {'necessity':Claim.MANDATORY}),
-            (MeasurementDescriptionClaim, {'necessity':Claim.OPTIONAL}),
-            (SignerIdClaim, {'necessity':Claim.RECOMMENDED}),
+            (SWComponentTypeClaim, {'verifier': self, 'necessity': Claim.OPTIONAL}),
+            (SwComponentVersionClaim, {'verifier': self, 'necessity': Claim.OPTIONAL}),
+            (MeasurementValueClaim, {'verifier': self, 'necessity': Claim.MANDATORY}),
+            (MeasurementDescriptionClaim, {'verifier': self, 'necessity': Claim.OPTIONAL}),
+            (SignerIdClaim, {'verifier': self, 'necessity': Claim.RECOMMENDED}),
         ]
 
-        verifier.add_claims([
-            ProfileIdClaim(verifier, necessity=Claim.OPTIONAL),
-            ClientIdClaim(verifier, necessity=Claim.MANDATORY),
-            SecurityLifecycleClaim(verifier, necessity=Claim.MANDATORY),
-            ImplementationIdClaim(verifier, necessity=Claim.MANDATORY),
-            BootSeedClaim(verifier, necessity=Claim.MANDATORY),
-            HardwareVersionClaim(verifier, necessity=Claim.OPTIONAL),
-            SWComponentsClaim(verifier, claims=sw_component_claims, is_list=True, necessity=Claim.OPTIONAL),
-            NoMeasurementsClaim(verifier, necessity=Claim.OPTIONAL),
-            ChallengeClaim(verifier, necessity=Claim.MANDATORY),
-            InstanceIdClaim(verifier, expected_len=33, necessity=Claim.MANDATORY),
-            VerificationServiceClaim(verifier, necessity=Claim.OPTIONAL),
-        ])
-        return verifier
+        verifier_claims = [
+            (ProfileIdClaim, {'verifier': self, 'necessity': Claim.OPTIONAL}),
+            (ClientIdClaim, {'verifier': self, 'necessity': Claim.MANDATORY}),
+            (SecurityLifecycleClaim, {'verifier': self, 'necessity': Claim.MANDATORY}),
+            (ImplementationIdClaim, {'verifier': self, 'necessity': Claim.MANDATORY}),
+            (BootSeedClaim, {'verifier': self, 'necessity': Claim.MANDATORY}),
+            (HardwareVersionClaim, {'verifier': self, 'necessity': Claim.OPTIONAL}),
+            (SWComponentsClaim, {
+                'verifier': self,
+                'claims': sw_component_claims,
+                'is_list': True,
+                'cross_claim_requirement_checker': None,
+                'necessity': Claim.OPTIONAL}),
+            (NoMeasurementsClaim, {'verifier': self, 'necessity': Claim.OPTIONAL}),
+            (ChallengeClaim, {'verifier': self, 'necessity': Claim.MANDATORY}),
+            (InstanceIdClaim, {'verifier': self, 'expected_len': 33, 'necessity': Claim.MANDATORY}),
+            (VerificationServiceClaim, {'verifier': self, 'necessity': Claim.OPTIONAL}),
+        ]
 
-    def check_cross_claim_requirements(self):
-        claims = {v.get_claim_key(): v for v in self.claims}
+        # initialise the base part of the token
+        super().__init__(
+            claims=verifier_claims,
+            configuration=configuration,
+            necessity=Claim.MANDATORY,
+            method=method,
+            cose_alg=cose_alg,
+            signing_key=signing_key)
 
+    @staticmethod
+    def check_cross_claim_requirements(verifier, claims):
         if SWComponentsClaim.get_claim_key() in claims:
             sw_component_present = claims[SWComponentsClaim.get_claim_key()].verify_count > 0
         else:
@@ -59,5 +96,5 @@
             no_measurement_present = False
 
         if not sw_component_present and not no_measurement_present:
-            self.error('Invalid IAT: no software measurements defined and '
+            verifier.error('Invalid IAT: no software measurements defined and '
                   'NO_MEASUREMENTS claim is not present.')
diff --git a/iat-verifier/iatverifier/util.py b/iat-verifier/iatverifier/util.py
index 39af2e3..12f16b8 100644
--- a/iat-verifier/iatverifier/util.py
+++ b/iat-verifier/iatverifier/util.py
@@ -5,155 +5,67 @@
 #
 # -----------------------------------------------------------------------------
 
+"""Helper utilities for CLI tools and tests"""
+
 from collections.abc import Iterable
 from copy import deepcopy
 import logging
 
 import base64
-import cbor2
 import yaml
 from ecdsa import SigningKey, VerifyingKey
-from pycose.attributes import CoseAttrs
-from pycose.sign1message import Sign1Message
-from pycose.mac0message import Mac0Message
 from iatverifier.attest_token_verifier import AttestationTokenVerifier
-from cbor2 import CBORTag
+from cbor2 import CBOREncoder
 
 _logger = logging.getLogger("util")
 
-def sign_eat(token, verifier, *, add_p_header, key=None):
-    protected_header = CoseAttrs()
-    if add_p_header and key:
-        protected_header['alg'] = verifier.cose_alg
-    signed_msg = Sign1Message(p_header=protected_header)
-    signed_msg.payload = token
-    if key:
-        signed_msg.key = key
-        signed_msg.signature = signed_msg.compute_signature(alg=verifier.cose_alg)
-    return signed_msg.encode()
+_known_curves = {
+    "NIST256p": AttestationTokenVerifier.COSE_ALG_ES256,
+    "NIST384p": AttestationTokenVerifier.COSE_ALG_ES384,
+    "NIST521p": AttestationTokenVerifier.COSE_ALG_ES512,
+}
+
+def convert_map_to_token(token_map, verifier, wfh, *, add_p_header, name_as_key, parse_raw_value):
+    """
+    Convert a map to token and write the result to a file.
+    """
+    encoder = CBOREncoder(wfh)
+    verifier.convert_map_to_token(
+        encoder,
+        token_map,
+        add_p_header=add_p_header,
+        name_as_key=name_as_key,
+        parse_raw_value=parse_raw_value,
+        root=True)
 
 
-def hmac_eat(token, verifier, *, add_p_header, key=None):
-    protected_header = CoseAttrs()
-    if add_p_header and key:
-        protected_header['alg'] = verifier.cose_alg
-    hmac_msg = Mac0Message(payload=token, key=key, p_header=protected_header)
-    hmac_msg.compute_auth_tag(alg=verifier.cose_alg)
-    return hmac_msg.encode()
-
-
-def convert_map_to_token_files(mapfile, keyfile, verifier, outfile, add_p_header):
-    token_map = read_token_map(mapfile)
-
-    if verifier.method == 'sign':
-        with open(keyfile) as fh:
-            signing_key = SigningKey.from_pem(fh.read())
+def read_token_map(file):
+    """
+    Read a yaml file and return a map
+    """
+    if hasattr(file, 'read'):
+        raw = yaml.safe_load(file)
     else:
-        with open(keyfile, 'rb') as fh:
-            signing_key = fh.read()
+        with open(file, encoding="utf8") as file_obj:
+            raw = yaml.safe_load(file_obj)
 
-    with open(outfile, 'wb') as wfh:
-        convert_map_to_token(token_map, signing_key, verifier, wfh, add_p_header)
+    return raw
 
 
-def convert_map_to_token(token_map, signing_key, verifier, wfh, add_p_header):
-    wrapping_tag = verifier.get_wrapping_tag()
-    if wrapping_tag is not None:
-        token = cbor2.dumps(CBORTag(wrapping_tag, token_map))
-    else:
-        token = cbor2.dumps(token_map)
+def recursive_bytes_to_strings(token, in_place=False):
+    """
+    Transform the map in 'token' by changing bytes to base64 encoded form.
 
-    if verifier.method == AttestationTokenVerifier.SIGN_METHOD_RAW:
-        signed_token = token
-    elif verifier.method == AttestationTokenVerifier.SIGN_METHOD_SIGN1:
-        signed_token = sign_eat(token, verifier, add_p_header=add_p_header, key=signing_key)
-    elif verifier.method == AttestationTokenVerifier.SIGN_METHOD_MAC0:
-        signed_token = hmac_eat(token, verifier, add_p_header=add_p_header, key=signing_key)
-    else:
-        err_msg = 'Unexpected method "{}"; must be one of: raw, sign, mac'
-        raise ValueError(err_msg.format(method))
-
-    wfh.write(signed_token)
-
-
-def convert_token_to_map(raw_data, verifier):
-    payload = get_cose_payload(raw_data, verifier, check_p_header=False)
-    token_map = cbor2.loads(payload)
-    return _relabel_keys(token_map)
-
-
-def read_token_map(f):
-    if hasattr(f, 'read'):
-        raw = yaml.safe_load(f)
-    else:
-        with open(f) as fh:
-            raw = yaml.safe_load(fh)
-
-    return _parse_raw_token(raw)
-
-
-def extract_iat_from_cose(keyfile, tokenfile, verifier, check_p_header):
-    key = read_keyfile(keyfile, verifier.method)
-
-    try:
-        with open(tokenfile, 'rb') as wfh:
-            return get_cose_payload(wfh.read(), verifier, check_p_header=check_p_header, key=key)
-    except Exception as e:
-        msg = 'Bad COSE file "{}": {}'
-        raise ValueError(msg.format(tokenfile, e))
-
-
-def get_cose_payload(cose, verifier, *, check_p_header, key=None):
-    if verifier.method == AttestationTokenVerifier.SIGN_METHOD_SIGN1:
-        return get_cose_sign1_payload(cose, verifier, check_p_header=check_p_header, key=key)
-    if verifier.method == AttestationTokenVerifier.SIGN_METHOD_MAC0:
-        return get_cose_mac0_payload(cose, verifier, check_p_header=check_p_header, key=key)
-    err_msg = 'Unexpected method "{}"; must be one of: sign, mac'
-    raise ValueError(err_msg.format(verifier.method))
-
-def parse_protected_header(msg, alg):
-    try:
-        msg_alg = msg.protected_header['alg']
-    except KeyError:
-        raise ValueError('Missing alg from protected header (expected {})'.format(alg))
-    if alg != msg_alg:
-        raise ValueError('Unexpected alg in protected header (expected {} instead of {})'.format(alg, msg_alg))
-
-def get_cose_sign1_payload(cose, verifier, *, check_p_header, key=None):
-    msg = Sign1Message.decode(cose)
-    if key:
-        if check_p_header:
-            parse_protected_header(msg, verifier.cose_alg)
-        msg.key = key
-        msg.signature = msg.signers
-        try:
-            msg.verify_signature(alg=verifier.cose_alg)
-        except Exception as e:
-            raise ValueError('Bad signature ({})'.format(e))
-    return msg.payload
-
-
-def get_cose_mac0_payload(cose, verifier, *, check_p_header, key=None):
-    msg = Mac0Message.decode(cose)
-    if key:
-        if check_p_header:
-            parse_protected_header(msg, verifier.cose_alg)
-        msg.key = key
-        try:
-            msg.verify_auth_tag(alg=verifier.cose_alg)
-        except Exception as e:
-            raise ValueError('Bad signature ({})'.format(e))
-    return msg.payload
-
-def recursive_bytes_to_strings(d, in_place=False):
+    If 'in_place' is True, 'token' is modified in place; otherwise a new map is returned.
+    """
     if in_place:
-        result = d
+        result = token
     else:
-        result = deepcopy(d)
+        result = deepcopy(token)
 
     if hasattr(result, 'items'):
-        for k, v in result.items():
-            result[k] = recursive_bytes_to_strings(v, in_place=True)
+        for key, value in result.items():
+            result[key] = recursive_bytes_to_strings(value, in_place=True)
     elif (isinstance(result, Iterable) and
             not isinstance(result, (str, bytes))):
         result = [recursive_bytes_to_strings(r, in_place=True)
@@ -165,105 +77,45 @@
 
 
 def read_keyfile(keyfile, method=AttestationTokenVerifier.SIGN_METHOD_SIGN1):
+    """
+    Read a keyfile and return the key
+    """
     if keyfile:
         if method == AttestationTokenVerifier.SIGN_METHOD_SIGN1:
-            return read_sign1_key(keyfile)
+            return _read_sign1_key(keyfile)
         if method == AttestationTokenVerifier.SIGN_METHOD_MAC0:
-            return read_hmac_key(keyfile)
+            return _read_hmac_key(keyfile)
         err_msg = 'Unexpected method "{}"; must be one of: sign, mac'
         raise ValueError(err_msg.format(method))
 
     return None
 
+def get_cose_alg_from_key(key):
+    """Extract the algorithm from the key if possible
 
-def read_sign1_key(keyfile):
+    Returns the signature algorithm ID defined by COSE
+    """
+    if not hasattr(key, "curve"):
+        raise ValueError("Key has no curve specified in it.")
+    return _known_curves[key.curve.name]
+
+def _read_sign1_key(keyfile):
+    with open(keyfile, 'rb') as file_obj:
+        raw_key = file_obj.read()
     try:
-        key = SigningKey.from_pem(open(keyfile, 'rb').read())
-    except Exception as e:
-        signing_key_error = str(e)
+        key = SigningKey.from_pem(raw_key)
+    except Exception as exc:
+        signing_key_error = str(exc)
 
         try:
-            key = VerifyingKey.from_pem(open(keyfile, 'rb').read())
-        except Exception as e:
-            verifying_key_error = str(e)
+            key = VerifyingKey.from_pem(raw_key)
+        except Exception as vexc:
+            verifying_key_error = str(vexc)
 
             msg = 'Bad key file "{}":\n\tpubkey error: {}\n\tprikey error: {}'
-            raise ValueError(msg.format(keyfile, verifying_key_error, signing_key_error))
+            raise ValueError(msg.format(keyfile, verifying_key_error, signing_key_error)) from vexc
     return key
 
 
-def read_hmac_key(keyfile):
+def _read_hmac_key(keyfile):
     return open(keyfile, 'rb').read()
-
-def _get_known_claims():
-    if logging.DEBUG >= logging.root.level:
-        _logger.debug("Known claims are:")
-        for _, claim_class in AttestationTokenVerifier.all_known_claims.items():
-            _logger.debug(f"    {claim_class.get_claim_key():8} '{claim_class.get_claim_name()}'")
-    for _, claim_class in AttestationTokenVerifier.all_known_claims.items():
-        yield claim_class
-
-def _parse_raw_token(raw):
-    result = {}
-    field_names = {cc.get_claim_name(): cc for cc in _get_known_claims()}
-    for raw_key, raw_value in raw.items():
-        if isinstance(raw_key, int):
-            key = raw_key
-        else:
-            field_name = raw_key.upper()
-            try:
-                claim_class = field_names[field_name]
-                key = claim_class.get_claim_key()
-            except KeyError:
-                msg = 'Unknown field "{}" in token.'.format(field_name)
-                raise ValueError(msg)
-
-        if hasattr(raw_value, 'items'):
-            value = _parse_raw_token(raw_value)
-        elif (isinstance(raw_value, Iterable) and
-                not isinstance(raw_value, (str, bytes))):
-            value = []
-            for v in raw_value:
-                if hasattr(v, 'items'):
-                    value.append(_parse_raw_token(v))
-                else:
-                    value.append(claim_class.parse_raw(v))
-        else:
-            value = claim_class.parse_raw(raw_value)
-
-        result[key] = value
-
-    return result
-
-def _format_value(names, key, value):
-    if key in names:
-        value = names[key].get_formatted_value(value)
-    return value
-
-def _relabel_keys(token_map):
-    result = {}
-    while not hasattr(token_map, 'items'):
-        # TODO: token map is not a map. We are assuming that it is a tag
-        token_map = token_map.value
-    names = {v.get_claim_key(): v for v in _get_known_claims()}
-    for key, value in token_map.items():
-        if hasattr(value, 'items'):
-            value = _relabel_keys(value)
-        elif (isinstance(value, Iterable) and
-                not isinstance(value, (str, bytes))):
-            new_value = []
-            for item in value:
-                if hasattr(item, 'items'):
-                    new_value.append(_relabel_keys(item))
-                else:
-                    new_value.append(_format_value(names, key, item))
-            value = new_value
-        else:
-            value = _format_value(names, key, value)
-
-        if key in names:
-            new_key = names[key].get_claim_name().lower()
-        else:
-            new_key = key
-        result[new_key] = value
-    return result
diff --git a/iat-verifier/scripts/check_iat b/iat-verifier/scripts/check_iat
index c391393..8193b84 100755
--- a/iat-verifier/scripts/check_iat
+++ b/iat-verifier/scripts/check_iat
@@ -5,18 +5,21 @@
 #
 # -----------------------------------------------------------------------------
 
+"""CLI script for verifying an IAT."""
+
 import argparse
 import json
 import logging
 import sys
 
-from iatverifier.util import extract_iat_from_cose, recursive_bytes_to_strings
+from iatverifier.util import recursive_bytes_to_strings, read_keyfile, get_cose_alg_from_key
 from iatverifier.psa_iot_profile1_token_verifier import PSAIoTProfile1TokenVerifier
 from iatverifier.attest_token_verifier import VerifierConfiguration, AttestationTokenVerifier
 
 logger = logging.getLogger('iat-verify')
 
 def main():
+    """Main function for verifying an IAT"""
 
     token_verifiers = {
         "PSA-IoT-Profile1-token": PSAIoTProfile1TokenVerifier,
@@ -67,25 +70,44 @@
     logging.basicConfig(level=logging.INFO)
 
     config = VerifierConfiguration(keep_going=args.keep_going, strict=args.strict)
-    verifier = token_verifiers[args.token_type].get_verifier(config)
     if args.method == 'mac':
-        verifier.method = AttestationTokenVerifier.SIGN_METHOD_MAC0
-        verifier.cose_alg = AttestationTokenVerifier.COSE_ALG_HS256
+        method = AttestationTokenVerifier.SIGN_METHOD_MAC0
+    else:
+        method = AttestationTokenVerifier.SIGN_METHOD_SIGN1
 
-    try:
-        raw_iat = extract_iat_from_cose(args.keyfile, args.tokenfile, verifier, args.check_protected_header)
-        if args.keyfile:
-            print('Signature OK')
-    except ValueError as e:
-        logger.error('Could not extract IAT from COSE:\n\t{}'.format(e))
+    key = read_keyfile(keyfile=args.keyfile, method=method)
+
+    if args.method == 'mac':
+        cose_alg = AttestationTokenVerifier.COSE_ALG_HS256
+    else:
+        if key is not None:
+            cose_alg = get_cose_alg_from_key(key)
+        else:
+            cose_alg = AttestationTokenVerifier.COSE_ALG_ES256
+
+    verifier_class = token_verifiers[args.token_type]
+    if verifier_class == PSAIoTProfile1TokenVerifier:
+        verifier = PSAIoTProfile1TokenVerifier(
+            method=method,
+            cose_alg=cose_alg,
+            signing_key=key,
+            configuration=config)
+    else:
+        logger.error(f'Invalid token type:{verifier_class}\n\t')
         sys.exit(1)
 
     try:
-        token = verifier.decode_and_validate_iat(raw_iat)
-        if not verifier.seen_errors:
-            print('Token format OK')
-    except ValueError as e:
-        logger.error('Could not validate IAT:\n\t{}'.format(e))
+        with open(args.tokenfile, 'rb') as token_file:
+            token = verifier.parse_token(
+                token=token_file.read(),
+                verify=True,
+                check_p_header=args.check_protected_header,
+                lower_case_key=False)
+        if args.keyfile:
+            print('Signature OK')
+        print('Token format OK')
+    except ValueError as exc:
+        logger.error(f'Could not extract IAT from COSE:\n\t{exc}')
         sys.exit(1)
 
     if args.print_iat:
@@ -95,4 +117,4 @@
         print('')
 
 if __name__ == '__main__':
-    main()
\ No newline at end of file
+    main()
diff --git a/iat-verifier/scripts/compile_token b/iat-verifier/scripts/compile_token
index 8fac1fc..7fa9816 100755
--- a/iat-verifier/scripts/compile_token
+++ b/iat-verifier/scripts/compile_token
@@ -6,13 +6,15 @@
 #
 #-------------------------------------------------------------------------------
 
+"""CLI tool for compiling token from a yaml file"""
+
 import argparse
 import logging
 import os
 import sys
 
-from ecdsa import SigningKey
-from iatverifier.util import read_token_map, convert_map_to_token
+from iatverifier.util import read_token_map, convert_map_to_token, read_keyfile
+from iatverifier.util import get_cose_alg_from_key
 from iatverifier.psa_iot_profile1_token_verifier import PSAIoTProfile1TokenVerifier
 from iatverifier.attest_token_verifier import AttestationTokenVerifier
 
@@ -50,36 +52,51 @@
                         required=True)
 
     args = parser.parse_args()
-    signing_key = None
 
-    cose_alg = None
     if args.hmac:
-        method = AttestationTokenVerifier.SIGN_METHOD_MAC0
-        cose_alg = AttestationTokenVerifier.COSE_ALG_HS256
-
-        if args.keyfile:
-            with open(args.keyfile, 'rb') as fh:
-                signing_key = fh.read()
+        METHOD = AttestationTokenVerifier.SIGN_METHOD_MAC0
     elif args.raw:
         if args.keyfile:
             raise ValueError('A keyfile cannot be specified with --raw.')
-        method = AttestationTokenVerifier.SIGN_METHOD_RAW
+        METHOD = AttestationTokenVerifier.SIGN_METHOD_RAW
     else:
-        method = AttestationTokenVerifier.SIGN_METHOD_SIGN1
-        if args.keyfile:
-            with open(args.keyfile) as fh:
-                signing_key = SigningKey.from_pem(fh.read())
+        METHOD = AttestationTokenVerifier.SIGN_METHOD_SIGN1
 
-    verifier = token_verifiers[args.token_type].get_verifier()
-    if verifier.method != method:
-        verifier.method = method
-    if cose_alg is not None and verifier.cose_alg != cose_alg:
-        verifier.cose_alg = cose_alg
+    key = read_keyfile(args.keyfile, METHOD)
+
+    COSE_ALG = None
+    if args.hmac:
+        COSE_ALG = AttestationTokenVerifier.COSE_ALG_HS256
+    elif not args.raw:
+        COSE_ALG = get_cose_alg_from_key(key)
+
+    verifier_class = token_verifiers[args.token_type]
+    if verifier_class == PSAIoTProfile1TokenVerifier:
+        verifier = PSAIoTProfile1TokenVerifier(
+            method=METHOD,
+            cose_alg=COSE_ALG,
+            signing_key=key,
+            configuration=None)
+    else:
+        logging.error(f'Invalid token type:{verifier_class}\n\t')
+        sys.exit(1)
     token_map = read_token_map(args.source)
 
     if args.outfile:
         with open(args.outfile, 'wb') as wfh:
-            convert_map_to_token(token_map, signing_key, verifier, wfh, args.add_protected_header)
+            convert_map_to_token(
+                token_map,
+                verifier,
+                wfh,
+                add_p_header=args.add_protected_header,
+                name_as_key=True,
+                parse_raw_value=True)
     else:
         with os.fdopen(sys.stdout.fileno(), 'wb') as wfh:
-            convert_map_to_token(token_map, signing_key, verifier, wfh, args.add_protected_header)
+            convert_map_to_token(
+                token_map,
+                verifier,
+                wfh,
+                add_p_header=args.add_protected_header,
+                name_as_key=True,
+                parse_raw_value=True)
diff --git a/iat-verifier/scripts/decompile_token b/iat-verifier/scripts/decompile_token
index d61247f..b64fa59 100755
--- a/iat-verifier/scripts/decompile_token
+++ b/iat-verifier/scripts/decompile_token
@@ -6,15 +6,19 @@
 #
 #-------------------------------------------------------------------------------
 
+"""CLI script for decompiling a CBOR-formatted IAT file"""
+
 import argparse
+import logging
 import sys
 
 import yaml
-from iatverifier.util import convert_token_to_map
 from iatverifier.psa_iot_profile1_token_verifier import PSAIoTProfile1TokenVerifier
+from iatverifier.attest_token_verifier import AttestationTokenVerifier
 
 
 if __name__ == '__main__':
+    logging.basicConfig(level=logging.INFO)
 
     token_verifiers = {
         "PSA-IoT-Profile1-token": PSAIoTProfile1TokenVerifier,
@@ -31,14 +35,24 @@
                         required=True)
     args = parser.parse_args()
 
-    verifier = token_verifiers[args.token_type].get_verifier()
+    verifier_class = token_verifiers[args.token_type]
+    if verifier_class == PSAIoTProfile1TokenVerifier:
+        verifier = PSAIoTProfile1TokenVerifier(
+            method=AttestationTokenVerifier.SIGN_METHOD_SIGN1,
+            cose_alg=AttestationTokenVerifier.COSE_ALG_ES256,
+            signing_key=None, configuration=None)
+    else:
+        logging.error(f'Invalid token type:{verifier_class}\n\t')
+        sys.exit(1)
     with open(args.source, 'rb') as fh:
-        token_map = convert_token_to_map(fh.read(), verifier)
+        token_map = verifier.parse_token(
+            token=fh.read(),
+            verify=False,
+            check_p_header=False,
+            lower_case_key=True)
 
     if args.outfile:
-        with open(args.outfile, 'w') as wfh:
+        with open(args.outfile, 'w', encoding="UTF-8") as wfh:
             yaml.dump(token_map, wfh)
     else:
         yaml.dump(token_map, sys.stdout)
-
-
diff --git a/iat-verifier/tests/test_verifier.py b/iat-verifier/tests/test_verifier.py
index 93c058e..63b41a9 100644
--- a/iat-verifier/tests/test_verifier.py
+++ b/iat-verifier/tests/test_verifier.py
@@ -5,13 +5,15 @@
 #
 # -----------------------------------------------------------------------------
 
+"""Unittests for iat-verifier using PSAIoTProfile1TokenVerifier"""
+
 import os
 import tempfile
 import unittest
 
 from iatverifier.psa_iot_profile1_token_verifier import PSAIoTProfile1TokenVerifier
-from iatverifier.util import convert_map_to_token_files, extract_iat_from_cose
-from iatverifier.attest_token_verifier import VerifierConfiguration
+from iatverifier.util import read_token_map, convert_map_to_token, read_keyfile
+from iatverifier.attest_token_verifier import VerifierConfiguration, AttestationTokenVerifier
 
 
 THIS_DIR = os.path.dirname(__file__)
@@ -20,100 +22,213 @@
 KEYFILE = os.path.join(DATA_DIR, 'key.pem')
 KEYFILE_ALT = os.path.join(DATA_DIR, 'key-alt.pem')
 
+def create_token(source_name, verifier):
+    """Create a CBOR-encoded token and save it to a temp file
 
-def create_token(source_name, keyfile, verifier):
+    Return the name of the temp file."""
     source_path = os.path.join(DATA_DIR, source_name)
-    fd, dest_path = tempfile.mkstemp()
-    os.close(fd)
-    convert_map_to_token_files(source_path, keyfile, verifier, dest_path, True)
+    temp_file, dest_path = tempfile.mkstemp()
+    os.close(temp_file)
+
+    token_map = read_token_map(source_path)
+    with open(dest_path, 'wb') as wfh:
+        convert_map_to_token(
+            token_map,
+            verifier,
+            wfh,
+            add_p_header=False,
+            name_as_key=True,
+            parse_raw_value=True)
     return dest_path
 
-
-def read_iat(filename, keyfile, verifier):
+def read_iat(filename, verifier):
+    """Parse a token file"""
     filepath = os.path.join(DATA_DIR, filename)
-    raw_iat = extract_iat_from_cose(keyfile, filepath, verifier, True)
-    return verifier.decode_and_validate_iat(raw_iat)
+    with open(filepath, 'rb') as token_file:
+        return verifier.parse_token(
+            token=token_file.read(),
+            verify=True,
+            check_p_header=False,
+            lower_case_key=False)
 
-
-def create_and_read_iat(source_name, keyfile, verifier):
-    token_file = create_token(source_name, keyfile, verifier)
-    return read_iat(token_file, keyfile, verifier)
-
+def create_and_read_iat(source_name, verifier):
+    """Create a CBOR-encoded token in a temp file and parse it back."""
+    token_file = create_token(source_name, verifier)
+    return read_iat(token_file, verifier)
 
 class TestIatVerifier(unittest.TestCase):
+    """A class used for testing iat-verifier.
+
+    This class uses the claim and token definitions for PSA Attestation Token"""
 
     def setUp(self):
         self.config = VerifierConfiguration()
 
     def test_validate_signature(self):
-        verifier = PSAIoTProfile1TokenVerifier.get_verifier(self.config)
-        good_sig = create_token('valid-iat.yaml', KEYFILE, verifier)
-        bad_sig = create_token('valid-iat.yaml', KEYFILE_ALT, verifier)
+        """Testing Signature validation"""
+        method=AttestationTokenVerifier.SIGN_METHOD_SIGN1
+        cose_alg=AttestationTokenVerifier.COSE_ALG_ES256
 
-        raw_iat = extract_iat_from_cose(KEYFILE, good_sig, verifier, True)
+        signing_key = read_keyfile(KEYFILE, method)
+        verifier_good_sig = PSAIoTProfile1TokenVerifier(
+            method=method,
+            cose_alg=cose_alg,
+            signing_key=signing_key,
+            configuration=self.config)
+        good_sig = create_token('valid-iat.yaml', verifier_good_sig)
 
-        with self.assertRaises(ValueError) as cm:
-            raw_iat = extract_iat_from_cose(KEYFILE, bad_sig, verifier, True)
+        signing_key = read_keyfile(KEYFILE_ALT, method)
+        verifier_bad_sig = PSAIoTProfile1TokenVerifier(
+            method=method,
+            cose_alg=cose_alg,
+            signing_key=signing_key,
+            configuration=self.config)
+        bad_sig = create_token('valid-iat.yaml', verifier_bad_sig)
 
-        self.assertIn('Bad signature', cm.exception.args[0])
+        #dump_file_binary(good_sig)
+
+        with open(good_sig, 'rb') as wfh:
+            verifier_good_sig.parse_token(
+                token=wfh.read(),
+                verify=True,
+                check_p_header=False,
+                lower_case_key=False)
+
+
+        with self.assertRaises(ValueError) as test_ctx:
+            with open(bad_sig, 'rb') as wfh:
+                verifier_good_sig.parse_token(
+                    token=wfh.read(),
+                    verify=True,
+                    check_p_header=False,
+                    lower_case_key=False)
+
+        self.assertIn('Bad signature', test_ctx.exception.args[0])
 
     def test_validate_iat_structure(self):
+        """Testing IAT structure validation"""
         keep_going_conf = VerifierConfiguration(keep_going=True)
+        method=AttestationTokenVerifier.SIGN_METHOD_SIGN1
+        cose_alg=AttestationTokenVerifier.COSE_ALG_ES256
+        signing_key = read_keyfile(KEYFILE, method)
 
-        iat = create_and_read_iat('valid-iat.yaml', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(self.config))
+        create_and_read_iat(
+            'valid-iat.yaml',
+            PSAIoTProfile1TokenVerifier(method=method,
+            cose_alg=cose_alg,
+            signing_key=signing_key,
+            configuration=self.config))
 
-        with self.assertRaises(ValueError) as cm:
-            iat = create_and_read_iat('invalid-profile-id.yaml', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(self.config))
-        self.assertIn('Invalid PROFILE_ID', cm.exception.args[0])
+        with self.assertRaises(ValueError) as test_ctx:
+            create_and_read_iat(
+                'invalid-profile-id.yaml',
+                PSAIoTProfile1TokenVerifier(method=method,
+                cose_alg=cose_alg,
+                signing_key=signing_key,
+                configuration=self.config))
+        self.assertIn('Invalid PROFILE_ID', test_ctx.exception.args[0])
 
-        with self.assertRaises(ValueError) as cm:
-            iat = read_iat('malformed.cbor', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(self.config))
-        self.assertIn('Bad COSE', cm.exception.args[0])
+        with self.assertRaises(ValueError) as test_ctx:
+            read_iat(
+                'malformed.cbor',
+                PSAIoTProfile1TokenVerifier(method=method,
+                cose_alg=cose_alg,
+                signing_key=signing_key,
+                configuration=self.config))
+        self.assertIn('Bad COSE', test_ctx.exception.args[0])
 
-        with self.assertRaises(ValueError) as cm:
-            iat = create_and_read_iat('missing-claim.yaml', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(self.config))
-        self.assertIn('missing MANDATORY claim', cm.exception.args[0])
+        with self.assertRaises(ValueError) as test_ctx:
+            create_and_read_iat(
+                'missing-claim.yaml',
+                PSAIoTProfile1TokenVerifier(method=method,
+                cose_alg=cose_alg,
+                signing_key=signing_key,
+                configuration=self.config))
+        self.assertIn('missing MANDATORY claim', test_ctx.exception.args[0])
 
-        with self.assertRaises(ValueError) as cm:
-            iat = create_and_read_iat('submod-missing-claim.yaml', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(self.config))
-        self.assertIn('missing MANDATORY claim', cm.exception.args[0])
+        with self.assertRaises(ValueError) as test_ctx:
+            create_and_read_iat(
+                'submod-missing-claim.yaml',
+                PSAIoTProfile1TokenVerifier(method=method,
+                cose_alg=cose_alg,
+                signing_key=signing_key,
+                configuration=self.config))
+        self.assertIn('missing MANDATORY claim', test_ctx.exception.args[0])
 
-        with self.assertRaises(ValueError) as cm:
-            iat = create_and_read_iat('missing-sw-comps.yaml', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(self.config))
+        with self.assertRaises(ValueError) as test_ctx:
+            create_and_read_iat(
+                'missing-sw-comps.yaml',
+                PSAIoTProfile1TokenVerifier(method=method,
+                cose_alg=cose_alg,
+                signing_key=signing_key,
+                configuration=self.config))
         self.assertIn('NO_MEASUREMENTS claim is not present',
-                      cm.exception.args[0])
+                      test_ctx.exception.args[0])
 
-        with self.assertLogs() as cm:
-            iat = create_and_read_iat('missing-signer-id.yaml', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(self.config))
+        with self.assertLogs() as test_ctx:
+            create_and_read_iat(
+                'missing-signer-id.yaml',
+                PSAIoTProfile1TokenVerifier(method=method,
+                cose_alg=cose_alg,
+                signing_key=signing_key,
+                configuration=self.config))
             self.assertIn('Missing RECOMMENDED claim "SIGNER_ID" from SW_COMPONENTS',
-                         cm.records[0].getMessage())
+                         test_ctx.records[0].getMessage())
 
-        with self.assertLogs() as cm:
-            iat = create_and_read_iat('invalid-type-length.yaml', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(keep_going_conf))
+        with self.assertLogs() as test_ctx:
+            create_and_read_iat(
+                'invalid-type-length.yaml',
+                PSAIoTProfile1TokenVerifier(method=method,
+                cose_alg=cose_alg,
+                signing_key=signing_key,
+                configuration=keep_going_conf))
             self.assertIn("Invalid PROFILE_ID: must be a(n) <class 'str'>: found <class 'int'>",
-                         cm.records[0].getMessage())
+                         test_ctx.records[0].getMessage())
             self.assertIn("Invalid SIGNER_ID: must be a(n) <class 'bytes'>: found <class 'str'>",
-                         cm.records[1].getMessage())
+                         test_ctx.records[1].getMessage())
             self.assertIn("Invalid SIGNER_ID length: must be at least 32 bytes, found 12 bytes",
-                         cm.records[2].getMessage())
+                         test_ctx.records[2].getMessage())
             self.assertIn("Invalid MEASUREMENT length: must be at least 32 bytes, found 28 bytes",
-                         cm.records[3].getMessage())
+                         test_ctx.records[3].getMessage())
 
-        with self.assertLogs() as cm:
-            iat = create_and_read_iat('invalid-hw-version.yaml', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(keep_going_conf))
+        with self.assertLogs() as test_ctx:
+            create_and_read_iat(
+                'invalid-hw-version.yaml',
+                PSAIoTProfile1TokenVerifier(method=method,
+                cose_alg=cose_alg,
+                signing_key=signing_key,
+                configuration=keep_going_conf))
             self.assertIn("Invalid HARDWARE_VERSION length; must be 13 digits, found 10 characters",
-                         cm.records[0].getMessage())
+                         test_ctx.records[0].getMessage())
             self.assertIn("Invalid digit   at position 1",
-                         cm.records[1].getMessage())
+                         test_ctx.records[1].getMessage())
             self.assertIn("Invalid digit - at position 4",
-                         cm.records[2].getMessage())
+                         test_ctx.records[2].getMessage())
             self.assertIn("Invalid digit a at position 10",
-                         cm.records[3].getMessage())
+                         test_ctx.records[3].getMessage())
 
     def test_binary_string_decoding(self):
-        iat = create_and_read_iat('valid-iat.yaml', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(self.config))
+        """Test binary_string decoding"""
+        method=AttestationTokenVerifier.SIGN_METHOD_SIGN1
+        cose_alg=AttestationTokenVerifier.COSE_ALG_ES256
+        signing_key = read_keyfile(KEYFILE, method)
+        iat = create_and_read_iat(
+            'valid-iat.yaml',
+            PSAIoTProfile1TokenVerifier(method=method,
+            cose_alg=cose_alg,
+            signing_key=signing_key,
+            configuration=self.config))
         self.assertEqual(iat['SECURITY_LIFECYCLE'], 'SL_SECURED')
 
     def test_security_lifecycle_decoding(self):
-        iat = create_and_read_iat('valid-iat.yaml', KEYFILE, PSAIoTProfile1TokenVerifier.get_verifier(self.config))
+        """Test security lifecycle decoding"""
+        method=AttestationTokenVerifier.SIGN_METHOD_SIGN1
+        cose_alg=AttestationTokenVerifier.COSE_ALG_ES256
+        signing_key = read_keyfile(KEYFILE, method)
+        iat = create_and_read_iat(
+            'valid-iat.yaml',
+            PSAIoTProfile1TokenVerifier(method=method,
+            cose_alg=cose_alg,
+            signing_key=signing_key,
+            configuration=self.config))
         self.assertEqual(iat['SECURITY_LIFECYCLE'], 'SL_SECURED')