TFTF tests for v8.6 AMU enhancements

Not much can be done with the new AMU virtual offsets while running at
EL2 (they only apply at EL0 and EL1), but we can verify that they are
saved and restored properly across suspend/resume, so that is what this
patch does.
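
Since the AMU version is advertised through the AMU field of
ID_AA64PFR0_EL1 (bits [47:44]: 0x1 for the Armv8.4 AMU, 0x2 for the
v8.6 enhancements), the feature checks added in lib/extensions/amu
boil down to a sketch like the following (AArch64 shown):

    uint64_t amu = (read_id_aa64pfr0_el1() >> ID_AA64PFR0_AMU_SHIFT) &
                   ID_AA64PFR0_AMU_MASK;
    int have_8_6 = (amu >= ID_AA64PFR0_AMU_8_6);	/* i.e. field >= 0x2 */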

Signed-off-by: John Powell <john.powell@arm.com>
Change-Id: I5aef85021e875be2109bb9bd7cdbdbe31580394e
diff --git a/include/lib/aarch32/arch.h b/include/lib/aarch32/arch.h
index dcc4243..7c03c2b 100644
--- a/include/lib/aarch32/arch.h
+++ b/include/lib/aarch32/arch.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2020, ARM Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -103,6 +103,8 @@
 #define ID_PFR0_AMU_SHIFT	U(20)
 #define ID_PFR0_AMU_LENGTH	U(4)
 #define ID_PFR0_AMU_MASK	U(0xf)
+#define ID_PFR0_AMU_8_4		U(0x1)
+#define ID_PFR0_AMU_8_6		U(0x2)
 
 #define ID_PFR0_DIT_SHIFT	U(24)
 #define ID_PFR0_DIT_LENGTH	U(4)
diff --git a/include/lib/aarch64/arch.h b/include/lib/aarch64/arch.h
index 2d2a892..5464907 100644
--- a/include/lib/aarch64/arch.h
+++ b/include/lib/aarch64/arch.h
@@ -129,6 +129,8 @@
 #define ID_AA64PFR0_AMU_SHIFT	U(44)
 #define ID_AA64PFR0_AMU_LENGTH	U(4)
 #define ID_AA64PFR0_AMU_MASK	ULL(0xf)
+#define ID_AA64PFR0_AMU_8_4	U(0x1)
+#define ID_AA64PFR0_AMU_8_6	U(0x2)
 #define ID_AA64PFR0_ELX_MASK	ULL(0xf)
 #define ID_AA64PFR0_SVE_SHIFT	U(32)
 #define ID_AA64PFR0_SVE_MASK	ULL(0xf)
@@ -303,6 +305,7 @@
 
 /* SCR definitions */
 #define SCR_RES1_BITS		((U(1) << 4) | (U(1) << 5))
+#define SCR_AMVOFFEN_BIT	(UL(1) << 35)
 #define SCR_ATA_BIT		(U(1) << 26)
 #define SCR_FIEN_BIT		(U(1) << 21)
 #define SCR_API_BIT		(U(1) << 17)
@@ -364,6 +367,7 @@
 #define VTTBR_BADDR_SHIFT	U(0)
 
 /* HCR definitions */
+#define HCR_AMVOFFEN_BIT	(ULL(1) << 51)
 #define HCR_API_BIT		(ULL(1) << 41)
 #define HCR_APK_BIT		(ULL(1) << 40)
 #define HCR_TGE_BIT		(ULL(1) << 27)
@@ -861,10 +865,14 @@
 #define AMEVTYPER1E_EL0		S3_3_C13_C15_6
 #define AMEVTYPER1F_EL0		S3_3_C13_C15_7
 
+/* AMCFGR_EL0 definitions */
+#define AMCFGR_EL0_NCG_SHIFT		U(28)
+#define AMCFGR_EL0_NCG_MASK		U(0xf)
+
 /* AMCGCR_EL0 definitions */
-#define AMCGCR_EL0_CG1NC_SHIFT	U(8)
-#define AMCGCR_EL0_CG1NC_LENGTH	U(8)
-#define AMCGCR_EL0_CG1NC_MASK	U(0xff)
+#define AMCGCR_EL0_CG1NC_SHIFT		U(8)
+#define AMCGCR_EL0_CG1NC_LENGTH		U(8)
+#define AMCGCR_EL0_CG1NC_MASK		U(0xff)
 
 /* MPAM register definitions */
 #define MPAM3_EL3_MPAMEN_BIT		(ULL(1) << 63)
@@ -876,6 +884,44 @@
 #define MPAMIDR_HAS_HCR_BIT		(ULL(1) << 17)
 
 /*******************************************************************************
+ * Definitions for system register interface to AMU for ARMv8.6 enhancements
+ ******************************************************************************/
+
+/* Register reporting which group 1 counters and virtual offsets are implemented. */
+#define AMCG1IDR_EL0		S3_3_C13_C2_6
+#define AMCG1IDR_CTR_MASK	ULL(0xffff)
+#define AMCG1IDR_CTR_SHIFT	U(0)
+#define AMCG1IDR_VOFF_MASK	ULL(0xffff)
+#define AMCG1IDR_VOFF_SHIFT	U(16)
+
+/* CG1RZ bit added to AMCR_EL0 by the v8.6 AMU extensions */
+#define AMCR_CG1RZ_BIT		(ULL(0x1) << 17)
+
+/* Definitions for virtual offset registers for architected event counters. */
+/* AMEVCNTVOFF01_EL2 intentionally left undefined, as it does not exist. */
+#define AMEVCNTVOFF00_EL2	S3_4_C13_C8_0
+#define AMEVCNTVOFF02_EL2	S3_4_C13_C8_2
+#define AMEVCNTVOFF03_EL2	S3_4_C13_C8_3
+
+/* Definitions for virtual offset registers for auxiliary event counters. */
+#define AMEVCNTVOFF10_EL2	S3_4_C13_C10_0
+#define AMEVCNTVOFF11_EL2	S3_4_C13_C10_1
+#define AMEVCNTVOFF12_EL2	S3_4_C13_C10_2
+#define AMEVCNTVOFF13_EL2	S3_4_C13_C10_3
+#define AMEVCNTVOFF14_EL2	S3_4_C13_C10_4
+#define AMEVCNTVOFF15_EL2	S3_4_C13_C10_5
+#define AMEVCNTVOFF16_EL2	S3_4_C13_C10_6
+#define AMEVCNTVOFF17_EL2	S3_4_C13_C10_7
+#define AMEVCNTVOFF18_EL2	S3_4_C13_C11_0
+#define AMEVCNTVOFF19_EL2	S3_4_C13_C11_1
+#define AMEVCNTVOFF1A_EL2	S3_4_C13_C11_2
+#define AMEVCNTVOFF1B_EL2	S3_4_C13_C11_3
+#define AMEVCNTVOFF1C_EL2	S3_4_C13_C11_4
+#define AMEVCNTVOFF1D_EL2	S3_4_C13_C11_5
+#define AMEVCNTVOFF1E_EL2	S3_4_C13_C11_6
+#define AMEVCNTVOFF1F_EL2	S3_4_C13_C11_7
+
+/*******************************************************************************
  * RAS system registers
  ******************************************************************************/
 #define DISR_EL1		S3_0_C12_C1_1
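
AMCG1IDR_EL0 packs two bitmaps: implemented group 1 counters in bits
[15:0] and implemented virtual offsets in bits [31:16]. A hypothetical
helper (not part of this patch) extracting the offset bitmap could
look like:

    static inline uint32_t amu_group1_voff_bitmap(void)
    {
    	/* Bit i set => AMEVCNTVOFF1<i>_EL2 is implemented. */
    	return (uint32_t)((read_amcg1idr_el0() >> AMCG1IDR_VOFF_SHIFT) &
    			  AMCG1IDR_VOFF_MASK);
    }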
diff --git a/include/lib/aarch64/arch_helpers.h b/include/lib/aarch64/arch_helpers.h
index 9bcd0bb..d46f8e9 100644
--- a/include/lib/aarch64/arch_helpers.h
+++ b/include/lib/aarch64/arch_helpers.h
@@ -421,7 +421,10 @@
 DEFINE_RENAME_SYSREG_WRITE_FUNC(icc_sgi0r_el1, ICC_SGI0R_EL1)
 DEFINE_RENAME_SYSREG_RW_FUNCS(icc_sgi1r, ICC_SGI1R)
 
+DEFINE_RENAME_SYSREG_RW_FUNCS(amcr_el0, AMCR_EL0)
 DEFINE_RENAME_SYSREG_RW_FUNCS(amcgcr_el0, AMCGCR_EL0)
+DEFINE_RENAME_SYSREG_READ_FUNC(amcfgr_el0, AMCFGR_EL0)
+DEFINE_RENAME_SYSREG_READ_FUNC(amcg1idr_el0, AMCG1IDR_EL0)
 DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenclr0_el0, AMCNTENCLR0_EL0)
 DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenset0_el0, AMCNTENSET0_EL0)
 DEFINE_RENAME_SYSREG_RW_FUNCS(amcntenclr1_el0, AMCNTENCLR1_EL0)
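
The DEFINE_RENAME_SYSREG_* macros generate inline accessors for
registers that must be referred to by their encoded S3_... names;
roughly (simplified from arch_helpers.h, shown only as an
illustration):

    #define DEFINE_RENAME_SYSREG_READ_FUNC(_name, _reg_name)	\
    static inline u_register_t read_##_name(void)		\
    {								\
    	u_register_t v;						\
    	__asm__ volatile("mrs %0, " #_reg_name : "=r" (v));	\
    	return v;						\
    }

so read_amcg1idr_el0() compiles to a single mrs from S3_3_C13_C2_6.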
diff --git a/include/lib/extensions/amu.h b/include/lib/extensions/amu.h
index 3071464..84ed67f 100644
--- a/include/lib/extensions/amu.h
+++ b/include/lib/extensions/amu.h
@@ -1,35 +1,80 @@
 /*
- * Copyright (c) 2017, Arm Limited. All rights reserved.
+ * Copyright (c) 2017-2020, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
 
-#ifndef __AMU_H__
-#define __AMU_H__
+#ifndef AMU_H
+#define AMU_H
 
-#include <platform_def.h>
 #include <stdint.h>
 
-#define AMU_GROUP0_NR_COUNTERS		4
-#define AMU_GROUP0_COUNTERS_MASK	0xf
+#include <cassert.h>
+#include <platform_def.h>
+
+#define AMU_GROUP0_COUNTERS_MASK	U(0xf)
+#define AMU_GROUP0_NR_COUNTERS		U(4)
 
 #ifdef PLAT_AMU_GROUP1_COUNTERS_MASK
 #define AMU_GROUP1_COUNTERS_MASK	PLAT_AMU_GROUP1_COUNTERS_MASK
 #else
-#define AMU_GROUP1_COUNTERS_MASK	0
+#define AMU_GROUP1_COUNTERS_MASK	U(0)
 #endif
 
-#ifdef PLAT_AMU_GROUP1_NR_COUNTERS
-#define AMU_GROUP1_NR_COUNTERS		PLAT_AMU_GROUP1_NR_COUNTERS
+/* Calculate number of group 1 counters */
+#if (AMU_GROUP1_COUNTERS_MASK	& (1 << 15))
+#define	AMU_GROUP1_NR_COUNTERS		16U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 14))
+#define	AMU_GROUP1_NR_COUNTERS		15U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 13))
+#define	AMU_GROUP1_NR_COUNTERS		14U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 12))
+#define	AMU_GROUP1_NR_COUNTERS		13U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 11))
+#define	AMU_GROUP1_NR_COUNTERS		12U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 10))
+#define	AMU_GROUP1_NR_COUNTERS		11U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 9))
+#define	AMU_GROUP1_NR_COUNTERS		10U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 8))
+#define	AMU_GROUP1_NR_COUNTERS		9U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 7))
+#define	AMU_GROUP1_NR_COUNTERS		8U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 6))
+#define	AMU_GROUP1_NR_COUNTERS		7U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 5))
+#define	AMU_GROUP1_NR_COUNTERS		6U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 4))
+#define	AMU_GROUP1_NR_COUNTERS		5U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 3))
+#define	AMU_GROUP1_NR_COUNTERS		4U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 2))
+#define	AMU_GROUP1_NR_COUNTERS		3U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 1))
+#define	AMU_GROUP1_NR_COUNTERS		2U
+#elif (AMU_GROUP1_COUNTERS_MASK	& (1 << 0))
+#define	AMU_GROUP1_NR_COUNTERS		1U
 #else
-#define AMU_GROUP1_NR_COUNTERS		0
+#define	AMU_GROUP1_NR_COUNTERS		0U
 #endif
 
-#define AMU_GROUP0_MAX_NR_COUNTERS	4
-#define AMU_GROUP1_MAX_NR_COUNTERS	16
+CASSERT(AMU_GROUP1_COUNTERS_MASK <= 0xffff, invalid_amu_group1_counters_mask);
 
 int amu_supported(void);
-uint64_t amu_group0_cnt_read(int idx);
-uint64_t amu_group1_cnt_read(int idx);
+int amu_supported_8_6(void);
 
-#endif /* __AMU_H__ */
+uint64_t amu_group0_cnt_read(unsigned int idx);
+#if __aarch64__
+uint64_t amu_group0_voffset_read(unsigned int idx);
+void amu_group0_voffset_write(unsigned int idx, uint64_t val);
+#endif
+
+#if AMU_GROUP1_NR_COUNTERS
+uint64_t amu_group1_cnt_read(unsigned int idx);
+#if __aarch64__
+uint64_t amu_group1_voffset_read(unsigned int idx);
+void amu_group1_voffset_write(unsigned int idx, uint64_t val);
+#endif
+#endif
+
+#endif /* AMU_H */
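
The #elif ladder above evaluates, at preprocessing time, to one plus
the index of the most significant bit set in the mask. For example, a
hypothetical platform implementing group 1 counters 0, 1, 4, 5 and 8
would define:

    /* Hypothetical platform_def.h fragment (illustration only). */
    #define PLAT_AMU_GROUP1_COUNTERS_MASK	U(0x133)
    /* Highest set bit is bit 8, so AMU_GROUP1_NR_COUNTERS is 9U. */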
diff --git a/include/lib/extensions/amu_private.h b/include/lib/extensions/amu_private.h
index e6cd290..67794df 100644
--- a/include/lib/extensions/amu_private.h
+++ b/include/lib/extensions/amu_private.h
@@ -1,15 +1,23 @@
 /*
- * Copyright (c) 2017, Arm Limited. All rights reserved.
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
 
-#ifndef __AMU_PRIVATE_H__
-#define __AMU_PRIVATE_H__
+#ifndef AMU_PRIVATE_H
+#define AMU_PRIVATE_H
 
 #include <stdint.h>
 
-uint64_t amu_group0_cnt_read_internal(int idx);
-uint64_t amu_group1_cnt_read_internal(int idx);
+uint64_t amu_group0_cnt_read_internal(unsigned int idx);
+uint64_t amu_group1_cnt_read_internal(unsigned int idx);
 
-#endif /* __AMU_PRIVATE_H__ */
+#if __aarch64__
+uint64_t amu_group0_voffset_read_internal(unsigned int idx);
+void amu_group0_voffset_write_internal(unsigned int idx, uint64_t val);
+
+uint64_t amu_group1_voffset_read_internal(unsigned int idx);
+void amu_group1_voffset_write_internal(unsigned int idx, uint64_t val);
+#endif
+
+#endif /* AMU_PRIVATE_H */
diff --git a/lib/extensions/amu/aarch32/amu.c b/lib/extensions/amu/aarch32/amu.c
index a923df3..fdc8728 100644
--- a/lib/extensions/amu/aarch32/amu.c
+++ b/lib/extensions/amu/aarch32/amu.c
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018, Arm Limited. All rights reserved.
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -15,23 +15,31 @@
 	uint64_t features;
 
 	features = read_id_pfr0() >> ID_PFR0_AMU_SHIFT;
-	return (features & ID_PFR0_AMU_MASK) == 1;
+	return (features & ID_PFR0_AMU_MASK) >= ID_PFR0_AMU_8_4;
+}
+
+int amu_supported_8_6(void)
+{
+	uint64_t features;
+
+	features = read_id_pfr0() >> ID_PFR0_AMU_SHIFT;
+	return (features & ID_PFR0_AMU_MASK) >= ID_PFR0_AMU_8_6;
 }
 
 /* Read the group 0 counter identified by the given `idx`. */
-uint64_t amu_group0_cnt_read(int idx)
+uint64_t amu_group0_cnt_read(unsigned int idx)
 {
 	assert(amu_supported());
-	assert(idx >= 0 && idx < AMU_GROUP0_NR_COUNTERS);
+	assert(idx < AMU_GROUP0_NR_COUNTERS);
 
 	return amu_group0_cnt_read_internal(idx);
 }
 
 /* Read the group 1 counter identified by the given `idx`. */
-uint64_t amu_group1_cnt_read(int idx)
+uint64_t amu_group1_cnt_read(unsigned int idx)
 {
 	assert(amu_supported());
-	assert(idx >= 0 && idx < AMU_GROUP1_NR_COUNTERS);
+	assert(idx < AMU_GROUP1_NR_COUNTERS);
 
 	return amu_group1_cnt_read_internal(idx);
 }
diff --git a/lib/extensions/amu/aarch64/amu.c b/lib/extensions/amu/aarch64/amu.c
index 00253b3..8b15e5d 100644
--- a/lib/extensions/amu/aarch64/amu.c
+++ b/lib/extensions/amu/aarch64/amu.c
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018, Arm Limited. All rights reserved.
+ * Copyright (c) 2020, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -10,28 +10,110 @@
 #include <arch_helpers.h>
 #include <assert.h>
 
+/* Check if the basic AMU features introduced in Armv8.4 are supported. */
 int amu_supported(void)
 {
 	uint64_t features;
 
 	features = read_id_aa64pfr0_el1() >> ID_AA64PFR0_AMU_SHIFT;
-	return (features & ID_AA64PFR0_AMU_MASK) == 1;
+	return ((features & ID_AA64PFR0_AMU_MASK) >= ID_AA64PFR0_AMU_8_4);
+}
+
+/* Check if the AMU enhancements introduced in Armv8.6 are supported. */
+int amu_supported_8_6(void)
+{
+	uint64_t features;
+
+	features = read_id_aa64pfr0_el1() >> ID_AA64PFR0_AMU_SHIFT;
+	return ((features & ID_AA64PFR0_AMU_MASK) >= ID_AA64PFR0_AMU_8_6);
+}
+
+/* Check if group 1 counters are implemented. */
+int amu_group1_supported(void)
+{
+	uint64_t features = read_amcfgr_el0() >> AMCFGR_EL0_NCG_SHIFT;
+
+	return (features & AMCFGR_EL0_NCG_MASK) == 1U;
 }
 
 /* Read the group 0 counter identified by the given `idx`. */
-uint64_t amu_group0_cnt_read(int idx)
+uint64_t amu_group0_cnt_read(unsigned int idx)
 {
 	assert(amu_supported());
-	assert(idx >= 0 && idx < AMU_GROUP0_NR_COUNTERS);
+	assert(idx < AMU_GROUP0_NR_COUNTERS);
 
 	return amu_group0_cnt_read_internal(idx);
 }
 
+/*
+ * Read the group 0 offset register for a given index. Index must be 0, 2,
+ * or 3; the register for index 1 does not exist.
+ *
+ * Using this function requires v8.6 support.
+ */
+uint64_t amu_group0_voffset_read(unsigned int idx)
+{
+	assert(amu_supported_8_6());
+	assert(idx < AMU_GROUP0_NR_COUNTERS);
+	assert(idx != 1U);
+
+	return amu_group0_voffset_read_internal(idx);
+}
+
+/*
+ * Write the group 0 offset register for a given index. Index must be 0, 2,
+ * or 3; the register for index 1 does not exist.
+ *
+ * Using this function requires v8.6 support.
+ */
+void amu_group0_voffset_write(unsigned int idx, uint64_t val)
+{
+	assert(amu_supported_8_6());
+	assert(idx < AMU_GROUP0_NR_COUNTERS);
+	assert(idx != 1U);
+
+	amu_group0_voffset_write_internal(idx, val);
+	isb();
+}
+
 /* Read the group 1 counter identified by the given `idx`. */
-uint64_t amu_group1_cnt_read(int idx)
+uint64_t amu_group1_cnt_read(unsigned int idx)
 {
 	assert(amu_supported());
-	assert(idx >= 0 && idx < AMU_GROUP1_NR_COUNTERS);
+	assert(idx < AMU_GROUP1_NR_COUNTERS);
 
 	return amu_group1_cnt_read_internal(idx);
 }
+
+/*
+ * Read the group 1 offset register for a given index.
+ *
+ * Using this function requires v8.6 support.
+ */
+uint64_t amu_group1_voffset_read(unsigned int idx)
+{
+	assert(amu_supported_8_6());
+	assert(amu_group1_supported());
+	assert(idx < AMU_GROUP1_NR_COUNTERS);
+	assert(((read_amcg1idr_el0() >> AMCG1IDR_VOFF_SHIFT) &
+		(1U << idx)) != 0U);
+
+	return amu_group1_voffset_read_internal(idx);
+}
+
+/*
+ * Write the group 1 offset register for a given index.
+ *
+ * Using this function requires v8.6 support.
+ */
+void amu_group1_voffset_write(unsigned int idx, uint64_t val)
+{
+	assert(amu_supported_8_6());
+	assert(amu_group1_supported());
+	assert(idx < AMU_GROUP1_NR_COUNTERS);
+	assert(((read_amcg1idr_el0() >> AMCG1IDR_VOFF_SHIFT) &
+		(1U << idx)) != 0U);
+
+	amu_group1_voffset_write_internal(idx, val);
+	isb();
+}
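
Taken together these give EL2/EL3 software a checked interface to the
virtual offsets. A minimal usage sketch, assuming FEAT_AMUv1p1 and
execution at EL2:

    if (amu_supported_8_6() && amu_group1_supported()) {
    	uint64_t voff = (read_amcg1idr_el0() >> AMCG1IDR_VOFF_SHIFT) &
    			AMCG1IDR_VOFF_MASK;
    	for (unsigned int i = 0U; i < AMU_GROUP1_NR_COUNTERS; i++) {
    		if ((voff & (1ULL << i)) != 0ULL)
    			amu_group1_voffset_write(i, 0ULL); /* clear offset */
    	}
    }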
diff --git a/lib/extensions/amu/aarch64/amu_helpers.S b/lib/extensions/amu/aarch64/amu_helpers.S
index 061f3fd..cca55e4 100644
--- a/lib/extensions/amu/aarch64/amu_helpers.S
+++ b/lib/extensions/amu/aarch64/amu_helpers.S
@@ -11,6 +11,12 @@
 	.globl	amu_group0_cnt_read_internal
 	.globl	amu_group1_cnt_read_internal
 
+	/* v8.6 virtual offset register functions */
+	.globl	amu_group0_voffset_read_internal
+	.globl	amu_group0_voffset_write_internal
+	.globl	amu_group1_voffset_read_internal
+	.globl	amu_group1_voffset_write_internal
+
 /*
  * uint64_t amu_group0_cnt_read_internal(int idx);
  *
@@ -86,3 +92,169 @@
 	read	AMEVCNTR1E_EL0		/* index 14 */
 	read	AMEVCNTR1F_EL0		/* index 15 */
 endfunc amu_group1_cnt_read_internal
+
+/*
+ * Accessor functions for virtual offset registers added in v8.6
+ */
+
+/*
+ * uint64_t amu_group0_voffset_read_internal(unsigned int idx);
+ *
+ * Given `idx`, read the corresponding AMU virtual offset register
+ * and return it in `x0`.
+ */
+func amu_group0_voffset_read_internal
+	adr	x1, 1f
+#if ENABLE_ASSERTIONS
+	/*
+	 * It can be dangerous to call this function with an
+	 * out of bounds index.  Ensure `idx` is valid.
+	 */
+	tst	x0, #~3
+	ASM_ASSERT(eq)
+	/* Make sure idx != 1 since AMEVCNTVOFF01_EL2 does not exist */
+	cmp	x0, #1
+	ASM_ASSERT(ne)
+#endif
+	/*
+	 * Given `idx` calculate address of mrs/ret instruction pair
+	 * in the table below.
+	 */
+	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
+#if ENABLE_BTI
+	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
+#endif
+	br	x1
+
+1:	read	AMEVCNTVOFF00_EL2	/* index 0 */
+	.skip	8			/* AMEVCNTVOFF01_EL2 does not exist */
+#if ENABLE_BTI
+	.skip	4			/* Extra space for BTI instruction. */
+#endif
+	read	AMEVCNTVOFF02_EL2	/* index 2 */
+	read	AMEVCNTVOFF03_EL2	/* index 3 */
+endfunc amu_group0_voffset_read_internal
+
+/*
+ * void amu_group0_voffset_write_internal(unsigned int idx, uint64_t val);
+ *
+ * Given `idx`, write `val` to the corresponding AMU virtual offset register.
+ */
+func amu_group0_voffset_write_internal
+	adr	x2, 1f
+#if ENABLE_ASSERTIONS
+	/*
+	 * It can be dangerous to call this function with an
+	 * out of bounds index.  Ensure `idx` is valid.
+	 */
+	tst	x0, #~3
+	ASM_ASSERT(eq)
+	/* Make sure idx != 1 since AMEVCNTVOFF01_EL2 does not exist */
+	cmp	x0, #1
+	ASM_ASSERT(ne)
+#endif
+	/*
+	 * Given `idx` calculate address of mrs/ret instruction pair
+	 * in the table below.
+	 */
+	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
+#if ENABLE_BTI
+	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
+#endif
+	br	x2
+
+1:	write	AMEVCNTVOFF00_EL2	/* index 0 */
+	.skip	8			/* AMEVCNTVOFF01_EL2 does not exist */
+#if ENABLE_BTI
+	.skip	4			/* Extra space for BTI instruction. */
+#endif
+	write	AMEVCNTVOFF02_EL2	/* index 2 */
+	write	AMEVCNTVOFF03_EL2	/* index 3 */
+endfunc amu_group0_voffset_write_internal
+
+/*
+ * uint64_t amu_group1_voffset_read_internal(unsigned int idx);
+ *
+ * Given `idx`, read the corresponding AMU virtual offset register
+ * and return it in `x0`.
+ */
+func amu_group1_voffset_read_internal
+	adr	x1, 1f
+#if ENABLE_ASSERTIONS
+	/*
+	 * It can be dangerous to call this function with an
+	 * out of bounds index.  Ensure `idx` is valid.
+	 */
+	tst	x0, #~0xF
+	ASM_ASSERT(eq)
+#endif
+	/*
+	 * Given `idx` calculate address of mrs/ret instruction pair
+	 * in the table below.
+	 */
+	add	x1, x1, x0, lsl #3	/* each mrs/ret sequence is 8 bytes */
+#if ENABLE_BTI
+	add	x1, x1, x0, lsl #2	/* + "bti j" instruction */
+#endif
+	br	x1
+
+1:	read	AMEVCNTVOFF10_EL2	/* index 0 */
+	read	AMEVCNTVOFF11_EL2	/* index 1 */
+	read	AMEVCNTVOFF12_EL2	/* index 2 */
+	read	AMEVCNTVOFF13_EL2	/* index 3 */
+	read	AMEVCNTVOFF14_EL2	/* index 4 */
+	read	AMEVCNTVOFF15_EL2	/* index 5 */
+	read	AMEVCNTVOFF16_EL2	/* index 6 */
+	read	AMEVCNTVOFF17_EL2	/* index 7 */
+	read	AMEVCNTVOFF18_EL2	/* index 8 */
+	read	AMEVCNTVOFF19_EL2	/* index 9 */
+	read	AMEVCNTVOFF1A_EL2	/* index 10 */
+	read	AMEVCNTVOFF1B_EL2	/* index 11 */
+	read	AMEVCNTVOFF1C_EL2	/* index 12 */
+	read	AMEVCNTVOFF1D_EL2	/* index 13 */
+	read	AMEVCNTVOFF1E_EL2	/* index 14 */
+	read	AMEVCNTVOFF1F_EL2	/* index 15 */
+endfunc amu_group1_voffset_read_internal
+
+/*
+ * void amu_group1_voffset_write_internal(unsigned int idx, uint64_t val);
+ *
+ * Given `idx`, write `val` to the corresponding AMU virtual offset register.
+ */
+func amu_group1_voffset_write_internal
+	adr	x2, 1f
+#if ENABLE_ASSERTIONS
+	/*
+	 * It can be dangerous to call this function with an
+	 * out of bounds index.  Ensure `idx` is valid.
+	 */
+	tst	x0, #~0xF
+	ASM_ASSERT(eq)
+#endif
+	/*
+	 * Given `idx` calculate address of mrs/ret instruction pair
+	 * in the table below.
+	 */
+	add	x2, x2, x0, lsl #3	/* each msr/ret sequence is 8 bytes */
+#if ENABLE_BTI
+	add	x2, x2, x0, lsl #2	/* + "bti j" instruction */
+#endif
+	br	x2
+
+1:	write	AMEVCNTVOFF10_EL2	/* index 0 */
+	write	AMEVCNTVOFF11_EL2	/* index 1 */
+	write	AMEVCNTVOFF12_EL2	/* index 2 */
+	write	AMEVCNTVOFF13_EL2	/* index 3 */
+	write	AMEVCNTVOFF14_EL2	/* index 4 */
+	write	AMEVCNTVOFF15_EL2	/* index 5 */
+	write	AMEVCNTVOFF16_EL2	/* index 6 */
+	write	AMEVCNTVOFF17_EL2	/* index 7 */
+	write	AMEVCNTVOFF18_EL2	/* index 8 */
+	write	AMEVCNTVOFF19_EL2	/* index 9 */
+	write	AMEVCNTVOFF1A_EL2	/* index 10 */
+	write	AMEVCNTVOFF1B_EL2	/* index 11 */
+	write	AMEVCNTVOFF1C_EL2	/* index 12 */
+	write	AMEVCNTVOFF1D_EL2	/* index 13 */
+	write	AMEVCNTVOFF1E_EL2	/* index 14 */
+	write	AMEVCNTVOFF1F_EL2	/* index 15 */
+endfunc amu_group1_voffset_write_internal
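
Each accessor computes a branch into a table of fixed-size mrs/ret (or
msr/ret) pairs: the index is scaled by 8 bytes, plus 4 more per entry
for the leading "bti j" when branch target identification is enabled.
The read/write macros used in the tables are assumed to expand roughly
as follows (matching the counter helpers earlier in this file):

    	.macro	read reg	/* return \reg in x0 */
    	mrs	x0, \reg
    	ret
    	.endm

    	.macro	write reg	/* write x1 (val) to \reg */
    	msr	\reg, x1
    	ret
    	.endm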
diff --git a/tftf/tests/extensions/amu/test_amu.c b/tftf/tests/extensions/amu/test_amu.c
index 8799aa5..fb7e90d 100644
--- a/tftf/tests/extensions/amu/test_amu.c
+++ b/tftf/tests/extensions/amu/test_amu.c
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018, Arm Limited. All rights reserved.
+ * Copyright (c) 2018-2020, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -117,7 +117,7 @@
  */
 test_result_t test_amu_valid_ctr(void)
 {
-	int i;
+	unsigned int i;
 
 	if (!amu_supported())
 		return TEST_RESULT_SKIPPED;
@@ -126,7 +126,7 @@
 	if (read_amcntenset0_el0() != AMU_GROUP0_COUNTERS_MASK)
 		return TEST_RESULT_SKIPPED;
 
-	for (i = 0; i < AMU_GROUP0_NR_COUNTERS; i++) {
+	for (i = 0U; i < AMU_GROUP0_NR_COUNTERS; i++) {
 		uint64_t value;
 
 		value = amu_group0_cnt_read(i);
@@ -145,8 +145,8 @@
  */
 test_result_t test_amu_suspend_resume(void)
 {
-	uint64_t group0_ctrs[AMU_GROUP0_MAX_NR_COUNTERS];
-	int i;
+	uint64_t group0_ctrs[AMU_GROUP0_NR_COUNTERS];
+	unsigned int i;
 
 	if (!amu_supported())
 		return TEST_RESULT_SKIPPED;
@@ -156,9 +156,37 @@
 		return TEST_RESULT_SKIPPED;
 
 	/* Save counters values before suspend */
-	for (i = 0; i < AMU_GROUP0_NR_COUNTERS; i++)
+	for (i = 0U; i < AMU_GROUP0_NR_COUNTERS; i++)
 		group0_ctrs[i] = amu_group0_cnt_read(i);
 
+	/*
+	 * If the v8.6 AMU enhancements are supported, make sure the
+	 * save/restore works for the virtual offset values. Write known
+	 * values into the virtual offsets, then check that they are still
+	 * there after resume. The virtual offset registers are only
+	 * accessible in AArch64 state at EL2 or EL3.
+	 */
+#if __aarch64__
+	if (amu_supported_8_6()) {
+		/* Enabling voffsets in HCR_EL2. */
+		write_hcr_el2(read_hcr_el2() | HCR_AMVOFFEN_BIT);
+
+		/* Writing known values into voffset registers. */
+		amu_group0_voffset_write(0U, 0xDEADBEEF);
+		amu_group0_voffset_write(2U, 0xDEADBEEF);
+		amu_group0_voffset_write(3U, 0xDEADBEEF);
+
+#if AMU_GROUP1_NR_COUNTERS
+		u_register_t amcg1idr =
+			read_amcg1idr_el0() >> AMCG1IDR_VOFF_SHIFT;
+
+		for (i = 0U; i < AMU_GROUP1_NR_COUNTERS; i++) {
+			if (((amcg1idr >> i) & 1U) != 0U) {
+				amu_group1_voffset_write(i, 0xDEADBEEF);
+			}
+		}
+#endif
+	}
+#endif
+
 	/* Suspend/resume current core */
 	suspend_and_resume_this_cpu();
 
@@ -178,5 +206,34 @@
 		}
 	}
 
+#if __aarch64__
+	if (amu_supported_8_6()) {
+		for (i = 0U; i < AMU_GROUP0_NR_COUNTERS; i++) {
+			if ((i != 1U) &&
+				(amu_group0_voffset_read(i) != 0xDEADBEEF)) {
+				tftf_testcase_printf(
+					"Invalid G0 voffset %u: 0x%llx\n", i,
+					amu_group0_voffset_read(i));
+				return TEST_RESULT_FAIL;
+			}
+		}
+
+#if AMU_GROUP1_NR_COUNTERS
+		u_register_t amcg1idr =
+			read_amcg1idr_el0() >> AMCG1IDR_VOFF_SHIFT;
+
+		for (i = 0U; i < AMU_GROUP1_NR_COUNTERS; i++) {
+			if (((amcg1idr >> i) & 1U) != 0U) {
+				if (amu_group1_voffset_read(i) != 0xDEADBEEF) {
+					tftf_testcase_printf(
+						"Invalid G1 voffset %u: 0x%llx\n",
+						i, amu_group1_voffset_read(i));
+					return TEST_RESULT_FAIL;
+				}
+			}
+		}
+#endif
+	}
+#endif
+
 	return TEST_RESULT_SUCCESS;
 }