Merge changes from topic "for-lts-v2.10.13" into lts-v2.10

* changes:
  fix(cpus): workaround for Neoverse-V3 erratum 3701767
  fix(cpus): workaround for Neoverse-N3 erratum 3699563
  fix(cpus): workaround for Neoverse-N2 erratum 3701773
  fix(cpus): workaround for Cortex-X925 erratum 3701747
  fix(cpus): workaround for Cortex-X4 erratum 3701758
  fix(cpus): workaround for Cortex-X3 erratum 3701769
  fix(cpus): workaround for Cortex-X2 erratum 3701772
  fix(cpus): workaround for Cortex-A725 erratum 3699564
  fix(cpus): workaround for Cortex-A720 erratum 3699561
  fix(cpus): workaround for Cortex-A715 erratum 3699560
  fix(cpus): workaround for Cortex-A710 erratum 3701772
  fix(cpus): workaround for accessing ICH_VMCR_EL2
  refactor(errata-abi): move EXTRACT_PARTNUM to arch.h
  chore(cpus): fix incorrect header macro
  chore: rename hermes to neoverse-n3
  refactor(fvp): move cpus with nomodel
  fix(cm): update gic el2 sysregs save/restore mechanism
  fix(security): apply SMCCC_ARCH_WORKAROUND_4 to affected cpus
  fix(security): add support in cpu_ops for CVE-2024-7881
  fix(security): add CVE-2024-7881 mitigation to Cortex-X3
  fix(security): add CVE-2024-7881 mitigation to Neoverse-V3
  fix(security): add CVE-2024-7881 mitigation to Neoverse-V2
  fix(security): add CVE-2024-7881 mitigation to Cortex-X925
  fix(security): add CVE-2024-7881 mitigation to Cortex-X4
  fix(security): enable WORKAROUND_CVE_2024_7881 build option
diff --git a/docs/design/cpu-specific-build-macros.rst b/docs/design/cpu-specific-build-macros.rst
index 369ec6f..c3683f1 100644
--- a/docs/design/cpu-specific-build-macros.rst
+++ b/docs/design/cpu-specific-build-macros.rst
@@ -38,6 +38,10 @@
    in EL3 FW. This build option should be set to 1 if the target platform contains
    at least 1 CPU that requires this mitigation. Defaults to 1.
 
+-  ``WORKAROUND_CVE_2024_7881``: Enables mitigation for `CVE-2024-7881`.
+   This build option should be set to 1 if the target platform contains at
+   least 1 CPU that requires this mitigation. Defaults to 1.
+
 .. _arm_cpu_macros_errata_workarounds:
 
 CPU Errata Workarounds
@@ -574,6 +578,12 @@
    CPU, this affects all configurations. This needs to be enabled for revisions
    r0p0 and r0p1. It has been fixed in r0p2.
 
+For Neoverse V3, the following errata build flags are defined :
+
+- ``ERRATA_V3_3701767``: This applies errata 3701767 workaround to Neoverse-V3
+  CPU. This needs to be enabled for revisions r0p0, r0p1, r0p2 of the CPU and
+  is still open.
+
 For Cortex-A710, the following errata build flags are defined :
 
 -  ``ERRATA_A710_1987031``: This applies errata 1987031 workaround to
@@ -649,6 +659,10 @@
    CPU. This needs to be enabled for revisions r0p0, r1p0, r2p0 and r2p1 of the
    CPU and is still open.
 
+- ``ERRATA_A710_3701772``: This applies errata 3701772 workaround to Cortex-A710
+  CPU. This needs to be enabled for revisions r0p0, r1p0, r2p0, r2p1 of the
+  CPU and is still open.
+
 For Neoverse N2, the following errata build flags are defined :
 
 -  ``ERRATA_N2_2002655``: This applies errata 2002655 workaround to Neoverse-N2
@@ -720,6 +734,15 @@
    CPU. This needs to be enabled for revisions r0p0, r0p1 and r0p2. It is fixed
    in r0p3.
 
+-  ``ERRATA_N2_3701773``: This applies errata 3701773 workaround to Neoverse-N2
+   CPU. This needs to be enabled for revisions r0p0, r0p1, r0p2, r0p3 and is
+   still open.
+
+For Neoverse N3, the following errata build flags are defined :
+
+-  ``ERRATA_N3_3699563``: This applies errata 3699563 workaround to Neoverse-N3
+   CPU. This needs to be enabled for revision r0p0 and is still open.
+
 For Cortex-X2, the following errata build flags are defined :
 
 -  ``ERRATA_X2_2002765``: This applies errata 2002765 workaround to Cortex-X2
@@ -774,6 +797,10 @@
    CPU. This needs to be enabled for revisions r0p0, r1p0, r2p0 and r2p1 of the
    CPU and it is still open.
 
+-  ``ERRATA_X2_3701772``: This applies errata 3701772 workaround to Cortex-X2
+   CPU. This needs to be enabled for revisions r0p0, r1p0, r2p0 and r2p1 of the
+   CPU and it is still open.
+
 For Cortex-X3, the following errata build flags are defined :
 
 - ``ERRATA_X3_2070301``: This applies errata 2070301 workaround to the Cortex-X3
@@ -821,6 +848,10 @@
   CPU. This needs to be enabled only for revisions r0p0, r1p0 and r1p1 of the
   CPU. It is fixed in r1p2.
 
+- ``ERRATA_X3_3701769``: This applies errata 3701769 workaround to Cortex-X3
+  CPU. This needs to be enabled only for revisions r0p0, r1p0, r1p1 and r1p2
+  of the CPU and it is still open.
+
 For Cortex-X4, the following errata build flags are defined :
 
 - ``ERRATA_X4_2701112``: This applies erratum 2701112 workaround to Cortex-X4
@@ -854,6 +885,15 @@
 - ``ERRATA_X4_3076789``: This applies errata 3076789 workaround to Cortex-X4
   CPU. This needs to be enabled for revisions r0p0 and r0p1. It is fixed in r0p2.
 
+- ``ERRATA_X4_3701758``: This applies errata 3701758 workaround to Cortex-X4
+  CPU. This needs to be enabled for revisions r0p0, r0p1, r0p2 and r0p3.
+  It is still open.
+
+For Cortex-X925, the following errata build flags are defined :
+
+- ``ERRATA_X925_3701747``: This applies errata 3701747 workaround to Cortex-X925
+  CPU. This needs to be enabled for revisions r0p0 and r0p1. It is still open.
+
 For Cortex-A510, the following errata build flags are defined :
 
 -  ``ERRATA_A510_1922240``: This applies errata 1922240 workaround to
@@ -952,6 +992,10 @@
    Cortex-A715 CPU. This needs to be enabled for revisions r0p0, r1p0
    and r1p1. It is fixed in r1p2.
 
+-  ``ERRATA_A715_3699560``: This applies errata 3699560 workaround to
+   Cortex-A715 CPU. This needs to be enabled for revisions r0p0, r1p0,
+   r1p2, r1p3. It is still open.
+
 For Cortex-A720, the following errata build flags are defined :
 
 -  ``ERRATA_A720_2792132``: This applies errata 2792132 workaround to
@@ -970,6 +1014,16 @@
    Cortex-A720 CPU. This needs to be enabled for revisions r0p0 and r0p1.
    It is fixed in r0p2.
 
+-  ``ERRATA_A720_3699561``: This applies errata 3699561 workaround to
+   Cortex-A720 CPU. This needs to be enabled for revisions r0p0, r0p1
+   and r0p2. It is still open.
+
+For Cortex-A725, the following errata build flags are defined :
+
+-  ``ERRATA_A725_3699564``: This applies errata 3699564 workaround to
+   Cortex-A725 CPU. This needs to be enabled for revisions r0p0 and r0p1.
+   It is fixed in r0p2.
+
 DSU Errata Workarounds
 ----------------------
 
@@ -1055,7 +1109,7 @@
 
 --------------
 
-*Copyright (c) 2014-2024, Arm Limited and Contributors. All rights reserved.*
+*Copyright (c) 2014-2025, Arm Limited and Contributors. All rights reserved.*
 
 .. _CVE-2017-5715: http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-5715
 .. _CVE-2018-3639: http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-3639
diff --git a/include/arch/aarch64/arch.h b/include/arch/aarch64/arch.h
index 7d6ff5f..8ac6818 100644
--- a/include/arch/aarch64/arch.h
+++ b/include/arch/aarch64/arch.h
@@ -24,6 +24,9 @@
 #define MIDR_PN_MASK		U(0xfff)
 #define MIDR_PN_SHIFT		U(0x4)
 
+/* Extracts the CPU part number from MIDR for checking CPU match */
+#define EXTRACT_PARTNUM(x)     ((x >> MIDR_PN_SHIFT) & MIDR_PN_MASK)
+
 /*******************************************************************************
  * MPIDR macros
  ******************************************************************************/
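For illustration only (not part of the patch): EXTRACT_PARTNUM() is meant to be applied both to a live MIDR_EL1 value and to the per-CPU *_MIDR constants so that only the part-number fields are compared, which is exactly how errata_common.c uses it further down. A minimal C sketch of that usage, relying only on helpers that already exist in the tree (read_midr(), CORTEX_A710_MIDR):

#include <stdbool.h>

#include <arch.h>		/* EXTRACT_PARTNUM */
#include <arch_helpers.h>	/* read_midr() */
#include <cortex_a710.h>	/* CORTEX_A710_MIDR */

/* True when the executing core is a Cortex-A710, ignoring the
 * implementer, variant and revision fields of MIDR_EL1. */
static bool core_is_cortex_a710(void)
{
	return EXTRACT_PARTNUM(read_midr()) ==
	       EXTRACT_PARTNUM(CORTEX_A710_MIDR);
}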
diff --git a/include/lib/cpus/aarch64/cortex_a710.h b/include/lib/cpus/aarch64/cortex_a710.h
index 9df8d47..650193c 100644
--- a/include/lib/cpus/aarch64/cortex_a710.h
+++ b/include/lib/cpus/aarch64/cortex_a710.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2023, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -67,4 +67,8 @@
 #define CORTEX_A710_CPUPOR_EL3					S3_6_C15_C8_2
 #define CORTEX_A710_CPUPMR_EL3					S3_6_C15_C8_3
 
+#ifndef __ASSEMBLER__
+long check_erratum_cortex_a710_3701772(long cpu_rev);
+#endif /* __ASSEMBLER__ */
+
 #endif /* CORTEX_A710_H */
diff --git a/include/lib/cpus/aarch64/cortex_a715.h b/include/lib/cpus/aarch64/cortex_a715.h
index c7f50db..e9bd886 100644
--- a/include/lib/cpus/aarch64/cortex_a715.h
+++ b/include/lib/cpus/aarch64/cortex_a715.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -38,4 +38,8 @@
 #define CORTEX_A715_CPUPWRCTLR_EL1				S3_0_C15_C2_7
 #define CORTEX_A715_CPUPWRCTLR_EL1_CORE_PWRDN_BIT		U(1)
 
+#ifndef __ASSEMBLER__
+long check_erratum_cortex_a715_3699560(long cpu_rev);
+#endif /* __ASSEMBLER__ */
+
 #endif /* CORTEX_A715_H */
diff --git a/include/lib/cpus/aarch64/cortex_a720.h b/include/lib/cpus/aarch64/cortex_a720.h
index 129c1ee..670438f 100644
--- a/include/lib/cpus/aarch64/cortex_a720.h
+++ b/include/lib/cpus/aarch64/cortex_a720.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -38,4 +38,8 @@
 #define CORTEX_A720_CPUPWRCTLR_EL1				S3_0_C15_C2_7
 #define CORTEX_A720_CPUPWRCTLR_EL1_CORE_PWRDN_BIT		U(1)
 
+#ifndef __ASSEMBLER__
+long check_erratum_cortex_a720_3699561(long cpu_rev);
+#endif /* __ASSEMBLER__ */
+
 #endif /* CORTEX_A720_H */
diff --git a/include/lib/cpus/aarch64/cortex_a725.h b/include/lib/cpus/aarch64/cortex_a725.h
index 123c5ab..6ca8edc 100644
--- a/include/lib/cpus/aarch64/cortex_a725.h
+++ b/include/lib/cpus/aarch64/cortex_a725.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2023-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2023-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -20,4 +20,8 @@
 #define CORTEX_A725_CPUPWRCTLR_EL1				S3_0_C15_C2_7
 #define CORTEX_A725_CPUPWRCTLR_EL1_CORE_PWRDN_BIT		U(1)
 
+#ifndef __ASSEMBLER__
+long check_erratum_cortex_a725_3699564(long cpu_rev);
+#endif /* __ASSEMBLER__ */
+
 #endif /* CORTEX_A725_H */
diff --git a/include/lib/cpus/aarch64/cortex_x2.h b/include/lib/cpus/aarch64/cortex_x2.h
index 0f97b1e..9ec5177 100644
--- a/include/lib/cpus/aarch64/cortex_x2.h
+++ b/include/lib/cpus/aarch64/cortex_x2.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2023, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -64,4 +64,8 @@
 #define CORTEX_X2_IMP_CPUPOR_EL3				S3_6_C15_C8_2
 #define CORTEX_X2_IMP_CPUPMR_EL3				S3_6_C15_C8_3
 
+#ifndef __ASSEMBLER__
+long check_erratum_cortex_x2_3701772(long cpu_rev);
+#endif /* __ASSEMBLER__ */
+
 #endif /* CORTEX_X2_H */
diff --git a/include/lib/cpus/aarch64/cortex_x3.h b/include/lib/cpus/aarch64/cortex_x3.h
index c5f820c..8834db1 100644
--- a/include/lib/cpus/aarch64/cortex_x3.h
+++ b/include/lib/cpus/aarch64/cortex_x3.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -63,4 +63,8 @@
 #define CORTEX_X3_CPUACTLR3_EL1			S3_0_C15_C1_2
 #define CORTEX_X3_CPUACTLR3_EL1_BIT_47		(ULL(1) << 47)
 
+#ifndef __ASSEMBLER__
+long check_erratum_cortex_x3_3701769(long cpu_rev);
+#endif /* __ASSEMBLER__ */
+
 #endif /* CORTEX_X3_H */
diff --git a/include/lib/cpus/aarch64/cortex_x4.h b/include/lib/cpus/aarch64/cortex_x4.h
index f701216..8ef830a 100644
--- a/include/lib/cpus/aarch64/cortex_x4.h
+++ b/include/lib/cpus/aarch64/cortex_x4.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2022-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2022-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -36,6 +36,11 @@
 #define CORTEX_X4_CPUACTLR5_EL1				S3_0_C15_C8_0
 #define CORTEX_X4_CPUACTLR5_EL1_BIT_14			(ULL(1) << 14)
 
+/*******************************************************************************
+ * CPU Auxiliary control register 6 specific definitions
+ ******************************************************************************/
+#define CORTEX_X4_CPUACTLR6_EL1				S3_0_C15_C8_1
+
 #ifndef __ASSEMBLER__
 #if ERRATA_X4_2726228
 long check_erratum_cortex_x4_2726228(long cpu_rev);
@@ -45,6 +50,8 @@
        return 0;
 }
 #endif /* ERRATA_X4_2726228 */
+
+long check_erratum_cortex_x4_3701758(long cpu_rev);
 #endif /* __ASSEMBLER__ */
 
 #endif /* CORTEX_X4_H */
diff --git a/include/lib/cpus/aarch64/cortex_x925.h b/include/lib/cpus/aarch64/cortex_x925.h
index 38aafcf..1866828 100644
--- a/include/lib/cpus/aarch64/cortex_x925.h
+++ b/include/lib/cpus/aarch64/cortex_x925.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2023-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2023-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -20,4 +20,13 @@
 #define CORTEX_X925_CPUPWRCTLR_EL1				S3_0_C15_C2_7
 #define CORTEX_X925_CPUPWRCTLR_EL1_CORE_PWRDN_BIT		U(1)
 
+/*******************************************************************************
+ * CPU Auxiliary control register 6 specific definitions
+ ******************************************************************************/
+#define CORTEX_X925_CPUACTLR6_EL1                                S3_0_C15_C8_1
+
+#ifndef __ASSEMBLER__
+long check_erratum_cortex_x925_3701747(long cpu_rev);
+#endif /* __ASSEMBLER__ */
+
 #endif /* CORTEX_X925_H */
diff --git a/include/lib/cpus/aarch64/cpu_macros.S b/include/lib/cpus/aarch64/cpu_macros.S
index 630fa20..19f45e8 100644
--- a/include/lib/cpus/aarch64/cpu_macros.S
+++ b/include/lib/cpus/aarch64/cpu_macros.S
@@ -63,6 +63,10 @@
 	 *	This is a placeholder for future per CPU operations. Currently,
 	 *	some CPUs use this entry to set a test function to determine if
 	 *	the workaround for CVE-2022-23960 needs to be applied or not.
+	 * _extra4:
+	 *	This is a placeholder for future per CPU operations. Currently,
+	 *	some CPUs use this entry to set a test function to determine if
+	 *	the workaround for CVE-2024-7881 needs to be applied or not.
 	 * _e_handler:
 	 *	This is a placeholder for future per CPU exception handlers.
 	 * _power_down_ops:
@@ -75,7 +79,8 @@
 	 *	used to handle power down at subsequent levels
 	 */
 	.macro declare_cpu_ops_base _name:req, _midr:req, _resetfunc:req, \
-		_extra1:req, _extra2:req, _extra3:req, _e_handler:req, _power_down_ops:vararg
+		_extra1:req, _extra2:req, _extra3:req, _extra4:req, \
+		_e_handler:req, _power_down_ops:vararg
 	.section .cpu_ops, "a"
 	.align 3
 	.type cpu_ops_\_name, %object
@@ -86,6 +91,7 @@
 	.quad \_extra1
 	.quad \_extra2
 	.quad \_extra3
+	.quad \_extra4
 	.quad \_e_handler
 #ifdef IMAGE_BL31
 	/* Insert list of functions */
@@ -154,21 +160,28 @@
 
 	.macro declare_cpu_ops _name:req, _midr:req, _resetfunc:req, \
 		_power_down_ops:vararg
-		declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, 0, 0, \
+		declare_cpu_ops_base \_name, \_midr, \_resetfunc, 0, 0, 0, 0, 0, \
 			\_power_down_ops
 	.endm
 
 	.macro declare_cpu_ops_eh _name:req, _midr:req, _resetfunc:req, \
 		_e_handler:req, _power_down_ops:vararg
 		declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
-			0, 0, 0, \_e_handler, \_power_down_ops
+			0, 0, 0, 0, \_e_handler, \_power_down_ops
 	.endm
 
 	.macro declare_cpu_ops_wa _name:req, _midr:req, \
 		_resetfunc:req, _extra1:req, _extra2:req, \
 		_extra3:req, _power_down_ops:vararg
 		declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
-			\_extra1, \_extra2, \_extra3, 0, \_power_down_ops
+			\_extra1, \_extra2, \_extra3, 0, 0, \_power_down_ops
+	.endm
+
+	.macro declare_cpu_ops_wa_4 _name:req, _midr:req, \
+		_resetfunc:req, _extra1:req, _extra2:req, \
+		_extra3:req, _extra4:req, _power_down_ops:vararg
+		declare_cpu_ops_base \_name, \_midr, \_resetfunc, \
+			\_extra1, \_extra2, \_extra3, \_extra4, 0, \_power_down_ops
 	.endm
 
 /* TODO can be deleted once all CPUs have been converted */
diff --git a/include/lib/cpus/aarch64/neoverse_hermes.h b/include/lib/cpus/aarch64/neoverse_hermes.h
deleted file mode 100644
index 22492c3..0000000
--- a/include/lib/cpus/aarch64/neoverse_hermes.h
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright (c) 2023, Arm Limited. All rights reserved.
- *
- * SPDX-License-Identifier: BSD-3-Clause
- */
-
-#ifndef NEOVERSE_HERMES_H
-#define NEOVERSE_HERMES_H
-
-#define NEOVERSE_HERMES_MIDR				U(0x410FD8E0)
-
-/*******************************************************************************
- * CPU Extended Control register specific definitions
- ******************************************************************************/
-#define NEOVERSE_HERMES_CPUECTLR_EL1			S3_0_C15_C1_4
-
-/*******************************************************************************
- * CPU Power Control register specific definitions
- ******************************************************************************/
-#define NEOVERSE_HERMES_CPUPWRCTLR_EL1			S3_0_C15_C2_7
-#define NEOVERSE_HERMES_CPUPWRCTLR_EL1_CORE_PWRDN_BIT	U(1)
-
-#endif /* NEOVERSE_HERMES_H */
diff --git a/include/lib/cpus/aarch64/neoverse_n2.h b/include/lib/cpus/aarch64/neoverse_n2.h
index b379fab..f5837d4 100644
--- a/include/lib/cpus/aarch64/neoverse_n2.h
+++ b/include/lib/cpus/aarch64/neoverse_n2.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2020-2023, Arm Limited. All rights reserved.
+ * Copyright (c) 2020-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -69,4 +69,8 @@
 #define CPUECTLR2_EL1_TXREQ_LSB				U(0)
 #define CPUECTLR2_EL1_TXREQ_WIDTH			U(3)
 
+#ifndef __ASSEMBLER__
+long check_erratum_neoverse_n2_3701773(long cpu_rev);
+#endif /* __ASSEMBLER__ */
+
 #endif /* NEOVERSE_N2_H */
diff --git a/include/lib/cpus/aarch64/neoverse_n3.h b/include/lib/cpus/aarch64/neoverse_n3.h
new file mode 100644
index 0000000..7b9d90e
--- /dev/null
+++ b/include/lib/cpus/aarch64/neoverse_n3.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (c) 2023-2025, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#ifndef NEOVERSE_N3_H
+#define NEOVERSE_N3_H
+
+#define NEOVERSE_N3_MIDR				U(0x410FD8E0)
+
+/*******************************************************************************
+ * CPU Extended Control register specific definitions
+ ******************************************************************************/
+#define NEOVERSE_N3_CPUECTLR_EL1			S3_0_C15_C1_4
+
+/*******************************************************************************
+ * CPU Power Control register specific definitions
+ ******************************************************************************/
+#define NEOVERSE_N3_CPUPWRCTLR_EL1			S3_0_C15_C2_7
+#define NEOVERSE_N3_CPUPWRCTLR_EL1_CORE_PWRDN_BIT	U(1)
+
+#ifndef __ASSEMBLER__
+long check_erratum_neoverse_n3_3699563(long cpu_rev);
+#endif /* __ASSEMBLER__ */
+
+#endif /* NEOVERSE_N3_H */
diff --git a/include/lib/cpus/aarch64/neoverse_v2.h b/include/lib/cpus/aarch64/neoverse_v2.h
index 39a6607..a0e7130 100644
--- a/include/lib/cpus/aarch64/neoverse_v2.h
+++ b/include/lib/cpus/aarch64/neoverse_v2.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2023, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -57,4 +57,9 @@
 #define NEOVERSE_V2_CPUACTLR5_EL1_BIT_56		(ULL(1) << 56)
 #define NEOVERSE_V2_CPUACTLR5_EL1_BIT_55		(ULL(1) << 55)
 
+/*******************************************************************************
+ * CPU Auxiliary control register 6 specific definitions
+ ******************************************************************************/
+#define NEOVERSE_V2_CPUACTLR6_EL1			S3_0_C15_C8_1
+
 #endif /* NEOVERSE_V2_H */
diff --git a/include/lib/cpus/aarch64/neoverse_v3.h b/include/lib/cpus/aarch64/neoverse_v3.h
index e5f75ba..5a828c0 100644
--- a/include/lib/cpus/aarch64/neoverse_v3.h
+++ b/include/lib/cpus/aarch64/neoverse_v3.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2022-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2022-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -22,7 +22,16 @@
 /*******************************************************************************
  * CPU Power Control register specific definitions
  ******************************************************************************/
-#define NEOVERSE_V3_CPUPWRCTLR_EL1			S3_0_C15_C2_7
+#define NEOVERSE_V3_CPUPWRCTLR_EL1				S3_0_C15_C2_7
 #define NEOVERSE_V3_CPUPWRCTLR_EL1_CORE_PWRDN_BIT		U(1)
 
+/*******************************************************************************
+ * CPU Auxiliary control register 6 specific definitions
+ ******************************************************************************/
+#define NEOVERSE_V3_CPUACTLR6_EL1                                S3_0_C15_C8_1
+
+#ifndef __ASSEMBLER__
+long check_erratum_neoverse_v3_3701767(long cpu_rev);
+#endif /* __ASSEMBLER__ */
+
 #endif /* NEOVERSE_V3_H */
diff --git a/include/lib/cpus/cpu_ops.h b/include/lib/cpus/cpu_ops.h
index 8b36ff1..3fce66a 100644
--- a/include/lib/cpus/cpu_ops.h
+++ b/include/lib/cpus/cpu_ops.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2023, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2023-2025, Arm Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -28,6 +28,7 @@
 #define CPU_NO_EXTRA1_FUNC		0
 #define CPU_NO_EXTRA2_FUNC		0
 #define CPU_NO_EXTRA3_FUNC		0
+#define CPU_NO_EXTRA4_FUNC		0
 #endif /* __aarch64__ */
 
 
@@ -45,6 +46,7 @@
 #define CPU_EXTRA1_FUNC_SIZE	CPU_WORD_SIZE
 #define CPU_EXTRA2_FUNC_SIZE	CPU_WORD_SIZE
 #define CPU_EXTRA3_FUNC_SIZE	CPU_WORD_SIZE
+#define CPU_EXTRA4_FUNC_SIZE	CPU_WORD_SIZE
 #define CPU_E_HANDLER_FUNC_SIZE CPU_WORD_SIZE
 /* The power down core and cluster is needed only in BL31 and BL32 */
 #if defined(IMAGE_BL31) || defined(IMAGE_BL32)
@@ -91,7 +93,8 @@
 #define CPU_EXTRA1_FUNC		CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
 #define CPU_EXTRA2_FUNC		CPU_EXTRA1_FUNC + CPU_EXTRA1_FUNC_SIZE
 #define CPU_EXTRA3_FUNC		CPU_EXTRA2_FUNC + CPU_EXTRA2_FUNC_SIZE
-#define CPU_E_HANDLER_FUNC	CPU_EXTRA3_FUNC + CPU_EXTRA3_FUNC_SIZE
+#define CPU_EXTRA4_FUNC		CPU_EXTRA3_FUNC + CPU_EXTRA3_FUNC_SIZE
+#define CPU_E_HANDLER_FUNC	CPU_EXTRA4_FUNC + CPU_EXTRA4_FUNC_SIZE
 #define CPU_PWR_DWN_OPS		CPU_E_HANDLER_FUNC + CPU_E_HANDLER_FUNC_SIZE
 #else
 #define CPU_PWR_DWN_OPS		CPU_RESET_FUNC + CPU_RESET_FUNC_SIZE
@@ -122,6 +125,7 @@
 	void (*extra1_func)(void);
 	void (*extra2_func)(void);
 	void (*extra3_func)(void);
+	void (*extra4_func)(void);
 	void (*e_handler_func)(long es);
 #endif /* __aarch64__ */
 #if (defined(IMAGE_BL31) || defined(IMAGE_BL32)) && CPU_MAX_PWR_DWN_OPS
diff --git a/include/lib/cpus/errata.h b/include/lib/cpus/errata.h
index 9a1a644..e0ea741 100644
--- a/include/lib/cpus/errata.h
+++ b/include/lib/cpus/errata.h
@@ -1,15 +1,14 @@
 /*
- * Copyright (c) 2017-2023, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2017-2025, Arm Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
 
-#ifndef ERRATA_REPORT_H
-#define ERRATA_REPORT_H
+#ifndef ERRATA_H
+#define ERRATA_H
 
 #include <lib/cpus/cpu_ops.h>
 
-
 #define ERRATUM_WA_FUNC_SIZE	CPU_WORD_SIZE
 #define ERRATUM_CHECK_FUNC_SIZE	CPU_WORD_SIZE
 #define ERRATUM_ID_SIZE		4
@@ -36,19 +35,6 @@
 void print_errata_status(void);
 void errata_print_msg(unsigned int status, const char *cpu, const char *id);
 
-#if ERRATA_A75_764081
-bool errata_a75_764081_applies(void);
-#else
-static inline bool errata_a75_764081_applies(void)
-{
-	return false;
-}
-#endif
-
-#if ERRATA_A520_2938996 || ERRATA_X4_2726228
-unsigned int check_if_affected_core(void);
-#endif
-
 /*
  * NOTE that this structure will be different on AArch32 and AArch64. The
  * uintptr_t will reflect the change and the alignment will be correct in both.
@@ -67,6 +53,26 @@
 
 CASSERT(sizeof(struct erratum_entry) == ERRATUM_ENTRY_SIZE,
 	assert_erratum_entry_asm_c_different_sizes);
+
+/*
+ * Runtime errata helpers.
+ */
+#if ERRATA_A75_764081
+bool errata_a75_764081_applies(void);
+#else
+static inline bool errata_a75_764081_applies(void)
+{
+       return false;
+}
+#endif
+
+#if ERRATA_A520_2938996 || ERRATA_X4_2726228
+unsigned int check_if_affected_core(void);
+#endif
+
+int check_wa_cve_2024_7881(void);
+bool errata_ich_vmcr_el2_applies(void);
+
 #else
 
 /*
@@ -95,4 +101,4 @@
 /* Macro to get CPU revision code for checking errata version compatibility. */
 #define CPU_REV(r, p)		((r << 4) | p)
 
-#endif /* ERRATA_REPORT_H */
+#endif /* ERRATA_H */
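As a reading aid for the many new check_erratum_ls entries in this series: CPU_REV(r, p) packs the variant and patch level into one byte (CPU_REV(2, 1) == 0x21, i.e. r2p1), and a check_erratum_ls helper reports the erratum as applying when the running core's revision is less than or equal to that value. A hypothetical C rendering of one such generated check, using the ERRATA_* status values already defined for lib/cpus:

/* Sketch of what "check_erratum_ls cortex_a710, ERRATUM(3701772),
 * CPU_REV(2, 1)" generates, expressed in C rather than assembly. */
static long check_erratum_ls_sketch(long cpu_rev)
{
	return (cpu_rev <= CPU_REV(2, 1)) ? ERRATA_APPLIES : ERRATA_NOT_APPLIES;
}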
diff --git a/include/services/arm_arch_svc.h b/include/services/arm_arch_svc.h
index 645b388..85b6b83 100644
--- a/include/services/arm_arch_svc.h
+++ b/include/services/arm_arch_svc.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018-2022, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2025, Arm Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -13,6 +13,7 @@
 #define SMCCC_ARCH_WORKAROUND_1		U(0x80008000)
 #define SMCCC_ARCH_WORKAROUND_2		U(0x80007FFF)
 #define SMCCC_ARCH_WORKAROUND_3		U(0x80003FFF)
+#define SMCCC_ARCH_WORKAROUND_4		U(0x80000004)
 
 #define SMCCC_GET_SOC_VERSION		U(0)
 #define SMCCC_GET_SOC_REVISION		U(1)
diff --git a/lib/cpus/aarch64/cortex_a710.S b/lib/cpus/aarch64/cortex_a710.S
index 76ae8f6..b9b9204 100644
--- a/lib/cpus/aarch64/cortex_a710.S
+++ b/lib/cpus/aarch64/cortex_a710.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -22,6 +22,8 @@
 #error "Cortex A710 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
+.global check_erratum_cortex_a710_3701772
+
 #if WORKAROUND_CVE_2022_23960
 	wa_cve_2022_23960_bhb_vector_table CORTEX_A710_BHB_LOOP_COUNT, cortex_a710
 #endif /* WORKAROUND_CVE_2022_23960 */
@@ -218,6 +220,10 @@
 
 check_erratum_chosen cortex_a710, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
 
+add_erratum_entry cortex_a710, ERRATUM(3701772), ERRATA_A710_3701772, NO_APPLY_AT_RESET
+
+check_erratum_ls cortex_a710, ERRATUM(3701772), CPU_REV(2, 1)
+
 	/* ----------------------------------------------------
 	 * HW will do the cache maintenance while powering down
 	 * ----------------------------------------------------
diff --git a/lib/cpus/aarch64/cortex_a715.S b/lib/cpus/aarch64/cortex_a715.S
index 16be161..737d7b8 100644
--- a/lib/cpus/aarch64/cortex_a715.S
+++ b/lib/cpus/aarch64/cortex_a715.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -22,6 +22,8 @@
 #error "Cortex-A715 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
+.global check_erratum_cortex_a715_3699560
+
 #if WORKAROUND_CVE_2022_23960
 	wa_cve_2022_23960_bhb_vector_table CORTEX_A715_BHB_LOOP_COUNT, cortex_a715
 #endif /* WORKAROUND_CVE_2022_23960 */
@@ -127,6 +129,10 @@
 
 check_erratum_chosen cortex_a715, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
 
+add_erratum_entry cortex_a715, ERRATUM(3699560), ERRATA_A715_3699560, NO_APPLY_AT_RESET
+
+check_erratum_ls cortex_a715, ERRATUM(3699560), CPU_REV(1, 3)
+
 cpu_reset_func_start cortex_a715
 	/* Disable speculative loads */
 	msr	SSBS, xzr
diff --git a/lib/cpus/aarch64/cortex_a720.S b/lib/cpus/aarch64/cortex_a720.S
index 9cc3cbc..e69ec24 100644
--- a/lib/cpus/aarch64/cortex_a720.S
+++ b/lib/cpus/aarch64/cortex_a720.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -22,6 +22,8 @@
 #error "Cortex A720 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
+.global check_erratum_cortex_a720_3699561
+
 #if WORKAROUND_CVE_2022_23960
         wa_cve_2022_23960_bhb_vector_table CORTEX_A720_BHB_LOOP_COUNT, cortex_a720
 #endif /* WORKAROUND_CVE_2022_23960 */
@@ -72,6 +74,10 @@
 
 check_erratum_chosen cortex_a720, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
 
+add_erratum_entry cortex_a720, ERRATUM(3699561), ERRATA_A720_3699561, NO_APPLY_AT_RESET
+
+check_erratum_ls cortex_a720, ERRATUM(3699561), CPU_REV(0, 2)
+
 cpu_reset_func_start cortex_a720
 	/* Disable speculative loads */
 	msr	SSBS, xzr
diff --git a/lib/cpus/aarch64/cortex_a725.S b/lib/cpus/aarch64/cortex_a725.S
index c08945f..7960521 100644
--- a/lib/cpus/aarch64/cortex_a725.S
+++ b/lib/cpus/aarch64/cortex_a725.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2023-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2023-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -21,6 +21,12 @@
 #error "Cortex-A725 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
+.global check_erratum_cortex_a725_3699564
+
+add_erratum_entry cortex_a725, ERRATUM(3699564), ERRATA_A725_3699564, NO_APPLY_AT_RESET
+
+check_erratum_ls cortex_a725, ERRATUM(3699564), CPU_REV(0, 1)
+
 cpu_reset_func_start cortex_a725
 	/* Disable speculative loads */
 	msr	SSBS, xzr
diff --git a/lib/cpus/aarch64/cortex_x2.S b/lib/cpus/aarch64/cortex_x2.S
index 0a39f01..c226d51 100644
--- a/lib/cpus/aarch64/cortex_x2.S
+++ b/lib/cpus/aarch64/cortex_x2.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -22,6 +22,12 @@
 #error "Cortex X2 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
+.global check_erratum_cortex_x2_3701772
+
+add_erratum_entry cortex_x2, ERRATUM(3701772), ERRATA_X2_3701772, NO_APPLY_AT_RESET
+
+check_erratum_ls cortex_x2, ERRATUM(3701772), CPU_REV(2, 1)
+
 #if WORKAROUND_CVE_2022_23960
 	wa_cve_2022_23960_bhb_vector_table CORTEX_X2_BHB_LOOP_COUNT, cortex_x2
 #endif /* WORKAROUND_CVE_2022_23960 */
diff --git a/lib/cpus/aarch64/cortex_x3.S b/lib/cpus/aarch64/cortex_x3.S
index 81b0fd8..c47e457 100644
--- a/lib/cpus/aarch64/cortex_x3.S
+++ b/lib/cpus/aarch64/cortex_x3.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -22,6 +22,12 @@
 #error "Cortex-X3 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
+.global check_erratum_cortex_x3_3701769
+
+add_erratum_entry cortex_x3, ERRATUM(3701769), ERRATA_X3_3701769, NO_APPLY_AT_RESET
+
+check_erratum_ls cortex_x3, ERRATUM(3701769), CPU_REV(1, 2)
+
 #if WORKAROUND_CVE_2022_23960
 	wa_cve_2022_23960_bhb_vector_table CORTEX_X3_BHB_LOOP_COUNT, cortex_x3
 #endif /* WORKAROUND_CVE_2022_23960 */
@@ -111,6 +117,17 @@
 
 check_erratum_chosen cortex_x3, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
 
+workaround_reset_start cortex_x3, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
+	/* ---------------------------------
+	 * Sets BIT41 of CPUACTLR6_EL1 which
+	 * disables L1 Data cache prefetcher
+	 * ---------------------------------
+	 */
+	sysreg_bit_set CORTEX_X3_CPUACTLR6_EL1, BIT(41)
+workaround_reset_end cortex_x3, CVE(2024, 7881)
+
+check_erratum_chosen cortex_x3, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
+
 cpu_reset_func_start cortex_x3
 	/* Disable speculative loads */
 	msr	SSBS, xzr
@@ -153,6 +170,10 @@
 	ret
 endfunc cortex_x3_cpu_reg_dump
 
-declare_cpu_ops cortex_x3, CORTEX_X3_MIDR, \
+declare_cpu_ops_wa_4 cortex_x3, CORTEX_X3_MIDR, \
 	cortex_x3_reset_func, \
+	CPU_NO_EXTRA1_FUNC, \
+	CPU_NO_EXTRA2_FUNC, \
+	CPU_NO_EXTRA3_FUNC, \
+	check_erratum_cortex_x3_7881, \
 	cortex_x3_core_pwr_dwn
diff --git a/lib/cpus/aarch64/cortex_x4.S b/lib/cpus/aarch64/cortex_x4.S
index 644bc58..dbf6f54 100644
--- a/lib/cpus/aarch64/cortex_x4.S
+++ b/lib/cpus/aarch64/cortex_x4.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2022-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2022-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -23,6 +23,7 @@
 #endif
 
 .global check_erratum_cortex_x4_2726228
+.global check_erratum_cortex_x4_3701758
 
 #if WORKAROUND_CVE_2022_23960
         wa_cve_2022_23960_bhb_vector_table CORTEX_X4_BHB_LOOP_COUNT, cortex_x4
@@ -108,6 +109,21 @@
 
 check_erratum_chosen cortex_x4, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
 
+workaround_reset_start cortex_x4, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
+	/* ---------------------------------
+	 * Sets BIT41 of CPUACTLR6_EL1 which
+	 * disables L1 Data cache prefetcher
+	 * ---------------------------------
+	 */
+	sysreg_bit_set CORTEX_X4_CPUACTLR6_EL1, BIT(41)
+workaround_reset_end cortex_x4, CVE(2024, 7881)
+
+check_erratum_chosen cortex_x4, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
+
+add_erratum_entry cortex_x4, ERRATUM(3701758), ERRATA_X4_3701758, NO_APPLY_AT_RESET
+
+check_erratum_ls cortex_x4, ERRATUM(3701758), CPU_REV(0, 3)
+
 cpu_reset_func_start cortex_x4
 	/* Disable speculative loads */
 	msr	SSBS, xzr
@@ -151,6 +167,10 @@
 	ret
 endfunc cortex_x4_cpu_reg_dump
 
-declare_cpu_ops cortex_x4, CORTEX_X4_MIDR, \
+declare_cpu_ops_wa_4 cortex_x4, CORTEX_X4_MIDR, \
 	cortex_x4_reset_func, \
+	CPU_NO_EXTRA1_FUNC, \
+	CPU_NO_EXTRA2_FUNC, \
+	CPU_NO_EXTRA3_FUNC, \
+	check_erratum_cortex_x4_7881, \
 	cortex_x4_core_pwr_dwn
diff --git a/lib/cpus/aarch64/cortex_x925.S b/lib/cpus/aarch64/cortex_x925.S
index dbf7270..5dade1a 100644
--- a/lib/cpus/aarch64/cortex_x925.S
+++ b/lib/cpus/aarch64/cortex_x925.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2023-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2023-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -21,6 +21,12 @@
 #error "Cortex-X925 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
+.global check_erratum_cortex_x925_3701747
+
+add_erratum_entry cortex_x925, ERRATUM(3701747), ERRATA_X925_3701747, NO_APPLY_AT_RESET
+
+check_erratum_ls cortex_x925, ERRATUM(3701747), CPU_REV(0, 1)
+
 /* Disable hardware page aggregation. Enables mitigation for `CVE-2024-5660` */
 workaround_reset_start cortex_x925, CVE(2024, 5660), WORKAROUND_CVE_2024_5660
 	sysreg_bit_set CORTEX_X925_CPUECTLR_EL1, BIT(46)
@@ -28,6 +34,17 @@
 
 check_erratum_ls cortex_x925, CVE(2024, 5660), CPU_REV(0, 1)
 
+workaround_reset_start cortex_x925, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
+	/* ---------------------------------
+         * Sets BIT41 of CPUACTLR6_EL1 which
+         * disables L1 Data cache prefetcher
+         * ---------------------------------
+         */
+	sysreg_bit_set CORTEX_X925_CPUACTLR6_EL1, BIT(41)
+workaround_reset_end cortex_x925, CVE(2024, 7881)
+
+check_erratum_chosen cortex_x925, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
+
 cpu_reset_func_start cortex_x925
 	/* Disable speculative loads */
 	msr	SSBS, xzr
@@ -68,6 +85,10 @@
 	ret
 endfunc cortex_x925_cpu_reg_dump
 
-declare_cpu_ops cortex_x925, CORTEX_X925_MIDR, \
+declare_cpu_ops_wa_4 cortex_x925, CORTEX_X925_MIDR, \
 	cortex_x925_reset_func, \
+	CPU_NO_EXTRA1_FUNC, \
+	CPU_NO_EXTRA2_FUNC, \
+	CPU_NO_EXTRA3_FUNC, \
+	check_erratum_cortex_x925_7881, \
 	cortex_x925_core_pwr_dwn
diff --git a/lib/cpus/aarch64/cpu_helpers.S b/lib/cpus/aarch64/cpu_helpers.S
index 1ae3180..9c3c025 100644
--- a/lib/cpus/aarch64/cpu_helpers.S
+++ b/lib/cpus/aarch64/cpu_helpers.S
@@ -327,6 +327,43 @@
 endfunc check_wa_cve_2017_5715
 
 /*
+ * int check_wa_cve_2024_7881(void);
+ *
+ * This function returns:
+ *  - ERRATA_APPLIES when firmware mitigation is required.
+ *  - ERRATA_NOT_APPLIES when firmware mitigation is _not_ required.
+ *  - ERRATA_MISSING when firmware mitigation would be required but
+ *    is not compiled in.
+ *
+ * NOTE: Must be called only after cpu_ops have been initialized
+ *       in per-CPU data.
+ */
+.globl	check_wa_cve_2024_7881
+func check_wa_cve_2024_7881
+	mrs	x0, tpidr_el3
+#if ENABLE_ASSERTIONS
+	cmp	x0, #0
+	ASM_ASSERT(ne)
+#endif
+	ldr	x0, [x0, #CPU_DATA_CPU_OPS_PTR]
+#if ENABLE_ASSERTIONS
+	cmp	x0, #0
+	ASM_ASSERT(ne)
+#endif
+	ldr	x0, [x0, #CPU_EXTRA4_FUNC]
+	/*
+	 * If the reserved function pointer is NULL, this CPU
+	 * is unaffected by CVE-2024-7881 so bail out.
+	 */
+	cmp	x0, #CPU_NO_EXTRA4_FUNC
+	beq	1f
+	br	x0
+1:
+	mov	x0, #ERRATA_NOT_APPLIES
+	ret
+endfunc check_wa_cve_2024_7881
+
+/*
  * void *wa_cve_2018_3639_get_disable_ptr(void);
  *
  * Returns a function pointer which is used to disable mitigation
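The check_wa_cve_2024_7881 helper added above is roughly the following C, shown only to illustrate the new extra4 slot; the real implementation stays in assembly in cpu_helpers.S, and get_cpu_data()/struct cpu_ops come from the existing cpu_data and cpu_ops headers:

#include <stddef.h>

#include <lib/cpus/cpu_ops.h>		/* struct cpu_ops */
#include <lib/cpus/errata.h>		/* ERRATA_NOT_APPLIES */
#include <lib/el3_runtime/cpu_data.h>	/* get_cpu_data() */

static int check_wa_cve_2024_7881_sketch(void)
{
	/* Fetch the cpu_ops discovered for this core at reset. */
	struct cpu_ops *ops = (struct cpu_ops *)get_cpu_data(cpu_ops_ptr);

	/* CPUs that never registered an extra4 test are unaffected. */
	if (ops->extra4_func == NULL)
		return ERRATA_NOT_APPLIES;

	/* e.g. check_erratum_cortex_x4_7881 on Cortex-X4. */
	return ((int (*)(void))ops->extra4_func)();
}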
diff --git a/lib/cpus/aarch64/neoverse_hermes.S b/lib/cpus/aarch64/neoverse_hermes.S
deleted file mode 100644
index cb90b71..0000000
--- a/lib/cpus/aarch64/neoverse_hermes.S
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (c) 2023, Arm Limited. All rights reserved.
- *
- * SPDX-License-Identifier: BSD-3-Clause
- */
-
-#include <arch.h>
-#include <asm_macros.S>
-#include <common/bl_common.h>
-#include <neoverse_hermes.h>
-#include <cpu_macros.S>
-#include <plat_macros.S>
-
-/* Hardware handled coherency */
-#if HW_ASSISTED_COHERENCY == 0
-#error "Neoverse Hermes must be compiled with HW_ASSISTED_COHERENCY enabled"
-#endif
-
-/* 64-bit only core */
-#if CTX_INCLUDE_AARCH32_REGS == 1
-#error "Neoverse Hermes supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
-#endif
-
-cpu_reset_func_start neoverse_hermes
-	/* Disable speculative loads */
-	msr	SSBS, xzr
-cpu_reset_func_end neoverse_hermes
-
-	/* ----------------------------------------------------
-	 * HW will do the cache maintenance while powering down
-	 * ----------------------------------------------------
-	 */
-func neoverse_hermes_core_pwr_dwn
-	/* ---------------------------------------------------
-	 * Enable CPU power down bit in power control register
-	 * ---------------------------------------------------
-	 */
-	sysreg_bit_set NEOVERSE_HERMES_CPUPWRCTLR_EL1, NEOVERSE_HERMES_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
-	isb
-	ret
-endfunc neoverse_hermes_core_pwr_dwn
-
-errata_report_shim neoverse_hermes
-
-	/* ---------------------------------------------
-	 * This function provides Neoverse Hermes specific
-	 * register information for crash reporting.
-	 * It needs to return with x6 pointing to
-	 * a list of register names in ascii and
-	 * x8 - x15 having values of registers to be
-	 * reported.
-	 * ---------------------------------------------
-	 */
-.section .rodata.neoverse_hermes_regs, "aS"
-neoverse_hermes_regs:  /* The ascii list of register names to be reported */
-	.asciz	"cpuectlr_el1", ""
-
-func neoverse_hermes_cpu_reg_dump
-	adr	x6, neoverse_hermes_regs
-	mrs	x8, NEOVERSE_HERMES_CPUECTLR_EL1
-	ret
-endfunc neoverse_hermes_cpu_reg_dump
-
-declare_cpu_ops neoverse_hermes, NEOVERSE_HERMES_MIDR, \
-	neoverse_hermes_reset_func, \
-	neoverse_hermes_core_pwr_dwn
diff --git a/lib/cpus/aarch64/neoverse_n2.S b/lib/cpus/aarch64/neoverse_n2.S
index afbb94c..0ff5a94 100644
--- a/lib/cpus/aarch64/neoverse_n2.S
+++ b/lib/cpus/aarch64/neoverse_n2.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2020-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2020-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -20,6 +20,12 @@
 #error "Neoverse-N2 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
+.global check_erratum_neoverse_n2_3701773
+
+add_erratum_entry neoverse_n2, ERRATUM(3701773), ERRATA_N2_3701773, NO_APPLY_AT_RESET
+
+check_erratum_ls neoverse_n2, ERRATUM(3701773), CPU_REV(0, 3)
+
 #if WORKAROUND_CVE_2022_23960
 	wa_cve_2022_23960_bhb_vector_table NEOVERSE_N2_BHB_LOOP_COUNT, neoverse_n2
 #endif /* WORKAROUND_CVE_2022_23960 */
diff --git a/lib/cpus/aarch64/neoverse_n3.S b/lib/cpus/aarch64/neoverse_n3.S
new file mode 100644
index 0000000..b484b26
--- /dev/null
+++ b/lib/cpus/aarch64/neoverse_n3.S
@@ -0,0 +1,72 @@
+/*
+ * Copyright (c) 2023-2025, Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ */
+
+#include <arch.h>
+#include <asm_macros.S>
+#include <common/bl_common.h>
+#include <neoverse_n3.h>
+#include <cpu_macros.S>
+#include <plat_macros.S>
+
+/* Hardware handled coherency */
+#if HW_ASSISTED_COHERENCY == 0
+#error "Neoverse-N3 must be compiled with HW_ASSISTED_COHERENCY enabled"
+#endif
+
+/* 64-bit only core */
+#if CTX_INCLUDE_AARCH32_REGS == 1
+#error "Neoverse-N3 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
+#endif
+
+.global check_erratum_neoverse_n3_3699563
+
+add_erratum_entry neoverse_n3, ERRATUM(3699563), ERRATA_N3_3699563, NO_APPLY_AT_RESET
+
+check_erratum_ls neoverse_n3, ERRATUM(3699563), CPU_REV(0, 0)
+
+cpu_reset_func_start neoverse_n3
+	/* Disable speculative loads */
+	msr	SSBS, xzr
+cpu_reset_func_end neoverse_n3
+
+	/* ----------------------------------------------------
+	 * HW will do the cache maintenance while powering down
+	 * ----------------------------------------------------
+	 */
+func neoverse_n3_core_pwr_dwn
+	/* ---------------------------------------------------
+	 * Enable CPU power down bit in power control register
+	 * ---------------------------------------------------
+	 */
+	sysreg_bit_set NEOVERSE_N3_CPUPWRCTLR_EL1, NEOVERSE_N3_CPUPWRCTLR_EL1_CORE_PWRDN_BIT
+	isb
+	ret
+endfunc neoverse_n3_core_pwr_dwn
+
+errata_report_shim neoverse_n3
+
+	/* ---------------------------------------------
+	 * This function provides Neoverse-N3 specific
+	 * register information for crash reporting.
+	 * It needs to return with x6 pointing to
+	 * a list of register names in ascii and
+	 * x8 - x15 having values of registers to be
+	 * reported.
+	 * ---------------------------------------------
+	 */
+.section .rodata.neoverse_n3_regs, "aS"
+neoverse_n3_regs:  /* The ascii list of register names to be reported */
+	.asciz	"cpuectlr_el1", ""
+
+func neoverse_n3_cpu_reg_dump
+	adr	x6, neoverse_n3_regs
+	mrs	x8, NEOVERSE_N3_CPUECTLR_EL1
+	ret
+endfunc neoverse_n3_cpu_reg_dump
+
+declare_cpu_ops neoverse_n3, NEOVERSE_N3_MIDR, \
+	neoverse_n3_reset_func, \
+	neoverse_n3_core_pwr_dwn
diff --git a/lib/cpus/aarch64/neoverse_v2.S b/lib/cpus/aarch64/neoverse_v2.S
index ca62c52..9f14155 100644
--- a/lib/cpus/aarch64/neoverse_v2.S
+++ b/lib/cpus/aarch64/neoverse_v2.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021-2023, Arm Limited. All rights reserved.
+ * Copyright (c) 2021-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -97,6 +97,17 @@
 	wa_cve_2022_23960_bhb_vector_table NEOVERSE_V2_BHB_LOOP_COUNT, neoverse_v2
 #endif /* WORKAROUND_CVE_2022_23960 */
 
+workaround_reset_start neoverse_v2, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
+       /* ---------------------------------
+        * Sets BIT41 of CPUACTLR6_EL1 which
+        * disables L1 Data cache prefetcher
+        * ---------------------------------
+        */
+       sysreg_bit_set NEOVERSE_V2_CPUACTLR6_EL1, BIT(41)
+workaround_reset_end neoverse_v2, CVE(2024, 7881)
+
+check_erratum_chosen neoverse_v2, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
+
 	/* ----------------------------------------------------
 	 * HW will do the cache maintenance while powering down
 	 * ----------------------------------------------------
@@ -138,6 +149,10 @@
 	ret
 endfunc neoverse_v2_cpu_reg_dump
 
-declare_cpu_ops neoverse_v2, NEOVERSE_V2_MIDR, \
+declare_cpu_ops_wa_4 neoverse_v2, NEOVERSE_V2_MIDR, \
 	neoverse_v2_reset_func, \
+	CPU_NO_EXTRA1_FUNC, \
+	CPU_NO_EXTRA2_FUNC, \
+	CPU_NO_EXTRA3_FUNC, \
+	check_erratum_neoverse_v2_7881, \
 	neoverse_v2_core_pwr_dwn
diff --git a/lib/cpus/aarch64/neoverse_v3.S b/lib/cpus/aarch64/neoverse_v3.S
index 031d3c8..716e777 100644
--- a/lib/cpus/aarch64/neoverse_v3.S
+++ b/lib/cpus/aarch64/neoverse_v3.S
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2022-2024, Arm Limited. All rights reserved.
+ * Copyright (c) 2022-2025, Arm Limited. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -22,6 +22,12 @@
 #error "Neoverse V3 supports only AArch64. Compile with CTX_INCLUDE_AARCH32_REGS=0"
 #endif
 
+.global check_erratum_neoverse_v3_3701767
+
+add_erratum_entry neoverse_v3, ERRATUM(3701767), ERRATA_V3_3701767, NO_APPLY_AT_RESET
+
+check_erratum_ls neoverse_v3, ERRATUM(3701767), CPU_REV(0, 2)
+
 #if WORKAROUND_CVE_2022_23960
 	wa_cve_2022_23960_bhb_vector_table NEOVERSE_V3_BHB_LOOP_COUNT, neoverse_v3
 #endif /* WORKAROUND_CVE_2022_23960 */
@@ -46,6 +52,17 @@
 
 check_erratum_chosen neoverse_v3, CVE(2022, 23960), WORKAROUND_CVE_2022_23960
 
+workaround_reset_start neoverse_v3, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
+       /* ---------------------------------
+        * Sets BIT41 of CPUACTLR6_EL1 which
+        * disables L1 Data cache prefetcher
+        * ---------------------------------
+        */
+       sysreg_bit_set NEOVERSE_V3_CPUACTLR6_EL1, BIT(41)
+workaround_reset_end neoverse_v3, CVE(2024, 7881)
+
+check_erratum_chosen neoverse_v3, CVE(2024, 7881), WORKAROUND_CVE_2024_7881
+
 	/* ---------------------------------------------
 	 * HW will do the cache maintenance while powering down
 	 * ---------------------------------------------
@@ -92,6 +109,10 @@
 	neoverse_v3_reset_func, \
 	neoverse_v3_core_pwr_dwn
 
-declare_cpu_ops neoverse_v3, NEOVERSE_V3_MIDR, \
+declare_cpu_ops_wa_4 neoverse_v3, NEOVERSE_V3_MIDR, \
 	neoverse_v3_reset_func, \
+	CPU_NO_EXTRA1_FUNC, \
+	CPU_NO_EXTRA2_FUNC, \
+	CPU_NO_EXTRA3_FUNC, \
+	check_erratum_neoverse_v3_7881, \
 	neoverse_v3_core_pwr_dwn
diff --git a/lib/cpus/cpu-ops.mk b/lib/cpus/cpu-ops.mk
index 8eee2f2..0dbfba9 100644
--- a/lib/cpus/cpu-ops.mk
+++ b/lib/cpus/cpu-ops.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2014-2024, Arm Limited and Contributors. All rights reserved.
+# Copyright (c) 2014-2025, Arm Limited and Contributors. All rights reserved.
 # Copyright (c) 2020-2022, NVIDIA Corporation. All rights reserved.
 #
 # SPDX-License-Identifier: BSD-3-Clause
@@ -37,6 +37,8 @@
 CPU_FLAG_LIST += DYNAMIC_WORKAROUND_CVE_2018_3639
 WORKAROUND_CVE_2022_23960		?=1
 CPU_FLAG_LIST += WORKAROUND_CVE_2022_23960
+WORKAROUND_CVE_2024_7881		?=1
+CPU_FLAG_LIST += WORKAROUND_CVE_2024_7881
 
 # Flags to indicate internal or external Last level cache
 # By default internal
@@ -561,6 +563,11 @@
 # still open.
 CPU_FLAG_LIST += ERRATA_V1_2779461
 
+# Flag to apply erratum 3701767 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revisions r0p0, r0p1 and r0p2 of
+# the Neoverse V3 cpu and is still open.
+CPU_FLAG_LIST += ERRATA_V3_3701767
+
 # Flag to apply erratum 1987031 workaround during reset. This erratum applies
 # to revisions r0p0, r1p0 and r2p0 of the Cortex-A710 cpu and is still open.
 CPU_FLAG_LIST += ERRATA_A710_1987031
@@ -639,6 +646,11 @@
 # open.
 CPU_FLAG_LIST += ERRATA_A710_2778471
 
+# Flag to apply erratum 3701772 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revisions r0p0, r1p0, r2p0, r2p1
+# of the Cortex-A710 cpu and is still open.
+CPU_FLAG_LIST += ERRATA_A710_3701772
+
 # Flag to apply erratum 2002655 workaround during reset. This erratum applies
 # to revisions r0p0 of the Neoverse-N2 cpu and is fixed in r0p1.
 CPU_FLAG_LIST += ERRATA_N2_2002655
@@ -720,6 +732,16 @@
 # to r0p0, r0p1, r0p2 of the Neoverse N2 cpu, it is fixed in r0p3.
 CPU_FLAG_LIST += ERRATA_N2_2779511
 
+# Flag to apply erratum 3701773 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revisions r0p0, r0p1, r0p2 and r0p3
+# of the Neoverse N2 cpu and is still open.
+CPU_FLAG_LIST += ERRATA_N2_3701773
+
+# Flag to apply erratum 3699563 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revision r0p0 of the Neoverse N3
+# cpu and is still open.
+CPU_FLAG_LIST += ERRATA_N3_3699563
+
 # Flag to apply erratum 2002765 workaround during reset. This erratum applies
 # to revisions r0p0, r1p0, and r2p0 of the Cortex-X2 cpu and is still open.
 CPU_FLAG_LIST += ERRATA_X2_2002765
@@ -778,6 +800,11 @@
 # to revisions r0p0, r1p0, r2p0, r2p1 of the Cortex-X2 cpu and it is still open.
 CPU_FLAG_LIST += ERRATA_X2_2778471
 
+# Flag to apply erratum 3701772 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revisions r0p0, r1p0, r2p0 and r2p1
+# of the Cortex-X2 cpu and is still open.
+CPU_FLAG_LIST += ERRATA_X2_3701772
+
 # Flag to apply erratum 2070301 workaround on reset. This erratum applies
 # to revisions r0p0, r1p0, r1p1 and r1p2 of the Cortex-X3 cpu and is
 # still open.
@@ -819,6 +846,11 @@
 # to revisions r0p0, r1p0 and r1p1 of the Cortex-X3 cpu, it is fixed in r1p2.
 CPU_FLAG_LIST += ERRATA_X3_2743088
 
+# Flag to apply erratum 3701769 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revisions r0p0, r1p0, r1p1 and r1p2
+# of the Cortex-X3 cpu and is still open.
+CPU_FLAG_LIST += ERRATA_X3_3701769
+
 # Flag to apply erratum 2779509 workaround on reset. This erratum applies
 # to revisions r0p0, r1p0, r1p1 of the Cortex-X3 cpu, it is fixed in r1p2.
 CPU_FLAG_LIST += ERRATA_X3_2779509
@@ -856,6 +888,16 @@
 # to revisions r0p0 and r0p1 of the Cortex-X4 cpu. It is fixed in r0p2.
 CPU_FLAG_LIST += ERRATA_X4_3076789
 
+# Flag to apply erratum 3701758 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revisions r0p0, r0p1, r0p2 and r0p3
+# of the Cortex-X4 cpu and is still open.
+CPU_FLAG_LIST += ERRATA_X4_3701758
+
+# Flag to apply erratum 3701747 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revisions r0p0, r0p1 of the
+# Cortex-X925 cpu and is still open.
+CPU_FLAG_LIST += ERRATA_X925_3701747
+
 # Flag to apply erratum 1922240 workaround during reset. This erratum applies
 # to revision r0p0 of the Cortex-A510 cpu and is fixed in r0p1.
 CPU_FLAG_LIST += ERRATA_A510_1922240
@@ -981,6 +1023,11 @@
 # only to revision r0p0, r1p0 and r1p1. It is fixed in r1p2.
 CPU_FLAG_LIST += ERRATA_A715_2728106
 
+# Flag to apply erratum 3699560 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revisions r0p0, r1p0, r1p2, r1p3
+# of the Cortex-A715 cpu and is still open.
+CPU_FLAG_LIST += ERRATA_A715_3699560
+
 # Flag to apply erratum 2792132 workaround during reset. This erratum applies
 # to revisions r0p0 and r0p1. It is fixed in r0p2.
 CPU_FLAG_LIST += ERRATA_A720_2792132
@@ -997,12 +1044,22 @@
 # to revisions r0p0 and r0p1. It is fixed in r0p2.
 CPU_FLAG_LIST += ERRATA_A720_2940794
 
+# Flag to apply erratum 3699561 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revisions r0p0, r0p1, r0p2 of
+# the Cortex-A720 cpu and is still open.
+CPU_FLAG_LIST += ERRATA_A720_3699561
+
+# Flag to apply erratum 3699564 workaround during context save/restore of
+# ICH_VMCR_EL2 reg. This erratum applies to revisions r0p0, r0p1 of
+# the Cortex-A725 cpu and is fixed in r0p2.
+CPU_FLAG_LIST += ERRATA_A725_3699564
+
 # Flag to apply DSU erratum 798953. This erratum applies to DSUs revision r0p0.
 # Applying the workaround results in higher DSU power consumption on idle.
 CPU_FLAG_LIST += ERRATA_DSU_798953
 
 # Flag to apply DSU erratum 936184. This erratum applies to DSUs containing
 # the ACP interface and revision < r2p0. Applying the workaround results in
 # higher DSU power consumption on idle.
 CPU_FLAG_LIST += ERRATA_DSU_936184
 
diff --git a/lib/cpus/errata_common.c b/lib/cpus/errata_common.c
index f7a063b..ba9c676 100644
--- a/lib/cpus/errata_common.c
+++ b/lib/cpus/errata_common.c
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2024, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2024-2025, Arm Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -8,11 +8,21 @@
 
 #include <arch.h>
 #include <arch_helpers.h>
-#include <cortex_a520.h>
 #include <cortex_a75.h>
+#include <cortex_a520.h>
+#include <cortex_a710.h>
+#include <cortex_a715.h>
+#include <cortex_a720.h>
+#include <cortex_a725.h>
+#include <cortex_x2.h>
+#include <cortex_x3.h>
 #include <cortex_x4.h>
+#include <cortex_x925.h>
 #include <lib/cpus/cpu_ops.h>
 #include <lib/cpus/errata.h>
+#include <neoverse_n2.h>
+#include <neoverse_n3.h>
+#include <neoverse_v3.h>
 
 #if ERRATA_A520_2938996 || ERRATA_X4_2726228
 unsigned int check_if_affected_core(void)
@@ -41,3 +51,90 @@
 	return false;
 }
 #endif /* ERRATA_A75_764081 */
+
+bool errata_ich_vmcr_el2_applies(void)
+{
+	switch (EXTRACT_PARTNUM(read_midr())) {
+#if ERRATA_A710_3701772
+	case EXTRACT_PARTNUM(CORTEX_A710_MIDR):
+		if (check_erratum_cortex_a710_3701772(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_A710_3701772 */
+
+#if ERRATA_A715_3699560
+	case EXTRACT_PARTNUM(CORTEX_A715_MIDR):
+		if (check_erratum_cortex_a715_3699560(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_A715_3699560 */
+
+#if ERRATA_A720_3699561
+	case EXTRACT_PARTNUM(CORTEX_A720_MIDR):
+		if (check_erratum_cortex_a720_3699561(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_A720_3699561 */
+
+#if ERRATA_A725_3699564
+	case EXTRACT_PARTNUM(CORTEX_A725_MIDR):
+		if (check_erratum_cortex_a725_3699564(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_A725_3699564 */
+
+#if ERRATA_X2_3701772
+	case EXTRACT_PARTNUM(CORTEX_X2_MIDR):
+		if (check_erratum_cortex_x2_3701772(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_X2_3701772 */
+
+#if ERRATA_X3_3701769
+	case EXTRACT_PARTNUM(CORTEX_X3_MIDR):
+		if (check_erratum_cortex_x3_3701769(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_X3_3701769 */
+
+#if ERRATA_X4_3701758
+	case EXTRACT_PARTNUM(CORTEX_X4_MIDR):
+		if (check_erratum_cortex_x4_3701758(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_X4_3701758 */
+
+#if ERRATA_X925_3701747
+	case EXTRACT_PARTNUM(CORTEX_X925_MIDR):
+		if (check_erratum_cortex_x925_3701747(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_X925_3701747 */
+
+#if ERRATA_N2_3701773
+	case EXTRACT_PARTNUM(NEOVERSE_N2_MIDR):
+		if (check_erratum_neoverse_n2_3701773(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_N2_3701773 */
+
+#if ERRATA_N3_3699563
+	case EXTRACT_PARTNUM(NEOVERSE_N3_MIDR):
+		if (check_erratum_neoverse_n3_3699563(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_N3_3699563 */
+
+#if ERRATA_V3_3701767
+	case EXTRACT_PARTNUM(NEOVERSE_V3_MIDR):
+		if (check_erratum_neoverse_v3_3701767(cpu_get_rev_var()) == ERRATA_APPLIES)
+			return true;
+		break;
+#endif /* ERRATA_V3_3701767 */
+
+	default:
+		break;
+	}
+
+	return false;
+}
diff --git a/lib/el3_runtime/aarch64/context_mgmt.c b/lib/el3_runtime/aarch64/context_mgmt.c
index b8a581f..0a92606 100644
--- a/lib/el3_runtime/aarch64/context_mgmt.c
+++ b/lib/el3_runtime/aarch64/context_mgmt.c
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2013-2023, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2025, Arm Limited and Contributors. All rights reserved.
  * Copyright (c) 2022, NVIDIA Corporation. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
@@ -1111,13 +1111,90 @@
 	}
 }
 
+/* ---------------------------------------------------------------------------
+ * The following registers are not added:
+ * ICH_AP0R<n>_EL2
+ * ICH_AP1R<n>_EL2
+ * ICH_LR<n>_EL2
+ *
+ * NOTE: For a system with S-EL2 present but not enabled, accessing
+ * ICC_SRE_EL2 is undefined from EL3. To workaround this change the
+ * SCR_EL3.NS = 1 before accessing this register.
+ * ---------------------------------------------------------------------------
+ */
+static void el2_sysregs_context_save_gic(el2_sysregs_t *ctx, uint32_t security_state)
+{
+	u_register_t scr_el3 = read_scr_el3();
+
+#if defined(SPD_spmd) && SPMD_SPM_AT_SEL2
+	write_ctx_reg(ctx, CTX_ICC_SRE_EL2, read_icc_sre_el2());
+#else
+	write_scr_el3(scr_el3 | SCR_NS_BIT);
+	isb();
+
+	write_ctx_reg(ctx, CTX_ICC_SRE_EL2, read_icc_sre_el2());
+
+	write_scr_el3(scr_el3);
+	isb();
+
+#endif
+	write_ctx_reg(ctx, CTX_ICH_HCR_EL2, read_ich_hcr_el2());
+
+	if (errata_ich_vmcr_el2_applies()) {
+		if (security_state == SECURE) {
+			write_scr_el3(scr_el3 & ~SCR_NS_BIT);
+		} else {
+			write_scr_el3(scr_el3 | SCR_NS_BIT);
+		}
+		isb();
+	}
+
+	write_ctx_reg(ctx, CTX_ICH_VMCR_EL2, read_ich_vmcr_el2());
+
+	if (errata_ich_vmcr_el2_applies()) {
+		write_scr_el3(scr_el3);
+		isb();
+	}
+}
+
+static void el2_sysregs_context_restore_gic(el2_sysregs_t *ctx, uint32_t security_state)
+{
+	u_register_t scr_el3 = read_scr_el3();
+
+#if defined(SPD_spmd) && SPMD_SPM_AT_SEL2
+	write_icc_sre_el2(read_ctx_reg(ctx, CTX_ICC_SRE_EL2));
+#else
+	write_scr_el3(scr_el3 | SCR_NS_BIT);
+	isb();
+
+	write_icc_sre_el2(read_ctx_reg(ctx, CTX_ICC_SRE_EL2));
+
+	write_scr_el3(scr_el3);
+	isb();
+#endif
+	write_ich_hcr_el2(read_ctx_reg(ctx, CTX_ICH_HCR_EL2));
+
+	if (errata_ich_vmcr_el2_applies()) {
+		if (security_state == SECURE) {
+			write_scr_el3(scr_el3 & ~SCR_NS_BIT);
+		} else {
+			write_scr_el3(scr_el3 | SCR_NS_BIT);
+		}
+		isb();
+	}
+
+	write_ich_vmcr_el2(read_ctx_reg(ctx, CTX_ICH_VMCR_EL2));
+
+	if (errata_ich_vmcr_el2_applies()) {
+		write_scr_el3(scr_el3);
+		isb();
+	}
+}
+
 /* -----------------------------------------------------
  * The following registers are not added:
  * AMEVCNTVOFF0<n>_EL2
  * AMEVCNTVOFF1<n>_EL2
- * ICH_AP0R<n>_EL2
- * ICH_AP1R<n>_EL2
- * ICH_LR<n>_EL2
  * -----------------------------------------------------
  */
 static void el2_sysregs_context_save_common(el2_sysregs_t *ctx)
@@ -1139,22 +1216,6 @@
 	write_ctx_reg(ctx, CTX_HCR_EL2, read_hcr_el2());
 	write_ctx_reg(ctx, CTX_HPFAR_EL2, read_hpfar_el2());
 	write_ctx_reg(ctx, CTX_HSTR_EL2, read_hstr_el2());
-
-	/*
-	 * Set the NS bit to be able to access the ICC_SRE_EL2 register
-	 * TODO: remove with root context
-	 */
-	u_register_t scr_el3 = read_scr_el3();
-
-	write_scr_el3(scr_el3 | SCR_NS_BIT);
-	isb();
-	write_ctx_reg(ctx, CTX_ICC_SRE_EL2, read_icc_sre_el2());
-
-	write_scr_el3(scr_el3);
-	isb();
-
-	write_ctx_reg(ctx, CTX_ICH_HCR_EL2, read_ich_hcr_el2());
-	write_ctx_reg(ctx, CTX_ICH_VMCR_EL2, read_ich_vmcr_el2());
 	write_ctx_reg(ctx, CTX_MAIR_EL2, read_mair_el2());
 	write_ctx_reg(ctx, CTX_MDCR_EL2, read_mdcr_el2());
 	write_ctx_reg(ctx, CTX_SCTLR_EL2, read_sctlr_el2());
@@ -1189,22 +1250,6 @@
 	write_hcr_el2(read_ctx_reg(ctx, CTX_HCR_EL2));
 	write_hpfar_el2(read_ctx_reg(ctx, CTX_HPFAR_EL2));
 	write_hstr_el2(read_ctx_reg(ctx, CTX_HSTR_EL2));
-
-	/*
-	 * Set the NS bit to be able to access the ICC_SRE_EL2 register
-	 * TODO: remove with root context
-	 */
-	u_register_t scr_el3 = read_scr_el3();
-
-	write_scr_el3(scr_el3 | SCR_NS_BIT);
-	isb();
-	write_icc_sre_el2(read_ctx_reg(ctx, CTX_ICC_SRE_EL2));
-
-	write_scr_el3(scr_el3);
-	isb();
-
-	write_ich_hcr_el2(read_ctx_reg(ctx, CTX_ICH_HCR_EL2));
-	write_ich_vmcr_el2(read_ctx_reg(ctx, CTX_ICH_VMCR_EL2));
 	write_mair_el2(read_ctx_reg(ctx, CTX_MAIR_EL2));
 	write_mdcr_el2(read_ctx_reg(ctx, CTX_MDCR_EL2));
 	write_sctlr_el2(read_ctx_reg(ctx, CTX_SCTLR_EL2));
@@ -1234,6 +1279,8 @@
 	el2_sysregs_ctx = get_el2_sysregs_ctx(ctx);
 
 	el2_sysregs_context_save_common(el2_sysregs_ctx);
+	el2_sysregs_context_save_gic(el2_sysregs_ctx, security_state);
+
 #if CTX_INCLUDE_MTE_REGS
 	write_ctx_reg(el2_sysregs_ctx, CTX_TFSR_EL2, read_tfsr_el2());
 #endif
@@ -1307,6 +1354,8 @@
 	el2_sysregs_ctx = get_el2_sysregs_ctx(ctx);
 
 	el2_sysregs_context_restore_common(el2_sysregs_ctx);
+	el2_sysregs_context_restore_gic(el2_sysregs_ctx, security_state);
+
 #if CTX_INCLUDE_MTE_REGS
 	write_tfsr_el2(read_ctx_reg(el2_sysregs_ctx, CTX_TFSR_EL2));
 #endif
diff --git a/plat/arm/board/fvp/platform.mk b/plat/arm/board/fvp/platform.mk
index d8413c1..8025f0d 100644
--- a/plat/arm/board/fvp/platform.mk
+++ b/plat/arm/board/fvp/platform.mk
@@ -31,6 +31,9 @@
 # Macro to enable helpers for running SPM tests. Disabled by default.
 PLAT_TEST_SPM	:= 0
 
+# By default, don't build CPUs with no FVP model.
+BUILD_CPUS_WITH_NO_FVP_MODEL	?= 0
+
 # This is a very tricky TEMPORARY fix. Enabling ALL features exceeds BL31's
 # progbits limit. We need a way to build all useful configurations while waiting
 # on the fvp to increase its SRAM size. The problem is twofold:
@@ -213,16 +216,21 @@
 					lib/cpus/aarch64/neoverse_v1.S		\
 					lib/cpus/aarch64/neoverse_e1.S		\
 					lib/cpus/aarch64/cortex_x2.S		\
-					lib/cpus/aarch64/cortex_x4.S		\
-					lib/cpus/aarch64/cortex_gelas.S		\
-					lib/cpus/aarch64/nevis.S		\
-					lib/cpus/aarch64/travis.S
+					lib/cpus/aarch64/cortex_x4.S
 	endif
 	# AArch64/AArch32 cores
 	FVP_CPU_LIBS	+=	lib/cpus/aarch64/cortex_a55.S		\
 				lib/cpus/aarch64/cortex_a75.S
 endif
 
+# Build AArch64-only CPUs with no FVP model yet.
+ifeq (${BUILD_CPUS_WITH_NO_FVP_MODEL},1)
+	FVP_CPU_LIBS    +=	lib/cpus/aarch64/neoverse_n3.S	\
+				lib/cpus/aarch64/cortex_gelas.S		\
+				lib/cpus/aarch64/nevis.S		\
+				lib/cpus/aarch64/travis.S
+endif
+
 else
 FVP_CPU_LIBS		+=	lib/cpus/aarch32/cortex_a32.S			\
 				lib/cpus/aarch32/cortex_a57.S			\
diff --git a/services/arm_arch_svc/arm_arch_svc_setup.c b/services/arm_arch_svc/arm_arch_svc_setup.c
index 57d211e..3895b4a 100644
--- a/services/arm_arch_svc/arm_arch_svc_setup.c
+++ b/services/arm_arch_svc/arm_arch_svc_setup.c
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2018-2023, Arm Limited and Contributors. All rights reserved.
+ * Copyright (c) 2018-2025, Arm Limited and Contributors. All rights reserved.
  *
  * SPDX-License-Identifier: BSD-3-Clause
  */
@@ -90,6 +90,15 @@
 		}
 		return 0; /* ERRATA_APPLIES || ERRATA_MISSING */
 #endif
+
+#if WORKAROUND_CVE_2024_7881
+	case SMCCC_ARCH_WORKAROUND_4:
+		if (check_wa_cve_2024_7881() != ERRATA_APPLIES) {
+			return SMC_ARCH_CALL_NOT_SUPPORTED;
+		}
+		return 0;
+#endif /* WORKAROUND_CVE_2024_7881 */
+
 #endif /* __aarch64__ */
 
 	/* Fallthrough */
@@ -160,6 +169,15 @@
 		 */
 		SMC_RET0(handle);
 #endif
+#if WORKAROUND_CVE_2024_7881
+	case SMCCC_ARCH_WORKAROUND_4:
+		/*
+		 * The workaround has already been applied on affected PEs
+		 * during cold boot. This function has no effect whether PE is
+		 * during cold boot. This function has no effect whether the
+		 * PE is affected or not.
+		SMC_RET0(handle);
+#endif /* WORKAROUND_CVE_2024_7881 */
 #endif /* __aarch64__ */
 	default:
 		WARN("Unimplemented Arm Architecture Service Call: 0x%x \n",
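For context, a lower-EL caller is expected to discover the new function ID through SMCCC_ARCH_FEATURES before relying on it. A hedged caller-side sketch: smccc_call() here is a placeholder for whatever SMC conduit the OS or hypervisor uses, and the SMCCC_ARCH_FEATURES ID 0x80000001 comes from the SMCCC specification, not from this patch:

#include <stdbool.h>

#define SMCCC_ARCH_FEATURES		0x80000001U	/* SMCCC spec */
#define SMCCC_ARCH_WORKAROUND_4		0x80000004U	/* added above */

/* Placeholder conduit: issues an SMC and returns the value from x0. */
long smccc_call(unsigned int fid, unsigned long arg0);

/* True when EL3 reports the CVE-2024-7881 mitigation for this PE. */
static bool cve_2024_7881_mitigated(void)
{
	if (smccc_call(SMCCC_ARCH_FEATURES, SMCCC_ARCH_WORKAROUND_4) < 0)
		return false;	/* NOT_SUPPORTED: unaffected or unmitigated */

	/*
	 * The handler above applies the mitigation at cold boot, so the
	 * workaround call itself is a permitted no-op.
	 */
	(void)smccc_call(SMCCC_ARCH_WORKAROUND_4, 0UL);
	return true;
}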
diff --git a/services/std_svc/errata_abi/cpu_errata_info.h b/services/std_svc/errata_abi/cpu_errata_info.h
index 2d59fc9..21a23e1 100644
--- a/services/std_svc/errata_abi/cpu_errata_info.h
+++ b/services/std_svc/errata_abi/cpu_errata_info.h
@@ -8,6 +8,7 @@
 #define ERRATA_CPUSPEC_H
 
 #include <stdint.h>
+#include <arch.h>
 #include <arch_helpers.h>
 
 #if __aarch64__
@@ -31,8 +32,6 @@
 /* Default values for unused memory in the array */
 #define UNDEF_ERRATA		{UINT_MAX, UCHAR_MAX, UCHAR_MAX}
 
-#define EXTRACT_PARTNUM(x)	((x >> MIDR_PN_SHIFT) & MIDR_PN_MASK)
-
 #define RXPX_RANGE(x, y, z)	(((x >= y) && (x <= z)) ? true : false)
 
 /*