Merge changes from topic "sb/exceptions"

* changes:
  Rework IRQ vector code
  Add CFI debug info to vector entries
  Improve readability of exceptions stubs code
  Fix comments in AArch64 exceptions code
diff --git a/include/common/aarch64/asm_macros.S b/include/common/aarch64/asm_macros.S
index ad47290..5298ae0 100644
--- a/include/common/aarch64/asm_macros.S
+++ b/include/common/aarch64/asm_macros.S
@@ -61,20 +61,21 @@
 	 */
 	.macro vector_entry  label
 	.section .vectors, "ax"
+	.cfi_sections .debug_frame
 	.align 7, 0
+	.type \label, %function
+	.cfi_startproc
 	\label:
 	.endm
 
 	/*
-	 * This macro verifies that the a given vector doesn't exceed the
-	 * architectural limit of 32 instructions. This is meant to be placed
-	 * immedately after the last instruction in the vector. It takes the
-	 * vector entry as the parameter
+	 * Add padding bytes to fill the exception vector up to its full size,
+	 * which is always 32 instructions. If there are more than 32 instructions
+	 * in the exception vector then an error is emitted.
 	 */
-	.macro check_vector_size since
-	  .if (. - \since) > (32 * 4)
-	    .error "Vector exceeds 32 instructions"
-	  .endif
+	.macro end_vector_entry label
+	.cfi_endproc
+	.fill	\label + (32 * 4) - .
 	.endm
 
 	/*
diff --git a/lib/aarch64/exception_stubs.S b/lib/aarch64/exception_stubs.S
index 0508fe5..d418451 100644
--- a/lib/aarch64/exception_stubs.S
+++ b/lib/aarch64/exception_stubs.S
@@ -6,41 +6,32 @@
 
 #include <asm_macros.S>
 
+	.global exception_stubs
+
 /*
  * Simplistic exceptions vector table.
  * All entries spin, which means all types of exceptions are unrecoverable.
  */
-	.global exception_stubs
+	.macro vector_entry_spin name
+	vector_entry \name
+	b \name
+	end_vector_entry \name
+	.endm
+
 vector_base exception_stubs
-vector_entry SynchronousExceptionSP0
-	b	.
-vector_entry IrqSP0
-	b	.
-vector_entry FiqSP0
-	b	.
-vector_entry SErrorSP0
-	b	.
-vector_entry SynchronousExceptionSPx
-	b	.
-vector_entry IrqSPx
-	b	.
-vector_entry FiqSPx
-	b	.
-vector_entry SErrorSPx
-	b	.
-vector_entry SynchronousExceptionA64
-	b	.
-vector_entry IrqA64
-	b	.
-vector_entry FiqA64
-	b	.
-vector_entry SErrorA64
-	b	.
-vector_entry SynchronousExceptionA32
-	b	.
-vector_entry IrqA32
-	b	.
-vector_entry FiqA32
-	b	.
-vector_entry SErrorA32
-	b	.
+vector_entry_spin SynchronousExceptionSP0
+vector_entry_spin IrqSP0
+vector_entry_spin FiqSP0
+vector_entry_spin SErrorSP0
+vector_entry_spin SynchronousExceptionSPx
+vector_entry_spin IrqSPx
+vector_entry_spin FiqSPx
+vector_entry_spin SErrorSPx
+vector_entry_spin SynchronousExceptionA64
+vector_entry_spin IrqA64
+vector_entry_spin FiqA64
+vector_entry_spin SErrorA64
+vector_entry_spin SynchronousExceptionA32
+vector_entry_spin IrqA32
+vector_entry_spin FiqA32
+vector_entry_spin SErrorA32
diff --git a/tftf/framework/aarch64/exceptions.S b/tftf/framework/aarch64/exceptions.S
index 08bef46..6014b12 100644
--- a/tftf/framework/aarch64/exceptions.S
+++ b/tftf/framework/aarch64/exceptions.S
@@ -9,95 +9,88 @@
 	.globl	tftf_vector
 
 vector_base tftf_vector
-	//-----------------------------------------------------
-	// Current EL with SP0 : 0x0 - 0x180
-	//-----------------------------------------------------
+
+	/*
+	 * Current EL with SP0 : 0x0 - 0x200.
+	 */
 vector_entry SynchronousExceptionSP0
 	b	SynchronousExceptionSP0
-	check_vector_size SynchronousExceptionSP0
+end_vector_entry SynchronousExceptionSP0
 
 vector_entry IrqSP0
 	b	IrqSP0
-	check_vector_size IrqSP0
+end_vector_entry IrqSP0
 
 vector_entry FiqSP0
 	b	FiqSP0
-	check_vector_size FiqSP0
+end_vector_entry FiqSP0
 
 vector_entry SErrorSP0
 	b	SErrorSP0
-	check_vector_size SErrorSP0
+end_vector_entry SErrorSP0
 
-	//-----------------------------------------------------
-	// Current EL with SPx: 0x200 - 0x380
-	//-----------------------------------------------------
+	/*
+	 * Current EL with SPx : 0x200 - 0x400.
+	 */
 vector_entry SynchronousExceptionSPx
 	b	SynchronousExceptionSPx
-	check_vector_size SynchronousExceptionSPx
+end_vector_entry SynchronousExceptionSPx
 
 vector_entry IrqSPx
-	/*
-	 * TODO: Investigate whether the Trusted Firmware-A code for context
-	 * save/restore could be reused
-	 */
-	stp	x29, x30, [sp, #-0x10]!
-	bl	save_regs
-	bl	tftf_irq_handler_dispatcher
-	bl	restore_regs
-	ldp	x29, x30, [sp], #0x10
-	eret
-	check_vector_size IrqSPx
+	b	irq_vector_entry
+end_vector_entry IrqSPx
 
 vector_entry FiqSPx
 	b	FiqSPx
-	check_vector_size FiqSPx
+end_vector_entry FiqSPx
 
 vector_entry SErrorSPx
 	b	SErrorSPx
-	check_vector_size SErrorSPx
+end_vector_entry SErrorSPx
 
-	//-----------------------------------------------------
-	// Lower EL using AArch64 : 0x400 - 0x580
-	//-----------------------------------------------------
+	/*
+	 * Lower EL using AArch64 : 0x400 - 0x600.
+	 */
 vector_entry SynchronousExceptionA64
 	b	SynchronousExceptionA64
-	check_vector_size SynchronousExceptionA64
+end_vector_entry SynchronousExceptionA64
 
 vector_entry IrqA64
 	b	IrqA64
-	check_vector_size IrqA64
+end_vector_entry IrqA64
 
 vector_entry FiqA64
 	b	FiqA64
-	check_vector_size FiqA64
+end_vector_entry FiqA64
 
 vector_entry SErrorA64
 	b	SErrorA64
-	check_vector_size SErrorA64
+end_vector_entry SErrorA64
 
-	//-----------------------------------------------------
- 	// Lower EL using AArch32 : 0x0 - 0x180
-	//-----------------------------------------------------
+	/*
+	 * Lower EL using AArch32 : 0x600 - 0x800.
+	 */
 vector_entry SynchronousExceptionA32
 	b	SynchronousExceptionA32
-	check_vector_size SynchronousExceptionA32
+end_vector_entry SynchronousExceptionA32
 
 vector_entry IrqA32
 	b	IrqA32
-	check_vector_size IrqA32
+end_vector_entry IrqA32
 
 vector_entry FiqA32
 	b	FiqA32
-	check_vector_size FiqA32
+end_vector_entry FiqA32
 
 vector_entry SErrorA32
 	b	SErrorA32
-	check_vector_size SErrorA32
+end_vector_entry SErrorA32
 
-
-// Note: Exceptions will always be from the same EL, so no need to save spsr
-func save_regs
-	sub	sp, sp, #0x100
+/*
+ * Exceptions will always be taken from the same exception level, so there is
+ * no need to save and restore SPSR.
+ */
+.macro save_gp_regs
 	stp	x0, x1, [sp, #0x0]
 	stp	x2, x3, [sp, #0x10]
 	stp	x4, x5, [sp, #0x20]
@@ -112,18 +105,15 @@
 	stp	x22, x23, [sp, #0xb0]
 	stp	x24, x25, [sp, #0xc0]
 	stp	x26, x27, [sp, #0xd0]
+	stp	x28, x29, [sp, #0xe0]
 	mrs     x0, sp_el0
-	stp	x28, x0, [sp, #0xe0]
-	str	x0, [sp, #0xf0]
-	ret
-endfunc save_regs
+	stp	x30, x0, [sp, #0xf0]
+.endm
 
-
-// Note: Exceptions will always be from the same EL, so no need to restore spsr
-func restore_regs
-	ldr	x9, [sp, #0xf0]
-	ldp	x28, x9, [sp, #0xe0]
-	msr	sp_el0, x9
+.macro restore_gp_regs
+	ldp	x30, x0, [sp, #0xf0]
+	msr	sp_el0, x0
+	ldp	x28, x29, [sp, #0xe0]
 	ldp	x26, x27, [sp, #0xd0]
 	ldp	x24, x25, [sp, #0xc0]
 	ldp	x22, x23, [sp, #0xb0]
@@ -138,6 +128,13 @@
 	ldp	x4, x5, [sp, #0x20]
 	ldp	x2, x3, [sp, #0x10]
 	ldp	x0, x1, [sp, #0x0]
+.endm
+
+func irq_vector_entry
+	sub	sp, sp, #0x100
+	save_gp_regs
+	bl	tftf_irq_handler_dispatcher
+	restore_gp_regs
 	add	sp, sp, #0x100
-	ret
-endfunc restore_regs
+	eret
+endfunc irq_vector_entry