author		Douglas Raillard <douglas.raillard@arm.com>	2017-03-07 16:36:14 +0000
committer	Douglas Raillard <douglas.raillard@arm.com>	2017-03-20 10:38:43 +0000
commit		355a5d03360802e2c7b8f09ffca641df0c9e47bf (patch)
tree		80a2804c6ff789e369be9d62637a8b0636044010 /lib
parent		fa971fca2f16b3085499bc79066a8ba792841f13 (diff)
download	trusted-firmware-a-355a5d03360802e2c7b8f09ffca641df0c9e47bf.tar.gz
Replace ASM signed tests with unsigned
The ge, lt, gt and le condition codes in assembly provide signed tests, whereas hs, lo, hi and ls provide their unsigned counterparts. Signed tests should only be used when strictly necessary; applying them to logically unsigned values can invert the result of the test for large enough operands. Offsets, addresses and, usually, counters are unsigned values and should be tested as such.

Replace the unnecessary signed condition codes with unsigned tests, so that the full range of unsigned values can be used without the comparison inverting for large operands.

Change-Id: I58b7e98d03e3a4476dfb45230311f296d224980a
Signed-off-by: Douglas Raillard <douglas.raillard@arm.com>
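An illustrative sketch of the inversion described above (not part of the patch): with r0 = 0x10 and r1 = 0x80000000 (for example a high address), a signed test claims r0 is not below r1, because 0x80000000 reads as -2^31, while the unsigned test gets it right. The cmp_demo routine below is a hypothetical helper written only for this demonstration.

	.syntax unified
	.text
	.global	cmp_demo
cmp_demo:
	cmp	r0, r1		// flags for r0 - r1
	movlt	r2, #1		// signed "r0 < r1":   0 when r1 = 0x80000000 (inverted)
	movge	r2, #0
	cmp	r0, r1
	movlo	r3, #1		// unsigned "r0 < r1": 1, as expected for an address
	movhs	r3, #0
	eor	r0, r2, r3	// r0 = 1 when the two tests disagree
	bx	lr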
Diffstat (limited to 'lib')
-rw-r--r--	lib/aarch32/cache_helpers.S	8
-rw-r--r--	lib/aarch32/misc_helpers.S	2
-rw-r--r--	lib/aarch64/cache_helpers.S	8
-rw-r--r--	lib/cpus/aarch32/cpu_helpers.S	2
4 files changed, 10 insertions, 10 deletions
diff --git a/lib/aarch32/cache_helpers.S b/lib/aarch32/cache_helpers.S
index d0e5cd0687..b17b903736 100644
--- a/lib/aarch32/cache_helpers.S
+++ b/lib/aarch32/cache_helpers.S
@@ -118,7 +118,7 @@ loop1:
mov r12, r2, LSR r10 // extract cache type bits from clidr
and r12, r12, #7 // mask the bits for current cache only
cmp r12, #2 // see what cache we have at this level
- blt level_done // no cache or only instruction cache at this level
+ blo level_done // no cache or only instruction cache at this level
stcopr r1, CSSELR // select current cache level in csselr
isb // isb to sych the new cssr&csidr
@@ -138,14 +138,14 @@ loop3:
blx r6
subs r7, r7, #1 // decrement the set number
- bge loop3
+ bhs loop3
subs r9, r9, #1 // decrement the way number
- bge loop2
+ bhs loop2
level_done:
add r1, r1, #2 // increment the cache number
cmp r3, r1
dsb sy // ensure completion of previous cache maintenance instruction
- bgt loop1
+ bhi loop1
mov r6, #0
stcopr r6, CSSELR //select cache level 0 in csselr
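For reference, a sketch of why the hs form still terminates the decrementing set/way loops above (the label and loop below are illustrative only, not from the patch): after subs subtracts 1, the carry flag is set exactly when the counter was at least 1 beforehand, so bhs keeps looping until the counter has stepped below zero, the same point at which the old bge fell through for these small, logically unsigned counters.

count_down:
	subs	r7, r7, #1	// C set <=> old r7 >= 1 (no borrow)
	bhs	count_down	// falls through once r7 wraps past zero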
diff --git a/lib/aarch32/misc_helpers.S b/lib/aarch32/misc_helpers.S
index dc8479951b..5b17c21cf5 100644
--- a/lib/aarch32/misc_helpers.S
+++ b/lib/aarch32/misc_helpers.S
@@ -170,7 +170,7 @@ func memcpy4
/* copy 4 bytes at a time */
m_loop4:
cmp r2, #4
- blt m_loop1
+ blo m_loop1
ldr r3, [r1], #4
str r3, [r0], #4
sub r2, r2, #4
diff --git a/lib/aarch64/cache_helpers.S b/lib/aarch64/cache_helpers.S
index 476b906e06..acafea70b7 100644
--- a/lib/aarch64/cache_helpers.S
+++ b/lib/aarch64/cache_helpers.S
@@ -119,7 +119,7 @@ loop1:
lsr x1, x0, x2 // extract cache type bits from clidr
and x1, x1, #7 // mask the bits for current cache only
cmp x1, #2 // see what cache we have at this level
- b.lt level_done // nothing to do if no cache or icache
+ b.lo level_done // nothing to do if no cache or icache
msr csselr_el1, x10 // select current cache level in csselr
isb // isb to sych the new cssr&csidr
@@ -144,10 +144,10 @@ loop3_\_op:
orr w11, w9, w7 // combine cache, way and set number
dc \_op, x11
subs w7, w7, w17 // decrement set number
- b.ge loop3_\_op
+ b.hs loop3_\_op
subs x9, x9, x16 // decrement way number
- b.ge loop2_\_op
+ b.hs loop2_\_op
b level_done
.endm
@@ -155,7 +155,7 @@ loop3_\_op:
level_done:
add x10, x10, #2 // increment cache number
cmp x3, x10
- b.gt loop1
+ b.hi loop1
msr csselr_el1, xzr // select cache level 0 in csselr
dsb sy // barrier to complete final cache operation
isb
diff --git a/lib/cpus/aarch32/cpu_helpers.S b/lib/cpus/aarch32/cpu_helpers.S
index c41978ed57..dc1b6e6198 100644
--- a/lib/cpus/aarch32/cpu_helpers.S
+++ b/lib/cpus/aarch32/cpu_helpers.S
@@ -157,7 +157,7 @@ func get_cpu_ops_ptr
1:
/* Check if we have reached end of list */
cmp r4, r5
- bge error_exit
+ bhs error_exit
/* load the midr from the cpu_ops */
ldr r1, [r4], #CPU_OPS_SIZE