summaryrefslogtreecommitdiff
path: root/src/arch/x86
diff options
context:
space:
mode:
authorHarshit Sharma <harshitsharmajs@gmail.com>2020-08-08 17:51:59 -0700
committerPatrick Georgi <pgeorgi@google.com>2020-08-21 07:46:04 +0000
commit51593dd0c6cf1f69b813ba9c507f74f3641bdc68 (patch)
treeda24930937f0f150e1a1cdec8965c7971474c4ca /src/arch/x86
parentc29c12376fee93da6654624b648f9c84233c6973 (diff)
downloadcoreboot-51593dd0c6cf1f69b813ba9c507f74f3641bdc68.tar.xz
arch/x86: Add support for ASan to memory functions
The compiler's instrumentation cannot insert ASan memory checks for memory functions such as memset, memcpy and memmove, because they are written in assembly. So we need to manually check the memory state before performing each of these operations, to ensure that ASan is triggered in case of a bad access. Change-Id: I2030437636c77aea7cccda8efe050df4b77c15c7 Signed-off-by: Harshit Sharma <harshitsharmajs@gmail.com> Reviewed-on: https://review.coreboot.org/c/coreboot/+/44307 Tested-by: build bot (Jenkins) <no-reply@coreboot.org> Reviewed-by: Werner Zeh <werner.zeh@siemens.com>
Diffstat (limited to 'src/arch/x86')
-rw-r--r--src/arch/x86/memcpy.c8
-rw-r--r--src/arch/x86/memmove.c8
-rw-r--r--src/arch/x86/memset.c7
3 files changed, 23 insertions, 0 deletions
diff --git a/src/arch/x86/memcpy.c b/src/arch/x86/memcpy.c
index 2f23219de5..1cfdf89175 100644
--- a/src/arch/x86/memcpy.c
+++ b/src/arch/x86/memcpy.c
@@ -1,11 +1,19 @@
/* SPDX-License-Identifier: GPL-2.0-only */
#include <string.h>
+#include <stdbool.h>
+#include <asan.h>
void *memcpy(void *dest, const void *src, size_t n)
{
unsigned long d0, d1, d2;
+#if (ENV_ROMSTAGE && CONFIG(ASAN_IN_ROMSTAGE)) || \
+ (ENV_RAMSTAGE && CONFIG(ASAN_IN_RAMSTAGE))
+ check_memory_region((unsigned long)src, n, false, _RET_IP_);
+ check_memory_region((unsigned long)dest, n, true, _RET_IP_);
+#endif
+
asm volatile(
#ifdef __x86_64__
"rep ; movsd\n\t"
diff --git a/src/arch/x86/memmove.c b/src/arch/x86/memmove.c
index cdd1e8dc70..3ec50b26ae 100644
--- a/src/arch/x86/memmove.c
+++ b/src/arch/x86/memmove.c
@@ -4,12 +4,20 @@
*/
#include <string.h>
+#include <stdbool.h>
+#include <asan.h>
void *memmove(void *dest, const void *src, size_t n)
{
int d0, d1, d2, d3, d4, d5;
char *ret = dest;
+#if (ENV_ROMSTAGE && CONFIG(ASAN_IN_ROMSTAGE)) || \
+ (ENV_RAMSTAGE && CONFIG(ASAN_IN_RAMSTAGE))
+ check_memory_region((unsigned long)src, n, false, _RET_IP_);
+ check_memory_region((unsigned long)dest, n, true, _RET_IP_);
+#endif
+
__asm__ __volatile__(
/* Handle more 16bytes in loop */
"cmp $0x10, %0\n\t"
diff --git a/src/arch/x86/memset.c b/src/arch/x86/memset.c
index 17963426c0..fc09a9bd52 100644
--- a/src/arch/x86/memset.c
+++ b/src/arch/x86/memset.c
@@ -4,6 +4,8 @@
#include <string.h>
#include <stdint.h>
+#include <stdbool.h>
+#include <asan.h>
typedef uint32_t op_t;
@@ -12,6 +14,11 @@ void *memset(void *dstpp, int c, size_t len)
int d0;
unsigned long int dstp = (unsigned long int) dstpp;
+#if (ENV_ROMSTAGE && CONFIG(ASAN_IN_ROMSTAGE)) || \
+ (ENV_RAMSTAGE && CONFIG(ASAN_IN_RAMSTAGE))
+ check_memory_region((unsigned long)dstpp, len, true, _RET_IP_);
+#endif
+
/* This explicit register allocation improves code very much indeed. */
register op_t x asm("ax");