path: root/src/cpu/x86/64bit/exit32.inc
/* SPDX-License-Identifier: GPL-2.0-only */

/*
 * For dropping from long mode to protected mode.
 *
 * For reference see "AMD64 Architecture Programmer's Manual Volume 2",
 * Document 24593, Rev. 3.31, July 2019, Chapter 5.3
 *
 * Clobbers: rax, rbx, rcx, rdx
 */
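
/*
 * Usage sketch (illustrative; the exact caller is platform code): the
 * including .S file is expected to have a GDT with ROM_CODE_SEG and
 * ROM_DATA_SEG already loaded, and enters with:
 *
 *	jmp	drop_longmode
 *
 * Control falls through to __longmode_exit at the end of this file, so
 * whatever follows the include executes in 32-bit protected mode.
 */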
.code64

#include <cpu/x86/msr.h>
#include <cpu/x86/cr.h>
#include <arch/rom_segs.h>

drop_longmode:
	/* Ensure cache is clean. */
	wbinvd
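	/* wbinvd writes modified cache lines back to memory and
	   invalidates the caches, so no dirty data is lost across
	   the mode switch. */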

	/* Set 32-bit code segment and ss */
	mov	$ROM_CODE_SEG, %rcx
	/* SetCodeSelector32 will drop us to protected mode on return */
	call	SetCodeSelector32

	/* The call above returns here in protected mode; jump over the
	   body of SetCodeSelector32 */
.code32
	jmp	__longmode_compatibility

.align 8
.code64
SetCodeSelector32:
	# pop the return address from stack
	pop	%rbx

	# save rsp because we need to push it after ss
	mov	%rsp, %rdx

	# use iretq to jump to a 32-bit offset in a new code segment
	# iretq pops %rip, %cs, rflags, %rsp and %ss, in that order,
	# so build the frame in reverse
	mov	%ss, %ax	# need to push ss, but the push ss instruction
	push	%rax		# is not valid in x64 mode, so go through rax
	push	%rdx		# the rsp to load
	pushfq			# push rflags
	push	%rcx		# %rcx holds the code segment selector from the caller
	push	%rbx		# the IP for the next instruction

	# the iretq will behave like ret, with the new cs/ss value loaded
	iretq
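
	# not reached: execution resumes at the popped %rip (the return
	# address of drop_longmode's call), now in 32-bit mode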

.align 4
.code32
__longmode_compatibility:
	/* Running in 32-bit compatibility mode */

	/* Use flat data segment */
	movl	$ROM_DATA_SEG, %eax
	movl	%eax, %ds
	movl	%eax, %es
	movl	%eax, %ss
	movl	%eax, %fs
	movl	%eax, %gs

	/* Disable paging. */
	movl	%cr0, %eax
	andl	$(~CR0_PG), %eax
	movl	%eax, %cr0
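	/* Per the AMD manual referenced above, paging must be disabled
	   before EFER.LME can be cleared. */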

	/* Disable long mode. */
	movl	$(IA32_EFER), %ecx
	rdmsr
	andl	$(~EFER_LME), %eax
	wrmsr
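	/* EFER.LMA was already cleared by hardware when paging went
	   off; the CPU is now in legacy protected mode. */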

	/* Disable PAE. */
	movl	%cr4, %eax
	andl	$(~CR4_PAE), %eax
	movl	%eax, %cr4

	/* Clear page table register */
	xor	%eax, %eax
	movl	%eax, %cr3
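	/* The mov to %cr3 also flushes non-global TLB entries; zero is
	   fine here because paging is already disabled. */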

__longmode_exit:
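	/* Execution continues here in 32-bit protected mode. */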