summaryrefslogtreecommitdiff
path: root/payloads/libpayload/arch/x86/head.S
blob: 0575dbe8cc44e492586d4a751ded51c1e20db609 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
/*
 * This file is part of the libpayload project.
 *
 * Copyright (C) 2008 Advanced Micro Devices, Inc.
 * Copyright (C) 2017 Patrick Rudolph <siro@das-labor.org>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

	.code32
	.global _entry
	.text
	.align 4

/*
 * Our entry point - assume that the CPU is in 32 bit protected mode and
 * all segments are in a flat model. That's our operating mode, so we won't
 * change anything.
 */
_entry:
	jmp _init		/* skip over the multiboot header that follows */

	.align 4

#define MB_MAGIC 0x1BADB002
#define MB_FLAGS 0x00010003

/*
 * Multiboot (v1) header, so multiboot-capable loaders (e.g. GRUB) can
 * find and load this payload.  The third field is the checksum: it is
 * chosen so that MB_MAGIC + MB_FLAGS + checksum == 0, as the Multiboot
 * spec requires.  MB_FLAGS has bit 16 set, which tells the loader that
 * the four address fields below (header, load start, load end / bss,
 * and entry point) are valid.  _start, _edata and _end are presumably
 * provided by the linker script — defined outside this file.
 */
mb_header:
	.long MB_MAGIC
	.long MB_FLAGS
	.long -(MB_MAGIC + MB_FLAGS)	/* checksum: sum of first 3 longs == 0 */
	.long mb_header			/* header address */
	.long _start			/* load address */
	.long _edata			/* load end address */
	.long _end			/* bss end address */
	.long _init			/* entry point */

/*
 * Offsets (relative to the loader-provided %esp) of the argument
 * structure a coreboot loader pushes before jumping here, per the
 * coreboot Payload API.  CB_MAGIC_VALUE marks the structure as present.
 */
#define CB_MAGIC_VALUE	0x12345678
#define CB_MAGIC	0x04
#define CB_ARGV		0x08
#define CB_ARGC		0x10

/*
 * This function saves off the previous stack and switches us to our
 * own execution environment.
 *
 * On entry: %eax/%ebx may carry loader-specific values (e.g. the
 * multiboot magic/info pointer); they are preserved verbatim in
 * loader_eax/loader_ebx (storage defined elsewhere in libpayload).
 * Exits by calling start_main; if that returns, control falls
 * through to _leave below with the return value in %eax.
 */
_init:
	/* No interrupts, please. */
	cli

	/* Store EAX and EBX */
	movl %eax, loader_eax
	movl %ebx, loader_ebx

	/* Copy argv[] and argc as demanded by the Payload API,
	 * see https://www.coreboot.org/Payload_API and exec.S.
	 * If the magic is absent, main_argv/main_argc are left untouched.
	 */
	cmpl $CB_MAGIC_VALUE, CB_MAGIC(%esp)
	jne 1f

	movl CB_ARGV(%esp), %eax
	movl %eax, main_argv

	movl CB_ARGC(%esp), %eax
	movl %eax, main_argc
1:
	/* Store current stack pointer and set up new stack. */
	movl %esp, %eax
	movl $_stack, %esp	/* _stack: top of our own stack, from the linker script */
	pushl %eax		/* saved loader %esp; popped again in _leave */

	/* Enable special x86 functions if present. */
	pushl %eax
	pushl %ebx
	pushl %ecx
	pushl %edx

	/* CPUID leaf 0: %eax returns the highest supported standard leaf. */
	movl $0, %eax
	cpuid
	/* Test if CPUID(eax=1) is available. */
	test %eax, %eax
	je cpuid_done

	/* Get CPU features (CPUID leaf 1: feature flags in %edx). */
	movl $1, %eax
	cpuid

cpuid_fpu:
	/* Test if x87 FPU is present (CPUID.1:EDX bit 0) */
	test $1, %edx
	je cpuid_sse

	fninit			/* reset the FPU to a known state */
	movl %cr0, %eax
	andl $0xFFFFFFFB, %eax	/* clear EM (bit 2): no x87 emulation */
	orl $0x00000022, %eax	/* set MP (bit 1), NE (bit 5): native FPU error reporting */
	movl %eax, %cr0

cpuid_sse:
	/* Test if SSE is available (CPUID.1:EDX bit 25) */
	test $0x02000000, %edx
	je cpuid_done

	movl %cr4, %eax
	orl $0x00000600, %eax	/* set OSFXSR (bit 9), OSXMMEXCPT (bit 10) */
	movl %eax, %cr4

cpuid_done:
	/* Restore the registers saved before the CPUID probing. */
	popl %edx
	popl %ecx
	popl %ebx
	popl %eax

	/* Let's rock. */
	call start_main

/* %eax has the return value - pass it on unmolested */
_leave:
	/* Restore old stack. */
	popl %esp		/* the loader %esp pushed at the top of our stack in _init */

	/* Return to the original context. */
	ret