/******************************************************************************
 * emulate.c
 *
 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
 *
 * Copyright (c) 2005 Keir Fraser
 *
 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
 * privileged instructions:
 *
 * Copyright (C) 2006 Qumranet
 * Copyright 2010 Red Hat, Inc. and/or its affiliates.
 *
 *   Avi Kivity <avi@qumranet.com>
 *   Yaniv Kamay <yaniv@qumranet.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.  See
 * the COPYING file in the top-level directory.
 *
 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
 */

#include <linux/kvm_host.h>
#include "kvm_cache_regs.h"
#include <linux/module.h>
#include <asm/kvm_emulate.h>
#include <linux/stringify.h>

#include "x86.h"
#include "tss.h"

/*
 * Operand types
 */
#define OpNone             0ull
#define OpImplicit         1ull  /* No generic decode */
#define OpReg              2ull  /* Register */
#define OpMem              3ull  /* Memory */
#define OpAcc              4ull  /* Accumulator: AL/AX/EAX/RAX */
#define OpDI               5ull  /* ES:DI/EDI/RDI */
#define OpMem64            6ull  /* Memory, 64-bit */
#define OpImmUByte         7ull  /* Zero-extended 8-bit immediate */
#define OpDX               8ull  /* DX register */
#define OpCL               9ull  /* CL register (for shifts) */
#define OpImmByte         10ull  /* 8-bit sign extended immediate */
#define OpOne             11ull  /* Implied 1 */
#define OpImm             12ull  /* Sign extended up to 32-bit immediate */
#define OpMem16           13ull  /* Memory operand (16-bit). */
#define OpMem32           14ull  /* Memory operand (32-bit). */
#define OpImmU            15ull  /* Immediate operand, zero extended */
#define OpSI              16ull  /* SI/ESI/RSI */
#define OpImmFAddr        17ull  /* Immediate far address */
#define OpMemFAddr        18ull  /* Far address in memory */
#define OpImmU16          19ull  /* Immediate operand, 16 bits, zero extended */
#define OpES              20ull  /* ES */
#define OpCS              21ull  /* CS */
#define OpSS              22ull  /* SS */
#define OpDS              23ull  /* DS */
#define OpFS              24ull  /* FS */
#define OpGS              25ull  /* GS */
#define OpMem8            26ull  /* 8-bit zero extended memory operand */
#define OpImm64           27ull  /* Sign extended 16/32/64-bit immediate */

#define OpBits             5  /* Width of operand field */
#define OpMask             ((1ull << OpBits) - 1)
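/*
 * Each operand slot (Dst, Src, Src2) holds one of the Op* values above in a
 * 5-bit field of opcode::flags; DstShift, SrcShift and Src2Shift below give
 * the positions of those fields.
 */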

/*
 * Opcode effective-address decode tables.
 * Note that we only emulate instructions that have at least one memory
 * operand (excluding implicit stack references). We assume that stack
 * references and instruction fetches will never occur in special memory
 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
 * not be handled.
 */

/* Operand sizes: 8-bit operands or specified/overridden size. */
#define ByteOp      (1<<0)	/* 8-bit operands. */
/* Destination operand type. */
#define DstShift    1
#define ImplicitOps (OpImplicit << DstShift)
#define DstReg      (OpReg << DstShift)
#define DstMem      (OpMem << DstShift)
#define DstAcc      (OpAcc << DstShift)
#define DstDI       (OpDI << DstShift)
#define DstMem64    (OpMem64 << DstShift)
#define DstImmUByte (OpImmUByte << DstShift)
#define DstDX       (OpDX << DstShift)
#define DstMask     (OpMask << DstShift)
/* Source operand type. */
#define SrcShift    6
#define SrcNone     (OpNone << SrcShift)
#define SrcReg      (OpReg << SrcShift)
#define SrcMem      (OpMem << SrcShift)
#define SrcMem16    (OpMem16 << SrcShift)
#define SrcMem32    (OpMem32 << SrcShift)
#define SrcImm      (OpImm << SrcShift)
#define SrcImmByte  (OpImmByte << SrcShift)
#define SrcOne      (OpOne << SrcShift)
#define SrcImmUByte (OpImmUByte << SrcShift)
#define SrcImmU     (OpImmU << SrcShift)
#define SrcSI       (OpSI << SrcShift)
#define SrcImmFAddr (OpImmFAddr << SrcShift)
#define SrcMemFAddr (OpMemFAddr << SrcShift)
#define SrcAcc      (OpAcc << SrcShift)
#define SrcImmU16   (OpImmU16 << SrcShift)
#define SrcImm64    (OpImm64 << SrcShift)
#define SrcDX       (OpDX << SrcShift)
#define SrcMem8     (OpMem8 << SrcShift)
#define SrcMask     (OpMask << SrcShift)
#define BitOp       (1<<11)
#define MemAbs      (1<<12)      /* Memory operand is absolute displacement */
#define String      (1<<13)     /* String instruction (rep capable) */
#define Stack       (1<<14)     /* Stack instruction (push/pop) */
#define GroupMask   (7<<15)     /* Opcode uses one of the group mechanisms */
#define Group       (1<<15)     /* Bits 3:5 of modrm byte extend opcode */
#define GroupDual   (2<<15)     /* Alternate decoding of mod == 3 */
#define Prefix      (3<<15)     /* Instruction varies with 66/f2/f3 prefix */
#define RMExt       (4<<15)     /* Opcode extension in ModRM r/m if mod == 3 */
#define Escape      (5<<15)     /* Escape to coprocessor instruction */
#define Sse         (1<<18)     /* SSE Vector instruction */
/* Generic ModRM decode. */
#define ModRM       (1<<19)
/* Destination is only written; never read. */
#define Mov         (1<<20)
/* Misc flags */
#define Prot        (1<<21) /* instruction generates #UD if not in prot-mode */
#define VendorSpecific (1<<22) /* Vendor specific instruction */
#define NoAccess    (1<<23) /* Don't access memory (lea/invlpg/verr etc) */
#define Op3264      (1<<24) /* Operand is 64b in long mode, 32b otherwise */
#define Undefined   (1<<25) /* No Such Instruction */
#define Lock        (1<<26) /* lock prefix is allowed for the instruction */
#define Priv        (1<<27) /* instruction generates #GP if current CPL != 0 */
#define No64	    (1<<28)
#define PageTable   (1 << 29)   /* instruction used to write page table */
/* Source 2 operand type */
#define Src2Shift   (30)
#define Src2None    (OpNone << Src2Shift)
#define Src2CL      (OpCL << Src2Shift)
#define Src2ImmByte (OpImmByte << Src2Shift)
#define Src2One     (OpOne << Src2Shift)
#define Src2Imm     (OpImm << Src2Shift)
#define Src2ES      (OpES << Src2Shift)
#define Src2CS      (OpCS << Src2Shift)
#define Src2SS      (OpSS << Src2Shift)
#define Src2DS      (OpDS << Src2Shift)
#define Src2FS      (OpFS << Src2Shift)
#define Src2GS      (OpGS << Src2Shift)
#define Src2Mask    (OpMask << Src2Shift)
#define Mmx         ((u64)1 << 40)  /* MMX Vector instruction */
#define Aligned     ((u64)1 << 41)  /* Explicitly aligned (e.g. MOVDQA) */
#define Unaligned   ((u64)1 << 42)  /* Explicitly unaligned (e.g. MOVDQU) */
#define Avx         ((u64)1 << 43)  /* Advanced Vector Extensions */
#define Fastop      ((u64)1 << 44)  /* Use opcode::u.fastop */
#define NoWrite     ((u64)1 << 45)  /* No writeback */

#define X2(x...) x, x
#define X3(x...) X2(x), x
#define X4(x...) X2(x), X2(x)
#define X5(x...) X4(x), x
#define X6(x...) X4(x), X2(x)
#define X7(x...) X4(x), X3(x)
#define X8(x...) X4(x), X4(x)
#define X16(x...) X8(x), X8(x)
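/*
 * The X<n> macros repeat their argument list <n> times, e.g. X4(x) expands
 * to "x, x, x, x"; they are used to fill runs of identical entries in the
 * opcode tables later in this file.
 */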

#define NR_FASTOP (ilog2(sizeof(ulong)) + 1)
#define FASTOP_SIZE 8

/*
 * fastop functions have a special calling convention:
 *
 * dst:    [rdx]:rax  (in/out)
 * src:    rbx        (in/out)
 * src2:   rcx        (in)
 * flags:  rflags     (in/out)
 *
 * Moreover, they are all exactly FASTOP_SIZE bytes long, so functions for
 * different operand sizes can be reached by calculation, rather than a jump
 * table (which would be bigger than the code).
 *
 * fastop functions are declared as taking a never-defined fastop parameter,
 * so they can't be called from C directly.
 */

struct fastop;
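/*
 * Illustrative sketch (not part of the original file): because every FOP*E
 * stub below is padded to FASTOP_SIZE bytes, a dispatcher can reach the
 * size-specific variant of an em_<op> block by offsetting from its start,
 * e.g., assuming FASTOP2(add) has been instantiated:
 *
 *	void (*fop)(struct fastop *) = (void *)em_add;
 *	fop += __ffs(ctxt->dst.bytes) * FASTOP_SIZE;	(selects b/w/l/q entry)
 */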

struct opcode {
	u64 flags : 56;
	u64 intercept : 8;
	union {
		int (*execute)(struct x86_emulate_ctxt *ctxt);
		const struct opcode *group;
		const struct group_dual *gdual;
		const struct gprefix *gprefix;
		const struct escape *esc;
		void (*fastop)(struct fastop *fake);
	} u;
	int (*check_perm)(struct x86_emulate_ctxt *ctxt);
};
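/*
 * Illustrative sketch (not part of the original file): a decode-table entry
 * packs the operand codes and misc flags above into opcode::flags, so a
 * lockable mem,reg ALU form could be described roughly as
 * { .flags = DstMem | SrcReg | ModRM | Lock, .u.execute = ... }.
 */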

struct group_dual {
	struct opcode mod012[8];
	struct opcode mod3[8];
};

struct gprefix {
	struct opcode pfx_no;
	struct opcode pfx_66;
	struct opcode pfx_f2;
	struct opcode pfx_f3;
};

struct escape {
	struct opcode op[8];
	struct opcode high[64];
};

/* EFLAGS bit definitions. */
#define EFLG_ID (1<<21)
#define EFLG_VIP (1<<20)
#define EFLG_VIF (1<<19)
#define EFLG_AC (1<<18)
#define EFLG_VM (1<<17)
#define EFLG_RF (1<<16)
#define EFLG_IOPL (3<<12)
#define EFLG_NT (1<<14)
#define EFLG_OF (1<<11)
#define EFLG_DF (1<<10)
#define EFLG_IF (1<<9)
#define EFLG_TF (1<<8)
#define EFLG_SF (1<<7)
#define EFLG_ZF (1<<6)
#define EFLG_AF (1<<4)
#define EFLG_PF (1<<2)
#define EFLG_CF (1<<0)

#define EFLG_RESERVED_ZEROS_MASK 0xffc0802a
#define EFLG_RESERVED_ONE_MASK 2

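/*
 * The emulator works on a private copy of the guest's general purpose
 * registers in ctxt->_regs[]: reg_read() pulls a register in lazily on first
 * use, reg_write()/reg_rmw() mark it dirty, and writeback_registers() flushes
 * the dirty registers back through ctxt->ops->write_gpr().
 */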
static ulong reg_read(struct x86_emulate_ctxt *ctxt, unsigned nr)
{
	if (!(ctxt->regs_valid & (1 << nr))) {
		ctxt->regs_valid |= 1 << nr;
		ctxt->_regs[nr] = ctxt->ops->read_gpr(ctxt, nr);
	}
	return ctxt->_regs[nr];
}

static ulong *reg_write(struct x86_emulate_ctxt *ctxt, unsigned nr)
{
	ctxt->regs_valid |= 1 << nr;
	ctxt->regs_dirty |= 1 << nr;
	return &ctxt->_regs[nr];
}

static ulong *reg_rmw(struct x86_emulate_ctxt *ctxt, unsigned nr)
{
	reg_read(ctxt, nr);
	return reg_write(ctxt, nr);
}

static void writeback_registers(struct x86_emulate_ctxt *ctxt)
{
	unsigned reg;

	for_each_set_bit(reg, (ulong *)&ctxt->regs_dirty, 16)
		ctxt->ops->write_gpr(ctxt, reg, ctxt->_regs[reg]);
}

static void invalidate_registers(struct x86_emulate_ctxt *ctxt)
{
	ctxt->regs_dirty = 0;
	ctxt->regs_valid = 0;
}

/*
 * Instruction emulation:
 * Most instructions are emulated directly via a fragment of inline assembly
 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 * any modified flags.
 */

#if defined(CONFIG_X86_64)
#define _LO32 "k"		/* force 32-bit operand */
#define _STK  "%%rsp"		/* stack pointer */
#elif defined(__i386__)
#define _LO32 ""		/* force 32-bit operand */
#define _STK  "%%esp"		/* stack pointer */
#endif

/*
 * These EFLAGS bits are restored from saved value during emulation, and
 * any changes are written back to the saved value after emulation.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)

/* Before executing instruction: restore necessary bits in EFLAGS. */
#define _PRE_EFLAGS(_sav, _msk, _tmp)					\
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
	"movl %"_sav",%"_LO32 _tmp"; "                                  \
	"push %"_tmp"; "                                                \
	"push %"_tmp"; "                                                \
	"movl %"_msk",%"_LO32 _tmp"; "                                  \
	"andl %"_LO32 _tmp",("_STK"); "                                 \
	"pushf; "                                                       \
	"notl %"_LO32 _tmp"; "                                          \
	"andl %"_LO32 _tmp",("_STK"); "                                 \
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "	\
	"pop  %"_tmp"; "                                                \
	"orl  %"_LO32 _tmp",("_STK"); "                                 \
	"popf; "                                                        \
	"pop  %"_sav"; "

/* After executing instruction: write-back necessary bits in EFLAGS. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk; */		\
	"pushf; "				\
	"pop  %"_tmp"; "			\
	"andl %"_msk",%"_LO32 _tmp"; "		\
	"orl  %"_LO32 _tmp",%"_sav"; "

#ifdef CONFIG_X86_64
#define ON64(x) x
#else
#define ON64(x)
#endif

#define ____emulate_2op(ctxt, _op, _x, _y, _suffix, _dsttype)	\
	do {								\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "4", "2")			\
			_op _suffix " %"_x"3,%1; "			\
			_POST_EFLAGS("0", "4", "2")			\
			: "=m" ((ctxt)->eflags),			\
			  "+q" (*(_dsttype*)&(ctxt)->dst.val),		\
			  "=&r" (_tmp)					\
			: _y ((ctxt)->src.val), "i" (EFLAGS_MASK));	\
	} while (0)


/* Raw emulation: instruction has two explicit operands. */
#define __emulate_2op_nobyte(ctxt,_op,_wx,_wy,_lx,_ly,_qx,_qy)		\
	do {								\
		unsigned long _tmp;					\
									\
		switch ((ctxt)->dst.bytes) {				\
		case 2:							\
			____emulate_2op(ctxt,_op,_wx,_wy,"w",u16);	\
			break;						\
		case 4:							\
			____emulate_2op(ctxt,_op,_lx,_ly,"l",u32);	\
			break;						\
		case 8:							\
			ON64(____emulate_2op(ctxt,_op,_qx,_qy,"q",u64)); \
			break;						\
		}							\
	} while (0)

#define __emulate_2op(ctxt,_op,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy)		     \
	do {								     \
		unsigned long _tmp;					     \
		switch ((ctxt)->dst.bytes) {				     \
		case 1:							     \
			____emulate_2op(ctxt,_op,_bx,_by,"b",u8);	     \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(ctxt, _op,			     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)

/* Source operand is byte-sized and may be restricted to just %cl. */
#define emulate_2op_SrcB(ctxt, _op)					\
	__emulate_2op(ctxt, _op, "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(ctxt, _op)					\
	__emulate_2op(ctxt, _op, "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(ctxt, _op)				\
	__emulate_2op_nobyte(ctxt, _op, "w", "r", _LO32, "r", "", "r")

/* Instruction has three operands and one operand is stored in ECX register */
#define __emulate_2op_cl(ctxt, _op, _suffix, _type)		\
	do {								\
		unsigned long _tmp;					\
		_type _clv  = (ctxt)->src2.val;				\
		_type _srcv = (ctxt)->src.val;				\
		_type _dstv = (ctxt)->dst.val;				\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "5", "2")			\
			_op _suffix " %4,%1 \n"				\
			_POST_EFLAGS("0", "5", "2")			\
			: "=m" ((ctxt)->eflags), "+r" (_dstv), "=&r" (_tmp) \
			: "c" (_clv) , "r" (_srcv), "i" (EFLAGS_MASK)	\
			);						\
									\
		(ctxt)->src2.val = (unsigned long) _clv;		\
		(ctxt)->src.val  = (unsigned long) _srcv;		\
		(ctxt)->dst.val  = (unsigned long) _dstv;		\
	} while (0)

#define emulate_2op_cl(ctxt, _op)					\
	do {								\
		switch ((ctxt)->dst.bytes) {				\
		case 2:							\
			__emulate_2op_cl(ctxt, _op, "w", u16);		\
			break;						\
		case 4:							\
			__emulate_2op_cl(ctxt, _op, "l", u32);		\
			break;						\
		case 8:							\
			ON64(__emulate_2op_cl(ctxt, _op, "q", ulong));	\
			break;						\
		}							\
	} while (0)

#define __emulate_1op(ctxt, _op, _suffix)				\
	do {								\
		unsigned long _tmp;					\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "3", "2")			\
			_op _suffix " %1; "				\
			_POST_EFLAGS("0", "3", "2")			\
			: "=m" ((ctxt)->eflags), "+m" ((ctxt)->dst.val), \
			  "=&r" (_tmp)					\
			: "i" (EFLAGS_MASK));				\
	} while (0)

/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(ctxt, _op)						\
	do {								\
		switch ((ctxt)->dst.bytes) {				\
		case 1:	__emulate_1op(ctxt, _op, "b"); break;		\
		case 2:	__emulate_1op(ctxt, _op, "w"); break;		\
		case 4:	__emulate_1op(ctxt, _op, "l"); break;		\
		case 8:	ON64(__emulate_1op(ctxt, _op, "q")); break;	\
		}							\
	} while (0)

#define FOP_ALIGN ".align " __stringify(FASTOP_SIZE) " \n\t"
#define FOP_RET   "ret \n\t"

#define FOP_START(op) \
	extern void em_##op(struct fastop *fake); \
	asm(".pushsection .text, \"ax\" \n\t" \
	    ".global em_" #op " \n\t" \
            FOP_ALIGN \
	    "em_" #op ": \n\t"

#define FOP_END \
	    ".popsection")

#define FOPNOP() FOP_ALIGN FOP_RET

#define FOP1E(op,  dst) \
	FOP_ALIGN #op " %" #dst " \n\t" FOP_RET

#define FASTOP1(op) \
	FOP_START(op) \
	FOP1E(op##b, al) \
	FOP1E(op##w, ax) \
	FOP1E(op##l, eax) \
	ON64(FOP1E(op##q, rax))	\
	FOP_END

#define FOP2E(op,  dst, src)	   \
	FOP_ALIGN #op " %" #src ", %" #dst " \n\t" FOP_RET

#define FASTOP2(op) \
	FOP_START(op) \
	FOP2E(op##b, al, bl) \
	FOP2E(op##w, ax, bx) \
	FOP2E(op##l, eax, ebx) \
	ON64(FOP2E(op##q, rax, rbx)) \
	FOP_END

/* 2 operand, src is CL */
#define FASTOP2CL(op) \
	FOP_START(op) \
	FOP2E(op##b, al, cl) \
	FOP2E(op##w, ax, cl) \
	FOP2E(op##l, eax, cl) \
	ON64(FOP2E(op##q, rax, cl)) \
	FOP_END

#define FOP3E(op,  dst, src, src2) \
	FOP_ALIGN #op " %" #src2 ", %" #src ", %" #dst " \n\t" FOP_RET

/* 3-operand, word-only, src2=cl */
#define FASTOP3WCL(op) \
	FOP_START(op) \
	FOPNOP() \
	FOP3E(op##w, ax, bx, cl) \
	FOP3E(op##l, eax, ebx, cl) \
	ON64(FOP3E(op##q, rax, rbx, cl)) \
	FOP_END

#define __emulate_1op_rax_rdx(ctxt, _op, _suffix, _ex)			\
	do {								\
		unsigned long _tmp;					\
		ulong *rax = reg_rmw((ctxt), VCPU_REGS_RAX);		\
		ulong *rdx = reg_rmw((ctxt), VCPU_REGS_RDX);		\
									\
		__asm__ __volatile__ (					\
			_PRE_EFLAGS("0", "5", "1")			\
			"1: \n\t"					\
			_op _suffix " %6; "				\
			"2: \n\t"					\
			_POST_EFLAGS("0", "5", "1")			\
			".pushsection .fixup,\"ax\" \n\t"		\
			"3: movb $1, %4 \n\t"				\
			"jmp 2b \n\t"					\
			".popsection \n\t"				\
			_ASM_EXTABLE(1b, 3b)				\
			: "=m" ((ctxt)->eflags), "=&r" (_tmp),		\
			  "+a" (*rax), "+d" (*rdx), "+qm"(_ex)		\
			: "i" (EFLAGS_MASK), "m" ((ctxt)->src.val));	\
	} while (0)

/* instruction has only one source operand, destination is implicit (e.g. mul, div, imul, idiv) */
#define emulate_1op_rax_rdx(ctxt, _op, _ex)	\
	do {								\
		switch((ctxt)->src.bytes) {				\
		case 1:							\
			__emulate_1op_rax_rdx(ctxt, _op, "b", _ex);	\
			break;						\
		case 2:							\
			__emulate_1op_rax_rdx(ctxt, _op, "w", _ex);	\
			break;						\
		case 4:							\
			__emulate_1op_rax_rdx(ctxt, _op, "l", _ex);	\
			break;						\
		case 8: ON64(						\
			__emulate_1op_rax_rdx(ctxt, _op, "q", _ex));	\
			break;						\
		}							\
	} while (0)

static int emulator_check_intercept(struct x86_emulate_ctxt *ctxt,
				    enum x86_intercept intercept,
				    enum x86_intercept_stage stage)
{
	struct x86_instruction_info info = {
		.intercept  = intercept,
		.rep_prefix = ctxt->rep_prefix,
		.modrm_mod  = ctxt->modrm_mod,
		.modrm_reg  = ctxt->modrm_reg,
		.modrm_rm   = ctxt->modrm_rm,
		.src_val    = ctxt->src.val64,
		.src_bytes  = ctxt->src.bytes,
		.dst_bytes  = ctxt->dst.bytes,
		.ad_bytes   = ctxt->ad_bytes,
		.next_rip   = ctxt->eip,
	};

	return ctxt->ops->intercept(ctxt, &info, stage);
}

static void assign_masked(ulong *dest, ulong src, ulong mask)
{
	*dest = (*dest & ~mask) | (src & mask);
}

static inline unsigned long ad_mask(struct x86_emulate_ctxt *ctxt)
{
	return (1UL << (ctxt->ad_bytes << 3)) - 1;
}

static ulong stack_mask(struct x86_emulate_ctxt *ctxt)
{
	u16 sel;
	struct desc_struct ss;

	if (ctxt->mode == X86EMUL_MODE_PROT64)
		return ~0UL;
	ctxt->ops->get_segment(ctxt, &sel, &ss, NULL, VCPU_SREG_SS);
	return ~0U >> ((ss.d ^ 1) * 16);  /* d=0: 0xffff; d=1: 0xffffffff */
}

static int stack_size(struct x86_emulate_ctxt *ctxt)
{
	return (__fls(stack_mask(ctxt)) + 1) >> 3;
}

/* Access/update address held in a register, based on addressing mode. */
static inline unsigned long
address_mask(struct x86_emulate_ctxt *ctxt, unsigned long reg)
{
	if (ctxt->ad_bytes == sizeof(unsigned long))
		return reg;
	else
		return reg & ad_mask(ctxt);
}

static inline unsigned long
register_address(struct x86_emulate_ctxt *ctxt, unsigned long reg)
{
	return address_mask(ctxt, reg);
}

static void masked_increment(ulong *reg, ulong mask, int inc)
{
	assign_masked(reg, *reg + inc, mask);
}

static inline void
register_address_increment(struct x86_emulate_ctxt *ctxt, unsigned long *reg, int inc)
{
	ulong mask;

	if (ctxt->ad_bytes == sizeof(unsigned long))
		mask = ~0UL;
	else
		mask = ad_mask(ctxt);
	masked_increment(reg, mask, inc);
}

static void rsp_increment(struct x86_emulate_ctxt *ctxt, int inc)
{
	masked_increment(reg_rmw(ctxt, VCPU_REGS_RSP), stack_mask(ctxt), inc);
}

static inline void jmp_rel(struct x86_emulate_ctxt *ctxt, int rel)
{
	register_address_increment(ctxt, &ctxt->_eip, rel);
}

static u32 desc_limit_scaled(struct desc_struct *desc)
{
	u32 limit = get_desc_limit(desc);

	return desc->g ? (limit << 12) | 0xfff : limit;
}

static void set_seg_override(struct x86_emulate_ctxt *ctxt, int seg)
{
	ctxt->has_seg_override = true;
	ctxt->seg_override = seg;
}

static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return ctxt->ops->get_cached_segment_base(ctxt, seg);
}

static unsigned seg_override(struct x86_emulate_ctxt *ctxt)
{
	if (!ctxt->has_seg_override)
		return 0;

	return ctxt->seg_override;
}

static int emulate_exception(struct x86_emulate_ctxt *ctxt, int vec,
			     u32 error, bool valid)
{
	ctxt->exception.vector = vec;
	ctxt->exception.error_code = error;
	ctxt->exception.error_code_valid = valid;
	return X86EMUL_PROPAGATE_FAULT;
}

static int emulate_db(struct x86_emulate_ctxt *ctxt)
{
	return emulate_exception(ctxt, DB_VECTOR, 0, false);
}

static int emulate_gp(struct x86_emulate_ctxt *ctxt, int err)
{
	return emulate_exception(ctxt, GP_VECTOR, err, true);
}

static int emulate_ss(struct x86_emulate_ctxt *ctxt, int err)
{
	return emulate_exception(ctxt, SS_VECTOR, err, true);
}

static int emulate_ud(struct x86_emulate_ctxt *ctxt)
{
	return emulate_exception(ctxt, UD_VECTOR, 0, false);
}

static int emulate_ts(struct x86_emulate_ctxt *ctxt, int err)
{
	return emulate_exception(ctxt, TS_VECTOR, err, true);
}

static int emulate_de(struct x86_emulate_ctxt *ctxt)
{
	return emulate_exception(ctxt, DE_VECTOR, 0, false);
}

static int emulate_nm(struct x86_emulate_ctxt *ctxt)
{
	return emulate_exception(ctxt, NM_VECTOR, 0, false);
}

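/*
 * Read or rewrite just the selector of a segment register; the cached
 * descriptor and base are fetched and written back unchanged.
 */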
static u16 get_segment_selector(struct x86_emulate_ctxt *ctxt, unsigned seg)
{
	u16 selector;
	struct desc_struct desc;

	ctxt->ops->get_segment(ctxt, &selector, &desc, NULL, seg);
	return selector;
}

static void set_segment_selector(struct x86_emulate_ctxt *ctxt, u16 selector,
				 unsigned seg)
{
	u16 dummy;
	u32 base3;
	struct desc_struct desc;

	ctxt->ops->get_segment(ctxt, &dummy, &desc, &base3, seg);
	ctxt->ops->set_segment(ctxt, selector, &desc, base3, seg);
}

/*
 * x86 defines three classes of vector instructions: explicitly
 * aligned, explicitly unaligned, and the rest, which change behaviour
 * depending on whether they're AVX encoded or not.
 *
 * Also included is CMPXCHG16B which is not a vector instruction, yet it is
 * subject to the same check.
 */
static bool insn_aligned(struct x86_emulate_ctxt *ctxt, unsigned size)
{
	if (likely(size < 16))
		return false;

	if (ctxt->d & Aligned)
		return true;
	else if (ctxt->d & Unaligned)
		return false;
	else if (ctxt->d & Avx)
		return false;
	else
		return true;
}
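
/*
 * __linearize() turns a segmented address into a linear address, applying
 * the segment base and checking limits, canonical form and access rights
 * for the current CPU mode; it raises #GP, or #SS for SS-relative accesses,
 * on failure.
 */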

static int __linearize(struct x86_emulate_ctxt *ctxt,
		     struct segmented_address addr,
		     unsigned size, bool write, bool fetch,
		     ulong *linear)
{
	struct desc_struct desc;
	bool usable;
	ulong la;
	u32 lim;
	u16 sel;
	unsigned cpl;

	la = seg_base(ctxt, addr.seg) + addr.ea;
	switch (ctxt->mode) {
	case X86EMUL_MODE_PROT64:
		if (((signed long)la << 16) >> 16 != la)
			return emulate_gp(ctxt, 0);
		break;
	default:
		usable = ctxt->ops->get_segment(ctxt, &sel, &desc, NULL,
						addr.seg);
		if (!usable)
			goto bad;
		/* code segment in protected mode or read-only data segment */
		if ((((ctxt->mode != X86EMUL_MODE_REAL) && (desc.type & 8))
					|| !(desc.type & 2)) && write)
			goto bad;
		/* unreadable code segment */
		if (!fetch && (desc.type & 8) && !(desc.type & 2))
			goto bad;
		lim = desc_limit_scaled(&desc);
		if ((desc.type & 8) || !(desc.type & 4)) {
			/* expand-up segment */
			if (addr.ea > lim || (u32)(addr.ea + size - 1) > lim)
				goto bad;
		} else {
			/* expand-down segment */
			if (addr.ea <= lim || (u32)(addr.ea + size - 1) <= lim)
				goto bad;
			lim = desc.d ? 0xffffffff : 0xffff;
			if (addr.ea > lim || (u32)(addr.ea + size - 1) > lim)
				goto bad;
		}
		cpl = ctxt->ops->cpl(ctxt);
		if (!(desc.type & 8)) {
			/* data segment */
			if (cpl > desc.dpl)
				goto bad;
		} else if ((desc.type & 8) && !(desc.type & 4)) {
			/* nonconforming code segment */
			if (cpl != desc.dpl)
				goto bad;
		} else if ((desc.type & 8) && (desc.type & 4)) {
			/* conforming code segment */
			if (cpl < desc.dpl)
				goto bad;
		}
		break;
	}
	if (fetch ? ctxt->mode != X86EMUL_MODE_PROT64 : ctxt->ad_bytes != 8)
		la &= (u32)-1;
	if (insn_aligned(ctxt, size) && ((la & (size - 1)) != 0))
		return emulate_gp(ctxt, 0);
	*linear = la;
	return X86EMUL_CONTINUE;
bad:
	if (addr.seg == VCPU_SREG_SS)
		return emulate_ss(ctxt, sel);
	else
		return emulate_gp(ctxt, sel);
}

static int linearize(struct x86_emulate_ctxt *ctxt,
		     struct segmented_address addr,
		     unsigned size, bool write,
		     ulong *linear)
{
	return __linearize(ctxt, addr, size, write, false, linear);
}


static int segmented_read_std(struct x86_emulate_ctxt *ctxt,
			      struct segmented_address addr,
			      void *data,
			      unsigned size)
{
	int rc;
	ulong linear;

	rc = linearize(ctxt, addr, size, false, &linear);
	if (rc != X86EMUL_CONTINUE)
		return rc;
	return ctxt->ops->read_std(ctxt, linear, data, size, &ctxt->exception);
}

/*
 * Fetch the next byte of the instruction being emulated which is pointed to
 * by ctxt->_eip, then increment ctxt->_eip.
 *
 * Also prefetch the remaining bytes of the instruction without crossing page
 * boundary if they are not in fetch_cache yet.
 */
static int do_insn_fetch_byte(struct x86_emulate_ctxt *ctxt, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->fetch;
	int rc;
	int size, cur_size;

	if (ctxt->_eip == fc->end) {
		unsigned long linear;
		struct segmented_address addr = { .seg = VCPU_SREG_CS,
						  .ea  = ctxt->_eip };
		cur_size = fc->end - fc->start;
		size = min(15UL - cur_size,
			   PAGE_SIZE - offset_in_page(ctxt->_eip));
		rc = __linearize(ctxt, addr, size, false, true, &linear);
		if (unlikely(rc != X86EMUL_CONTINUE))
			return rc;
		rc = ctxt->ops->fetch(ctxt, linear, fc->data + cur_size,
				      size, &ctxt->exception);
		if (unlikely(rc != X86EMUL_CONTINUE))
			return rc;
		fc->end += size;
	}
	*dest = fc->data[ctxt->_eip - fc->start];
	ctxt->_eip++;
	return X86EMUL_CONTINUE;
}

static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
			 void *dest, unsigned size)
{
	int rc;

	/* x86 instructions are limited to 15 bytes. */
	if (unlikely(ctxt->_eip + size - ctxt->eip > 15))
		return X86EMUL_UNHANDLEABLE;
	while (size--) {
		rc = do_insn_fetch_byte(ctxt, dest++);
		if (rc != X86EMUL_CONTINUE)
			return rc;
	}
	return X86EMUL_CONTINUE;
}

/* Fetch next part of the instruction being emulated. */
#define insn_fetch(_type, _ctxt)					\
({	unsigned long _x;						\
	rc = do_insn_fetch(_ctxt, &_x, sizeof(_type));			\
	if (rc != X86EMUL_CONTINUE)					\
		goto done;						\
	(_type)_x;							\
})

#define insn_fetch_arr(_arr, _size, _ctxt)				\
({	rc = do_insn_fetch(_ctxt, _arr, (_size));			\
	if (rc != X86EMUL_CONTINUE)					\
		goto done;						\
})
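/*
 * Illustrative usage (not part of the original file): inside a decoder that
 * declares "int rc" and provides a "done" label, the next opcode byte can be
 * pulled in with something like "ctxt->b = insn_fetch(u8, ctxt);".
 */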

/*
 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 * pointer into the block that addresses the relevant register.
 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 */
static void *decode_register(struct x86_emulate_ctxt *ctxt, u8 modrm_reg,
			     int highbyte_regs)
{
	void *p;

	if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
		p = (unsigned char *)reg_rmw(ctxt, modrm_reg & 3) + 1;
	else
		p = reg_rmw(ctxt, modrm_reg);
	return p;
}

static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct segmented_address addr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = segmented_read_std(ctxt, addr, size, 2);
	if (rc != X86EMUL_CONTINUE)
		return rc;
	addr.ea += 2;
	rc = segmented_read_std(ctxt, addr, address, op_bytes);
	return rc;
}

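/*
 * Evaluate the condition encoded in the low nibble of a Jcc/SETcc/CMOVcc
 * opcode against the given EFLAGS value; e.g. condition 0x4 (e/z) tests ZF,
 * and each odd condition is the negation of the even one before it.
 */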
static int test_cc(unsigned int condition, unsigned int flags)
{
	int rc = 0;

	switch ((condition & 15) >> 1) {
	case 0: /* o */
		rc |= (flags & EFLG_OF);
		break;
	case 1: /* b/c/nae */
		rc |= (flags & EFLG_CF);
		break;
	case 2: /* z/e */
		rc |= (flags & EFLG_ZF);
		break;
	case 3: /* be/na */
		rc |= (flags & (EFLG_CF|EFLG_ZF));
		break;
	case 4: /* s */
		rc |= (flags & EFLG_SF);
		break;
	case 5: /* p/pe */
		rc |= (flags & EFLG_PF);
		break;
	case 7: /* le/ng */
		rc |= (flags & EFLG_ZF);
		/* fall through */
	case 6: /* l/nge */
		rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
		break;
	}

	/* Odd condition identifiers (lsb == 1) have inverted sense. */
	return (!!rc ^ (condition & 1));
}

static void fetch_register_operand(struct operand *op)
{
	switch (op->bytes) {
	case 1:
		op->val = *(u8 *)op->addr.reg;
		break;
	case 2:
		op->val = *(u16 *)op->addr.reg;
		break;
	case 4:
		op->val = *(u32 *)op->addr.reg;
		break;
	case 8:
		op->val = *(u64 *)op->addr.reg;
		break;
	}
}

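/*
 * Copy an SSE register to or from memory.  The access is bracketed by
 * ctxt->ops->get_fpu()/put_fpu() so the right FPU state is in place while
 * the movdqa executes.
 */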
static void read_sse_reg(struct x86_emulate_ctxt *ctxt, sse128_t *data, int reg)
{
	ctxt->ops->get_fpu(ctxt);
	switch (reg) {
	case 0: asm("movdqa %%xmm0, %0" : "=m"(*data)); break;
	case 1: asm("movdqa %%xmm1, %0" : "=m"(*data)); break;
	case 2: asm("movdqa %%xmm2, %0" : "=m"(*data)); break;
	case 3: asm("movdqa %%xmm3, %0" : "=m"(*data)); break;
	case 4: asm("movdqa %%xmm4, %0" : "=m"(*data)); break;
	case 5: asm("movdqa %%xmm5, %0" : "=m"(*data)); break;
	case 6: asm("movdqa %%xmm6, %0" : "=m"(*data)); break;
	case 7: asm("movdqa %%xmm7, %0" : "=m"(*data)); break;
#ifdef CONFIG_X86_64
	case 8: asm("movdqa %%xmm8, %0" : "=m"(*data)); break;
	case 9: asm("movdqa %%xmm9, %0" : "=m"(*data)); break;
	case 10: asm("movdqa %%xmm10, %0" : "=m"(*data)); break;
	case 11: asm("movdqa %%xmm11, %0" : "=m"(*data)); break;
	case 12: asm("movdqa %%xmm12, %0" : "=m"(*data)); break;
	case 13: asm("movdqa %%xmm13, %0" : "=m"(*data)); break;
	case 14: asm("movdqa %%xmm14, %0" : "=m"(*data)); break;
	case 15: asm("movdqa %%xmm15, %0" : "=m"(*data)); break;
#endif
	default: BUG();
	}
	ctxt->ops->put_fpu(ctxt);
}

static void write_sse_reg(struct x86_emulate_ctxt *ctxt, sse128_t *data,
			  int reg)
{
	ctxt->ops->get_fpu(ctxt);
	switch (reg) {
	case 0: asm("movdqa %0, %%xmm0" : : "m"(*data)); break;
	case 1: asm("movdqa %0, %%xmm1" : : "m"(*data)); break;
	case 2: asm("movdqa %0, %%xmm2" : : "m"(*data)); break;
	case 3: asm("movdqa %0, %%xmm3" : : "m"(*data)); break;
	case 4: asm("movdqa %0, %%xmm4" : : "m"(*data)); break;
	case 5: asm("movdqa %0, %%xmm5" : : "m"(*data)); break;
	case 6: asm("movdqa %0, %%xmm6" : : "m"(*data)); break;
	case 7: asm("movdqa %0, %%xmm7" : : "m"(*data)); break;
#ifdef CONFIG_X86_64
	case 8: asm("movdqa %0, %%xmm8" : : "m"(*data)); break;
	case 9: asm("movdqa %0, %%xmm9" : : "m"(*data)); break;
	case 10: asm("movdqa %0, %%xmm10" : : "m"(*data)); break;
	case 11: asm("movdqa %0, %%xmm11" : : "m"(*data)); break;
	case 12: asm("movdqa %0, %%xmm12" : : "m"(*data)); break;
	case 13: asm("movdqa %0, %%xmm13" : : "m"(*data)); break;
	case 14: asm("movdqa %0, %%xmm14" : : "m"(*data)); break;
	case 15: asm("movdqa %0, %%xmm15" : : "m"(*data)); break;
#endif
	default: BUG();
	}
	ctxt->ops->put_fpu(ctxt);
}

static void read_mmx_reg(struct x86_emulate_ctxt *ctxt, u64 *data, int reg)
{
	ctxt->ops->get_fpu(ctxt);
	switch (reg) {
	case 0: asm("movq %%mm0, %0" : "=m"(*data)); break;
	case 1: asm("movq %%mm1, %0" : "=m"(*data)); break;
	case 2: asm("movq %%mm2, %0" : "=m"(*data)); break;
	case 3: asm("movq %%mm3, %0" : "=m"(*data)); break;
	case 4: asm("movq %%mm4, %0" : "=m"(*data)); break;
	case 5: asm("movq %%mm5, %0" : "=m"(*data)); break;
	case 6: asm("movq %%mm6, %0" : "=m"(*data)); break;
	case 7: asm("movq %%mm7, %0" : "=m"(*data)); break;
	default: BUG();
	}
	ctxt->ops->put_fpu(ctxt);
}

static void write_mmx_reg(struct x86_emulate_ctxt *ctxt, u64 *data, int reg)
{
	ctxt->ops->get_fpu(ctxt);
	switch (reg) {
	case 0: asm("movq %0, %%mm0" : : "m"(*data)); break;
	case 1: asm("movq %0, %%mm1" : : "m"(*data)); break;
	case 2: asm("movq %0, %%mm2" : : "m"(*data)); break;
	case 3: asm("movq %0, %%mm3" : : "m"(*data)); break;
	case 4: asm("movq %0, %%mm4" : : "m"(*data)); break;
	case 5: asm("movq %0, %%mm5" : : "m"(*data)); break;
	case 6: asm("movq %0, %%mm6" : : "m"(*data)); break;
	case 7: asm("movq %0, %%mm7" : : "m"(*data)); break;
	default: BUG();
	}
	ctxt->ops->put_fpu(ctxt);
}

static int em_fninit(struct x86_emulate_ctxt *ctxt)
{
	if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
		return emulate_nm(ctxt);

	ctxt->ops->get_fpu(ctxt);
	asm volatile("fninit");
	ctxt->ops->put_fpu(ctxt);
	return X86EMUL_CONTINUE;
}

static int em_fnstcw(struct x86_emulate_ctxt *ctxt)
{
	u16 fcw;

	if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
		return emulate_nm(ctxt);

	ctxt->ops->get_fpu(ctxt);
	asm volatile("fnstcw %0": "+m"(fcw));
	ctxt->ops->put_fpu(ctxt);

	/* force 2 byte destination */
	ctxt->dst.bytes = 2;
	ctxt->dst.val = fcw;

	return X86EMUL_CONTINUE;
}

static int em_fnstsw(struct x86_emulate_ctxt *ctxt)
{
	u16 fsw;

	if (ctxt->ops->get_cr(ctxt, 0) & (X86_CR0_TS | X86_CR0_EM))
		return emulate_nm(ctxt);

	ctxt->ops->get_fpu(ctxt);
	asm volatile("fnstsw %0": "+m"(fsw));
	ctxt->ops->put_fpu(ctxt);

	/* force 2 byte destination */
	ctxt->dst.bytes = 2;
	ctxt->dst.val = fsw;

	return X86EMUL_CONTINUE;
}

static void decode_register_operand(struct x86_emulate_ctxt *ctxt,
				    struct operand *op)
{
	unsigned reg = ctxt->modrm_reg;
	int highbyte_regs = ctxt->rex_prefix == 0;

	if (!(ctxt->d & ModRM))
		reg = (ctxt->b & 7) | ((ctxt->rex_prefix & 1) << 3);

	if (ctxt->d & Sse) {
		op->type = OP_XMM;
		op->bytes = 16;
		op->addr.xmm = reg;
		read_sse_reg(ctxt, &op->vec_val, reg);
		return;
	}
	if (ctxt->d & Mmx) {
		reg &= 7;
		op->type = OP_MM;
		op->bytes = 8;
		op->addr.mm = reg;
		return;
	}

	op->type = OP_REG;
	if (ctxt->d & ByteOp) {
		op->addr.reg = decode_register(ctxt, reg, highbyte_regs);
		op->bytes = 1;
	} else {
		op->addr.reg = decode_register(ctxt, reg, 0);
		op->bytes = ctxt->op_bytes;
	}
	fetch_register_operand(op);
	op->orig_val = op->val;
}

static void adjust_modrm_seg(struct x86_emulate_ctxt *ctxt, int base_reg)
{
	if (base_reg == VCPU_REGS_RSP || base_reg == VCPU_REGS_RBP)
		ctxt->modrm_seg = VCPU_SREG_SS;
}

static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct operand *op)
{
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = X86EMUL_CONTINUE;
	ulong modrm_ea = 0;