1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22#ifndef __KERNEL__
23#include <stdio.h>
24#include <stdint.h>
25#include <public/xen.h>
26#define DPRINTF(_f, _a ...) printf(_f , ## _a)
27#else
28#include <linux/kvm_host.h>
29#include "kvm_cache_regs.h"
30#define DPRINTF(x...) do {} while (0)
31#endif
32#include <linux/module.h>
33#include <asm/kvm_x86_emulate.h>
34
35
36
37
38
39
40
41
42
43
44
/*
 * Operand-decode flags, ORed together per opcode in the tables below.
 */
#define ByteOp (1<<0)	/* 8-bit operands. */
/* Destination operand type (bits 1:3). */
#define ImplicitOps (1<<1)	/* Implicit in opcode. No generic decode. */
#define DstReg (2<<1)	/* Register operand. */
#define DstMem (3<<1)	/* Memory operand. */
#define DstAcc (4<<1)	/* Destination is the accumulator (rAX). */
#define DstMask (7<<1)
/* Source operand type (bits 4:6). */
#define SrcNone (0<<4)	/* No source operand. */
#define SrcImplicit (0<<4)	/* Source operand is implicit in the opcode. */
#define SrcReg (1<<4)	/* Register operand. */
#define SrcMem (2<<4)	/* Memory operand. */
#define SrcMem16 (3<<4)	/* Memory operand (16-bit). */
#define SrcMem32 (4<<4)	/* Memory operand (32-bit). */
#define SrcImm (5<<4)	/* Immediate operand. */
#define SrcImmByte (6<<4)	/* 8-bit sign-extended immediate operand. */
#define SrcMask (7<<4)
/* Generic ModRM decode. */
#define ModRM (1<<7)
/* Destination is only written; never read. */
#define Mov (1<<8)
#define BitOp (1<<9)	/* Bit-test/modify instruction (bt/bts/btr/btc). */
#define MemAbs (1<<10)	/* Memory operand is an absolute displacement. */
#define String (1<<12)	/* String instruction (honours rep prefixes). */
#define Stack (1<<13)	/* Stack instruction (push/pop). */
#define Group (1<<14)	/* ModRM reg field extends the opcode. */
#define GroupDual (1<<15)	/* Alternate decoding when ModRM mod == 3. */
#define GroupMask 0xff	/* Group index lives in the low byte of c->d. */

/* Indices into group_table[]/group2_table[]; each group takes 8 slots. */
enum {
	Group1_80, Group1_81, Group1_82, Group1_83,
	Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
};
78
/*
 * Decode flags for each one-byte opcode, indexed by opcode value.
 * A zero entry means the opcode is not handled by the generic decoder.
 */
static u16 opcode_table[256] = {
	/* 0x00 - 0x07 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x08 - 0x0F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x10 - 0x17 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x18 - 0x1F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x20 - 0x27 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
	/* 0x28 - 0x2F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x30 - 0x37 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	0, 0, 0, 0,
	/* 0x38 - 0x3F */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
	ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
	0, 0,
	/* 0x40 - 0x47 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x48 - 0x4F */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x50 - 0x57 */
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
	/* 0x58 - 0x5F */
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
	/* 0x60 - 0x67 */
	0, 0, 0, DstReg | SrcMem32 | ModRM | Mov ,
	0, 0, 0, 0,
	/* 0x68 - 0x6F */
	SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0x70 - 0x77 */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x78 - 0x7F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x80 - 0x87 */
	Group | Group1_80, Group | Group1_81,
	Group | Group1_82, Group | Group1_83,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
	/* 0x88 - 0x8F */
	ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
	ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
	DstReg | SrcMem | ModRM | Mov, Group | Group1A,
	/* 0x90 - 0x97 */
	DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
	/* 0x98 - 0x9F */
	0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
	/* 0xA0 - 0xA7 */
	ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
	ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xA8 - 0xAF */
	0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
	ByteOp | ImplicitOps | String, ImplicitOps | String,
	/* 0xB0 - 0xB7 */
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
	/* 0xB8 - 0xBF */
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
	/* 0xC0 - 0xC7 */
	ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
	0, ImplicitOps | Stack, 0, 0,
	ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
	/* 0xC8 - 0xCF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xD7 */
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
	0, 0, 0, 0,
	/* 0xD8 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xE7 */
	0, 0, 0, 0,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xE8 - 0xEF */
	ImplicitOps | Stack, SrcImm | ImplicitOps,
	ImplicitOps, SrcImmByte | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
	/* 0xF0 - 0xF7 */
	0, 0, 0, 0,
	ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
	/* 0xF8 - 0xFF */
	ImplicitOps, 0, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
};
197
/*
 * Decode flags for each two-byte (0x0F-prefixed) opcode, indexed by the
 * second opcode byte.  Zero means "not emulated".
 */
static u16 twobyte_table[256] = {
	/* 0x00 - 0x0F */
	0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
	ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
	/* 0x10 - 0x1F */
	0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
	/* 0x20 - 0x2F */
	ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x30 - 0x3F */
	ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x40 - 0x47 */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x48 - 0x4F */
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
	/* 0x50 - 0x5F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x60 - 0x6F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x70 - 0x7F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0x80 - 0x8F */
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
	/* 0x90 - 0x9F */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xA0 - 0xA7 */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
	/* 0xA8 - 0xAF */
	0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
	/* 0xB0 - 0xB7 */
	ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
	    DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xB8 - 0xBF */
	0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
	0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
	    DstReg | SrcMem16 | ModRM | Mov,
	/* 0xC0 - 0xCF */
	0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
	0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xD0 - 0xDF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xE0 - 0xEF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
	/* 0xF0 - 0xFF */
	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
255
/*
 * Decode flags for "group" opcodes, where the ModRM reg field (0-7)
 * selects the actual operation.  Each group occupies eight slots.
 */
static u16 group_table[] = {
	[Group1_80*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_81*8] =
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
	[Group1_82*8] =
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
	[Group1_83*8] =
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
	[Group1A*8] =
	DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
	[Group3_Byte*8] =
	ByteOp | SrcImm | DstMem | ModRM, 0,
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group3*8] =
	DstMem | SrcImm | ModRM, 0,
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	0, 0, 0, 0,
	[Group4*8] =
	ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
	0, 0, 0, 0, 0, 0,
	[Group5*8] =
	DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
	SrcMem | ModRM | Stack, 0,
	SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
	[Group7*8] =
	0, 0, ModRM | SrcMem, ModRM | SrcMem,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
};
299
/*
 * Alternate decode used for GroupDual opcodes when ModRM mod == 3
 * (register form).  Only Group7 (0f 01) has a dual decoding here.
 */
static u16 group2_table[] = {
	[Group7*8] =
	SrcNone | ModRM, 0, 0, 0,
	SrcNone | ModRM | DstMem | Mov, 0,
	SrcMem16 | ModRM | Mov, 0,
};
306
307
/* EFLAGS bit definitions. */
#define EFLG_OF (1<<11)	/* overflow */
#define EFLG_DF (1<<10)	/* direction */
#define EFLG_SF (1<<7)	/* sign */
#define EFLG_ZF (1<<6)	/* zero */
#define EFLG_AF (1<<4)	/* auxiliary carry */
#define EFLG_PF (1<<2)	/* parity */
#define EFLG_CF (1<<0)	/* carry */

/*
 * Host register/stack spellings used by the inline assembly below:
 * _LO32 forces the 32-bit sub-register name, _STK is the stack pointer.
 */
#if defined(CONFIG_X86_64)
#define _LO32 "k"
#define _STK "%%rsp"
#elif defined(__i386__)
#define _LO32 ""
#define _STK "%%esp"
#endif

/*
 * These EFLAGS bits are restored from the saved value before executing
 * an emulated instruction, and any changes are written back afterwards.
 */
#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
336
337
/* Before executing instruction: restore necessary guest EFLAGS bits. */
#define _PRE_EFLAGS(_sav, _msk, _tmp) \
	/* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk), via the host stack */ \
	"movl %"_sav",%"_LO32 _tmp"; " \
	"push %"_tmp"; " \
	"push %"_tmp"; " \
	"movl %"_msk",%"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"pushf; " \
	"notl %"_LO32 _tmp"; " \
	"andl %"_LO32 _tmp",("_STK"); " \
	"andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); " \
	"pop %"_tmp"; " \
	"orl %"_LO32 _tmp",("_STK"); " \
	"popf; " \
	"pop %"_sav"; "

/* After executing instruction: write back flags selected by _msk. */
#define _POST_EFLAGS(_sav, _msk, _tmp) \
	/* _sav |= EFLAGS & _msk */ \
	"pushf; " \
	"pop %"_tmp"; " \
	"andl %"_msk",%"_LO32 _tmp"; " \
	"orl %"_LO32 _tmp",%"_sav"; "
361
362
/*
 * Raw emulation of a two-operand instruction for 2/4/8-byte operand
 * sizes.  The _wx/_wy (word), _lx/_ly (long) and _qx/_qy (quad) pairs
 * are the inline-asm operand modifier and constraint for each size.
 */
#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								    \
		unsigned long _tmp;					    \
									    \
		switch ((_dst).bytes) {					    \
		case 2:							    \
			__asm__ __volatile__ (				    \
				_PRE_EFLAGS("0", "4", "2")		    \
				_op"w %"_wx"3,%1; "			    \
				_POST_EFLAGS("0", "4", "2")		    \
				: "=m" (_eflags), "=m" ((_dst).val),	    \
				  "=&r" (_tmp)				    \
				: _wy ((_src).val), "i" (EFLAGS_MASK));	    \
			break;						    \
		case 4:							    \
			__asm__ __volatile__ (				    \
				_PRE_EFLAGS("0", "4", "2")		    \
				_op"l %"_lx"3,%1; "			    \
				_POST_EFLAGS("0", "4", "2")		    \
				: "=m" (_eflags), "=m" ((_dst).val),	    \
				  "=&r" (_tmp)				    \
				: _ly ((_src).val), "i" (EFLAGS_MASK));	    \
			break;						    \
		case 8:							    \
			__emulate_2op_8byte(_op, _src, _dst,		    \
					    _eflags, _qx, _qy);		    \
			break;						    \
		}							    \
	} while (0)

/* As above, but also handles the 1-byte operand size (_bx/_by). */
#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
	do {								     \
		unsigned long __tmp;					     \
		switch ((_dst).bytes) {					     \
		case 1:							     \
			__asm__ __volatile__ (				     \
				_PRE_EFLAGS("0", "4", "2")		     \
				_op"b %"_bx"3,%1; "			     \
				_POST_EFLAGS("0", "4", "2")		     \
				: "=m" (_eflags), "=m" ((_dst).val),	     \
				  "=&r" (__tmp)				     \
				: _by ((_src).val), "i" (EFLAGS_MASK));	     \
			break;						     \
		default:						     \
			__emulate_2op_nobyte(_op, _src, _dst, _eflags,	     \
					     _wx, _wy, _lx, _ly, _qx, _qy);  \
			break;						     \
		}							     \
	} while (0)

/* Source operand is byte-sized and may be restricted to %cl. */
#define emulate_2op_SrcB(_op, _src, _dst, _eflags)                      \
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "c", "b", "c", "b", "c", "b", "c")

/* Source operand is byte, word, long or quad sized. */
#define emulate_2op_SrcV(_op, _src, _dst, _eflags)                      \
	__emulate_2op(_op, _src, _dst, _eflags,				\
		      "b", "q", "w", "r", _LO32, "r", "", "r")

/* Source operand is word, long or quad sized. */
#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)               \
	__emulate_2op_nobyte(_op, _src, _dst, _eflags,			\
			     "w", "r", _LO32, "r", "", "r")
427
428
/* Instruction has only one explicit operand (no source operand). */
#define emulate_1op(_op, _dst, _eflags)                                    \
	do {								\
		unsigned long _tmp;					\
									\
		switch ((_dst).bytes) {				        \
		case 1:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"b %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 2:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"w %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 4:							\
			__asm__ __volatile__ (				\
				_PRE_EFLAGS("0", "3", "2")		\
				_op"l %1; "				\
				_POST_EFLAGS("0", "3", "2")		\
				: "=m" (_eflags), "=m" ((_dst).val),	\
				  "=&r" (_tmp)				\
				: "i" (EFLAGS_MASK));			\
			break;						\
		case 8:							\
			__emulate_1op_8byte(_op, _dst, _eflags);	\
			break;						\
		}							\
	} while (0)
466
467
/* Emulate an instruction with quadword operands (x86/64 only). */
#if defined(CONFIG_X86_64)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)           \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "4", "2")			  \
			_op"q %"_qx"3,%1; "				  \
			_POST_EFLAGS("0", "4", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: _qy ((_src).val), "i" (EFLAGS_MASK));		  \
	} while (0)

#define __emulate_1op_8byte(_op, _dst, _eflags)                           \
	do {								  \
		__asm__ __volatile__ (					  \
			_PRE_EFLAGS("0", "3", "2")			  \
			_op"q %1; "					  \
			_POST_EFLAGS("0", "3", "2")			  \
			: "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
			: "i" (EFLAGS_MASK));				  \
	} while (0)

/* On 32-bit hosts quadword operations are not emulated: expand to nothing. */
#elif defined(__i386__)
#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
#define __emulate_1op_8byte(_op, _dst, _eflags)
#endif
493
494
/*
 * Fetch _size bytes of the instruction stream at *(_eip), advance
 * *(_eip) and yield the value cast to _type.  Relies on the caller
 * having "ctxt", "ops" and "rc" in scope and a "done" label: on a
 * fetch failure it sets rc and jumps straight to done.
 */
#define insn_fetch(_type, _size, _eip)                                  \
({	unsigned long _x; \
	rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size)); \
	if (rc != 0) \
		goto done; \
	(_eip) += (_size); \
	(_type)_x; \
})
503
504static inline unsigned long ad_mask(struct decode_cache *c)
505{
506 return (1UL << (c->ad_bytes << 3)) - 1;
507}
508
509
510static inline unsigned long
511address_mask(struct decode_cache *c, unsigned long reg)
512{
513 if (c->ad_bytes == sizeof(unsigned long))
514 return reg;
515 else
516 return reg & ad_mask(c);
517}
518
/* Effective address: segment @base plus @reg truncated to address size. */
static inline unsigned long
register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
{
	return base + address_mask(c, reg);
}
524
525static inline void
526register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
527{
528 if (c->ad_bytes == sizeof(unsigned long))
529 *reg += inc;
530 else
531 *reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
532}
533
/* Apply a relative jump displacement to the decoded instruction pointer. */
static inline void jmp_rel(struct decode_cache *c, int rel)
{
	register_address_increment(c, &c->eip, rel);
}
538
/* Record a segment-override for the current instruction decode. */
static void set_seg_override(struct decode_cache *c, int seg)
{
	c->has_seg_override = true;
	c->seg_override = seg;
}
544
/*
 * Base address of segment @seg.  In long mode segments below FS
 * (CS/DS/ES/SS) have a forced zero base; FS/GS keep theirs.
 */
static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
{
	if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
		return 0;

	return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
}
552
553static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
554 struct decode_cache *c)
555{
556 if (!c->has_seg_override)
557 return 0;
558
559 return seg_base(ctxt, c->seg_override);
560}
561
/* Base address of the ES segment (used by string destinations). */
static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_ES);
}

/* Base address of the SS segment (used by stack accesses). */
static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
{
	return seg_base(ctxt, VCPU_SREG_SS);
}
571
/*
 * Fetch one instruction byte at linear address @linear into *@dest.
 * On a miss of the per-instruction fetch cache, refill it with up to
 * 15 bytes, never crossing a page boundary.  Returns 0 on success or
 * the read_std() error code.
 */
static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
			      struct x86_emulate_ops *ops,
			      unsigned long linear, u8 *dest)
{
	struct fetch_cache *fc = &ctxt->decode.fetch;
	int rc;
	int size;

	if (linear < fc->start || linear >= fc->end) {
		size = min(15UL, PAGE_SIZE - offset_in_page(linear));
		rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
		if (rc)
			return rc;
		fc->start = linear;
		fc->end = linear + size;
	}
	*dest = fc->data[linear - fc->start];
	return 0;
}
591
592static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
593 struct x86_emulate_ops *ops,
594 unsigned long eip, void *dest, unsigned size)
595{
596 int rc = 0;
597
598 eip += ctxt->cs_base;
599 while (size--) {
600 rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
601 if (rc)
602 return rc;
603 }
604 return 0;
605}
606
607
608
609
610
611
612static void *decode_register(u8 modrm_reg, unsigned long *regs,
613 int highbyte_regs)
614{
615 void *p;
616
617 p = ®s[modrm_reg];
618 if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
619 p = (unsigned char *)®s[modrm_reg & 3] + 1;
620 return p;
621}
622
/*
 * Read a descriptor-table operand (as used by lgdt/lidt): a 16-bit
 * limit followed by the base address.  With a 16-bit operand size only
 * 3 base bytes are read (24-bit base); otherwise op_bytes base bytes.
 * Returns 0 or the first read_std() error.
 */
static int read_descriptor(struct x86_emulate_ctxt *ctxt,
			   struct x86_emulate_ops *ops,
			   void *ptr,
			   u16 *size, unsigned long *address, int op_bytes)
{
	int rc;

	if (op_bytes == 2)
		op_bytes = 3;
	*address = 0;
	rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
			   ctxt->vcpu);
	if (rc)
		return rc;
	rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
			   ctxt->vcpu);
	return rc;
}
641
642static int test_cc(unsigned int condition, unsigned int flags)
643{
644 int rc = 0;
645
646 switch ((condition & 15) >> 1) {
647 case 0:
648 rc |= (flags & EFLG_OF);
649 break;
650 case 1:
651 rc |= (flags & EFLG_CF);
652 break;
653 case 2:
654 rc |= (flags & EFLG_ZF);
655 break;
656 case 3:
657 rc |= (flags & (EFLG_CF|EFLG_ZF));
658 break;
659 case 4:
660 rc |= (flags & EFLG_SF);
661 break;
662 case 5:
663 rc |= (flags & EFLG_PF);
664 break;
665 case 7:
666 rc |= (flags & EFLG_ZF);
667
668 case 6:
669 rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
670 break;
671 }
672
673
674 return (!!rc ^ (condition & 1));
675}
676
/*
 * Decode a register operand into @op.  The register comes from the
 * ModRM reg field, or from the low opcode bits (plus REX.B) for
 * opcodes without a ModRM byte.  AH/CH/DH/BH encodings are only used
 * when no REX prefix is present.  @inhibit_bytereg forces a full-width
 * register even for ByteOp decode (used for movzx/movsx, 0xb6/0xb7).
 */
static void decode_register_operand(struct operand *op,
				    struct decode_cache *c,
				    int inhibit_bytereg)
{
	unsigned reg = c->modrm_reg;
	int highbyte_regs = c->rex_prefix == 0;

	if (!(c->d & ModRM))
		reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
	op->type = OP_REG;
	if ((c->d & ByteOp) && !inhibit_bytereg) {
		op->ptr = decode_register(reg, c->regs, highbyte_regs);
		op->val = *(u8 *)op->ptr;
		op->bytes = 1;
	} else {
		op->ptr = decode_register(reg, c->regs, 0);
		op->bytes = c->op_bytes;
		switch (op->bytes) {
		case 2:
			op->val = *(u16 *)op->ptr;
			break;
		case 4:
			op->val = *(u32 *)op->ptr;
			break;
		case 8:
			op->val = *(u64 *) op->ptr;
			break;
		}
	}
	op->orig_val = op->val;
}
708
/*
 * Decode the ModRM byte (and SIB/displacement bytes, if any) into the
 * decode cache: either a register operand (mod == 3) or an effective
 * address in c->modrm_ea.  May jump to the local "done" label via
 * insn_fetch() on a fetch failure; returns 0 or the fetch error.
 */
static int decode_modrm(struct x86_emulate_ctxt *ctxt,
			struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	u8 sib;
	int index_reg = 0, base_reg = 0, scale;
	int rc = 0;

	if (c->rex_prefix) {
		c->modrm_reg = (c->rex_prefix & 4) << 1;	/* REX.R */
		index_reg = (c->rex_prefix & 2) << 2;	/* REX.X */
		c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
	}

	c->modrm = insn_fetch(u8, 1, c->eip);
	c->modrm_mod |= (c->modrm & 0xc0) >> 6;
	c->modrm_reg |= (c->modrm & 0x38) >> 3;
	c->modrm_rm |= (c->modrm & 0x07);
	c->modrm_ea = 0;
	c->use_modrm_ea = 1;

	if (c->modrm_mod == 3) {
		/* mod == 3: the r/m field names a register, not memory. */
		c->modrm_ptr = decode_register(c->modrm_rm,
					       c->regs, c->d & ByteOp);
		c->modrm_val = *(unsigned long *)c->modrm_ptr;
		return rc;
	}

	if (c->ad_bytes == 2) {
		unsigned bx = c->regs[VCPU_REGS_RBX];
		unsigned bp = c->regs[VCPU_REGS_RBP];
		unsigned si = c->regs[VCPU_REGS_RSI];
		unsigned di = c->regs[VCPU_REGS_RDI];

		/* 16-bit ModR/M decode: fixed base-register pairs. */
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 6)
				c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(u16, 2, c->eip);
			break;
		}
		switch (c->modrm_rm) {
		case 0:
			c->modrm_ea += bx + si;
			break;
		case 1:
			c->modrm_ea += bx + di;
			break;
		case 2:
			c->modrm_ea += bp + si;
			break;
		case 3:
			c->modrm_ea += bp + di;
			break;
		case 4:
			c->modrm_ea += si;
			break;
		case 5:
			c->modrm_ea += di;
			break;
		case 6:
			if (c->modrm_mod != 0)
				c->modrm_ea += bp;
			break;
		case 7:
			c->modrm_ea += bx;
			break;
		}
		/* BP-based addressing defaults to the SS segment. */
		if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
		    (c->modrm_rm == 6 && c->modrm_mod != 0))
			if (!c->has_seg_override)
				set_seg_override(c, VCPU_SREG_SS);
		c->modrm_ea = (u16)c->modrm_ea;
	} else {
		/* 32/64-bit ModR/M decode. */
		if ((c->modrm_rm & 7) == 4) {
			/* r/m == 4: a SIB byte follows. */
			sib = insn_fetch(u8, 1, c->eip);
			index_reg |= (sib >> 3) & 7;
			base_reg |= sib & 7;
			scale = sib >> 6;

			if ((base_reg & 7) == 5 && c->modrm_mod == 0)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			else
				c->modrm_ea += c->regs[base_reg];
			if (index_reg != 4)
				c->modrm_ea += c->regs[index_reg] << scale;
		} else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
			/* disp32 form; RIP-relative in 64-bit mode. */
			if (ctxt->mode == X86EMUL_MODE_PROT64)
				c->rip_relative = 1;
		} else
			c->modrm_ea += c->regs[c->modrm_rm];
		switch (c->modrm_mod) {
		case 0:
			if (c->modrm_rm == 5)
				c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		case 1:
			c->modrm_ea += insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->modrm_ea += insn_fetch(s32, 4, c->eip);
			break;
		}
	}
done:
	return rc;
}
823
/*
 * Decode an absolute-displacement memory operand (MemAbs opcodes such
 * as the mov-moffs forms): the address is an address-sized immediate.
 * May jump to "done" via insn_fetch() on failure.
 */
static int decode_abs(struct x86_emulate_ctxt *ctxt,
		      struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->ad_bytes) {
	case 2:
		c->modrm_ea = insn_fetch(u16, 2, c->eip);
		break;
	case 4:
		c->modrm_ea = insn_fetch(u32, 4, c->eip);
		break;
	case 8:
		c->modrm_ea = insn_fetch(u64, 8, c->eip);
		break;
	}
done:
	return rc;
}
844
/*
 * Decode the instruction at the guest RIP into ctxt->decode: prefixes,
 * opcode, ModRM/SIB, and the source and destination operands.
 * Returns 0 on success, -1 if the instruction cannot be emulated.
 */
int
x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;
	int mode = ctxt->mode;
	int def_op_bytes, def_ad_bytes, group;

	/* Shadow copy of register state. Committed on successful emulation. */

	memset(c, 0, sizeof(struct decode_cache));
	c->eip = kvm_rip_read(ctxt->vcpu);
	ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);

	/* Default operand and address sizes per CPU mode. */
	switch (mode) {
	case X86EMUL_MODE_REAL:
	case X86EMUL_MODE_PROT16:
		def_op_bytes = def_ad_bytes = 2;
		break;
	case X86EMUL_MODE_PROT32:
		def_op_bytes = def_ad_bytes = 4;
		break;
#ifdef CONFIG_X86_64
	case X86EMUL_MODE_PROT64:
		def_op_bytes = 4;
		def_ad_bytes = 8;
		break;
#endif
	default:
		return -1;
	}

	c->op_bytes = def_op_bytes;
	c->ad_bytes = def_ad_bytes;

	/* Legacy prefixes. */
	for (;;) {
		switch (c->b = insn_fetch(u8, 1, c->eip)) {
		case 0x66:	/* operand-size override */
			/* switch between 2/4 bytes */
			c->op_bytes = def_op_bytes ^ 6;
			break;
		case 0x67:	/* address-size override */
			if (mode == X86EMUL_MODE_PROT64)
				/* switch between 4/8 bytes */
				c->ad_bytes = def_ad_bytes ^ 12;
			else
				/* switch between 2/4 bytes */
				c->ad_bytes = def_ad_bytes ^ 6;
			break;
		case 0x26:	/* ES override */
		case 0x2e:	/* CS override */
		case 0x36:	/* SS override */
		case 0x3e:	/* DS override */
			set_seg_override(c, (c->b >> 3) & 3);
			break;
		case 0x64:	/* FS override */
		case 0x65:	/* GS override */
			set_seg_override(c, c->b & 7);
			break;
		case 0x40 ... 0x4f: /* REX */
			if (mode != X86EMUL_MODE_PROT64)
				goto done_prefixes;
			c->rex_prefix = c->b;
			continue;
		case 0xf0:	/* LOCK */
			c->lock_prefix = 1;
			break;
		case 0xf2:	/* REPNE/REPNZ */
			c->rep_prefix = REPNE_PREFIX;
			break;
		case 0xf3:	/* REP/REPE/REPZ */
			c->rep_prefix = REPE_PREFIX;
			break;
		default:
			goto done_prefixes;
		}

		/* Any legacy prefix after a REX prefix nullifies its effect. */

		c->rex_prefix = 0;
	}

done_prefixes:

	/* REX prefix. */
	if (c->rex_prefix)
		if (c->rex_prefix & 8)
			c->op_bytes = 8;	/* REX.W */

	/* Opcode byte(s). */
	c->d = opcode_table[c->b];
	if (c->d == 0) {
		/* Two-byte opcode? */
		if (c->b == 0x0f) {
			c->twobyte = 1;
			c->b = insn_fetch(u8, 1, c->eip);
			c->d = twobyte_table[c->b];
		}
	}

	if (c->d & Group) {
		group = c->d & GroupMask;
		c->modrm = insn_fetch(u8, 1, c->eip);
		/* Back up so decode_modrm() re-fetches the ModRM byte. */
		--c->eip;

		group = (group << 3) + ((c->modrm >> 3) & 7);
		if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
			c->d = group2_table[group];
		else
			c->d = group_table[group];
	}

	/* Unrecognised? */
	if (c->d == 0) {
		DPRINTF("Cannot emulate %02x\n", c->b);
		return -1;
	}

	/* Stack instructions use a 64-bit operand size in long mode. */
	if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
		c->op_bytes = 8;

	/* ModRM and SIB bytes. */
	if (c->d & ModRM)
		rc = decode_modrm(ctxt, ops);
	else if (c->d & MemAbs)
		rc = decode_abs(ctxt, ops);
	if (rc)
		goto done;

	if (!c->has_seg_override)
		set_seg_override(c, VCPU_SREG_DS);

	/* Add the segment base to the effective address — except for lea. */
	if (!(!c->twobyte && c->b == 0x8d))
		c->modrm_ea += seg_override_base(ctxt, c);

	if (c->ad_bytes != 8)
		c->modrm_ea = (u32)c->modrm_ea;

	/*
	 * Decode and fetch the source operand: register, memory
	 * or immediate.
	 */
	switch (c->d & SrcMask) {
	case SrcNone:
		break;
	case SrcReg:
		decode_register_operand(&c->src, c, 0);
		break;
	case SrcMem16:
		c->src.bytes = 2;
		goto srcmem_common;
	case SrcMem32:
		c->src.bytes = 4;
		goto srcmem_common;
	case SrcMem:
		c->src.bytes = (c->d & ByteOp) ? 1 :
							   c->op_bytes;
		/* Skip the memory-operand fetch for 0f 01 /7 (invlpg). */
		if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
			break;
	srcmem_common:
		/*
		 * For instructions with a ModR/M byte, switch to register
		 * access if mod == 3.
		 */
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->src.type = OP_REG;
			c->src.val = c->modrm_val;
			c->src.ptr = c->modrm_ptr;
			break;
		}
		c->src.type = OP_MEM;
		break;
	case SrcImm:
		/* Immediate operand; at most 4 bytes (sign-extended if 8). */
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		if (c->src.bytes == 8)
			c->src.bytes = 4;
		/* NB. Immediates are sign-extended as necessary. */
		switch (c->src.bytes) {
		case 1:
			c->src.val = insn_fetch(s8, 1, c->eip);
			break;
		case 2:
			c->src.val = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			c->src.val = insn_fetch(s32, 4, c->eip);
			break;
		}
		break;
	case SrcImmByte:
		c->src.type = OP_IMM;
		c->src.ptr = (unsigned long *)c->eip;
		c->src.bytes = 1;
		c->src.val = insn_fetch(s8, 1, c->eip);
		break;
	}

	/* Decode and fetch the destination operand: register or memory. */
	switch (c->d & DstMask) {
	case ImplicitOps:
		/* Special instructions do their own operand decoding. */
		return 0;
	case DstReg:
		decode_register_operand(&c->dst, c,
			 c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
		break;
	case DstMem:
		if ((c->d & ModRM) && c->modrm_mod == 3) {
			c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
			c->dst.type = OP_REG;
			c->dst.val = c->dst.orig_val = c->modrm_val;
			c->dst.ptr = c->modrm_ptr;
			break;
		}
		c->dst.type = OP_MEM;
		break;
	case DstAcc:
		/* Destination is the accumulator (rAX). */
		c->dst.type = OP_REG;
		c->dst.bytes = c->op_bytes;
		c->dst.ptr = &c->regs[VCPU_REGS_RAX];
		switch (c->op_bytes) {
		case 1:
			c->dst.val = *(u8 *)c->dst.ptr;
			break;
		case 2:
			c->dst.val = *(u16 *)c->dst.ptr;
			break;
		case 4:
			c->dst.val = *(u32 *)c->dst.ptr;
			break;
		}
		c->dst.orig_val = c->dst.val;
		break;
	}

	/* RIP-relative addressing is relative to the end of the insn. */
	if (c->rip_relative)
		c->modrm_ea += c->eip;

done:
	return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
}
1090
1091static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
1092{
1093 struct decode_cache *c = &ctxt->decode;
1094
1095 c->dst.type = OP_MEM;
1096 c->dst.bytes = c->op_bytes;
1097 c->dst.val = c->src.val;
1098 register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
1099 c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
1100 c->regs[VCPU_REGS_RSP]);
1101}
1102
/*
 * Group 1A (0x8f /0): pop into r/m.  Reads the top of stack into the
 * destination value and post-increments RSP; the actual store happens
 * in writeback().  Returns 0 or the read_std() error.
 */
static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc;

	rc = ops->read_std(register_address(c, ss_base(ctxt),
					    c->regs[VCPU_REGS_RSP]),
			   &c->dst.val, c->dst.bytes, ctxt->vcpu);
	if (rc != 0)
		return rc;

	register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->dst.bytes);

	return 0;
}
1119
1120static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
1121{
1122 struct decode_cache *c = &ctxt->decode;
1123 switch (c->modrm_reg) {
1124 case 0:
1125 emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
1126 break;
1127 case 1:
1128 emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
1129 break;
1130 case 2:
1131 emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
1132 break;
1133 case 3:
1134 emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
1135 break;
1136 case 4:
1137 case 6:
1138 emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
1139 break;
1140 case 5:
1141 emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
1142 break;
1143 case 7:
1144 emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
1145 break;
1146 }
1147}
1148
/*
 * Group 3 (0xf6/0xf7): test/not/neg on r/m, selected by the ModRM reg
 * field.  mul/imul/div/idiv (4-7) are not emulated and return
 * X86EMUL_UNHANDLEABLE.
 */
static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;
	int rc = 0;

	switch (c->modrm_reg) {
	case 0 ... 1:	/* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 2:	/* not */
		c->dst.val = ~c->dst.val;
		break;
	case 3:	/* neg */
		emulate_1op("neg", c->dst, ctxt->eflags);
		break;
	default:
		DPRINTF("Cannot emulate %02x\n", c->b);
		rc = X86EMUL_UNHANDLEABLE;
		break;
	}
	return rc;
}
1172
/*
 * Groups 4 and 5 (0xfe/0xff): inc/dec/call/jmp/push on r/m, selected
 * by the ModRM reg field.  Sub-opcodes without a case here (far
 * call/jmp) are silently ignored and return 0.
 */
static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
				struct x86_emulate_ops *ops)
{
	struct decode_cache *c = &ctxt->decode;

	switch (c->modrm_reg) {
	case 0:	/* inc */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 1:	/* dec */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 2: /* call near abs: push return address, jump to target */
		{
		long int old_eip;
		old_eip = c->eip;
		c->eip = c->src.val;
		c->src.val = old_eip;
		emulate_push(ctxt);
		break;
	}
	case 4:	/* jmp near abs */
		c->eip = c->src.val;
		break;
	case 6:	/* push */
		emulate_push(ctxt);
		break;
	}
	return 0;
}
1202
/*
 * Group 9 (0f c7 /1): cmpxchg8b.  Compare the 8 bytes at @memop with
 * EDX:EAX; on mismatch load the memory value into EDX:EAX and clear
 * ZF, on match store ECX:EBX (atomically via cmpxchg_emulated) and
 * set ZF.  Returns 0 or the memory-op error.
 */
static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
			       struct x86_emulate_ops *ops,
			       unsigned long memop)
{
	struct decode_cache *c = &ctxt->decode;
	u64 old, new;
	int rc;

	rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
	if (rc != 0)
		return rc;

	if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
	    ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {

		c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
		c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
		ctxt->eflags &= ~EFLG_ZF;

	} else {
		new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
		       (u32) c->regs[VCPU_REGS_RBX];

		rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
		if (rc != 0)
			return rc;
		ctxt->eflags |= EFLG_ZF;
	}
	return 0;
}
1233
/*
 * Commit the decoded destination operand.  Register destinations are
 * written through the cached pointer at the decoded width; memory
 * destinations go through write_emulated(), or cmpxchg_emulated()
 * against the original value when a LOCK prefix was decoded.
 */
static inline int writeback(struct x86_emulate_ctxt *ctxt,
			    struct x86_emulate_ops *ops)
{
	int rc;
	struct decode_cache *c = &ctxt->decode;

	switch (c->dst.type) {
	case OP_REG:
		/*
		 * The 4-byte case writes the zero-extended value into
		 * the full register (64-bit mov-to-32-bit-reg semantics).
		 */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *)c->dst.ptr = (u8)c->dst.val;
			break;
		case 2:
			*(u16 *)c->dst.ptr = (u16)c->dst.val;
			break;
		case 4:
			*c->dst.ptr = (u32)c->dst.val;
			break;
		case 8:
			*c->dst.ptr = c->dst.val;
			break;
		}
		break;
	case OP_MEM:
		if (c->lock_prefix)
			rc = ops->cmpxchg_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.orig_val,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		else
			rc = ops->write_emulated(
					(unsigned long)c->dst.ptr,
					&c->dst.val,
					c->dst.bytes,
					ctxt->vcpu);
		if (rc != 0)
			return rc;
		break;
	case OP_NONE:
		/* No writeback required. */
		break;
	default:
		break;
	}
	return 0;
}
1285
/*
 * Main emulation entry point: executes the single pre-decoded
 * instruction in ctxt->decode against the vcpu state, reading/writing
 * guest memory and I/O through @ops.  Returns 0 on success (or when
 * the instruction was handed off to PIO emulation) and -1 when the
 * instruction cannot be emulated, restoring the saved eip in that case.
 */
int
x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
{
	unsigned long memop = 0;
	u64 msr_data;
	unsigned long saved_eip = 0;
	struct decode_cache *c = &ctxt->decode;
	unsigned int port;
	int io_dir_in;
	int rc = 0;

	/*
	 * Work on a local shadow of the guest registers; they are copied
	 * back to the vcpu only after a successful writeback, so a failed
	 * emulation leaves the architectural state untouched.
	 */
	memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
	/* Remember eip so cannot_emulate/unhandleable paths can rewind. */
	saved_eip = c->eip;

	/* Effective address computed by the decoder, when one exists. */
	if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
		memop = c->modrm_ea;

	if (c->rep_prefix && (c->d & String)) {
		/* All REP prefixes terminate when (e)cx reaches zero. */
		if (c->regs[VCPU_REGS_RCX] == 0) {
			kvm_rip_write(ctxt->vcpu, c->eip);
			goto done;
		}
		/*
		 * For cmps (0xa6/0xa7) and scas (0xae/0xaf) the REPE/REPNE
		 * distinction matters: check the ZF-based termination
		 * condition before executing another iteration.  Only one
		 * iteration is emulated per call; rip is left pointing at
		 * the instruction so it re-enters for the next iteration.
		 */
		if ((c->b == 0xa6) || (c->b == 0xa7) ||
		    (c->b == 0xae) || (c->b == 0xaf)) {
			if ((c->rep_prefix == REPE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == 0)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
			if ((c->rep_prefix == REPNE_PREFIX) &&
			    ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
				kvm_rip_write(ctxt->vcpu, c->eip);
				goto done;
			}
		}
		c->regs[VCPU_REGS_RCX]--;
		c->eip = kvm_rip_read(ctxt->vcpu);
	}

	/* Fetch a memory source operand, preserving the original value. */
	if (c->src.type == OP_MEM) {
		c->src.ptr = (unsigned long *)memop;
		c->src.val = 0;
		rc = ops->read_emulated((unsigned long)c->src.ptr,
					&c->src.val,
					c->src.bytes,
					ctxt->vcpu);
		if (rc != 0)
			goto done;
		c->src.orig_val = c->src.val;
	}

	if ((c->d & DstMask) == ImplicitOps)
		goto special_insn;

	/* Set up the memory destination operand. */
	if (c->dst.type == OP_MEM) {
		c->dst.ptr = (unsigned long *)memop;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.val = 0;
		if (c->d & BitOp) {
			/*
			 * Bit ops address the word containing the bit;
			 * fold the bit offset into the pointer.
			 */
			unsigned long mask = ~(c->dst.bytes * 8 - 1);

			c->dst.ptr = (void *)c->dst.ptr +
						   (c->src.val & mask) / 8;
		}
		if (!(c->d & Mov) &&
		    /* optimisation - avoid slow emulated read for pure movs */
		    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
					   &c->dst.val,
					  c->dst.bytes, ctxt->vcpu)) != 0))
			goto done;
	}
	c->dst.orig_val = c->dst.val;

special_insn:

	if (c->twobyte)
		goto twobyte_insn;

	switch (c->b) {
	case 0x00 ... 0x05:
	      add:		/* add */
		emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
		break;
	case 0x08 ... 0x0d:
	      or:		/* or */
		emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
		break;
	case 0x10 ... 0x15:
	      adc:		/* adc */
		emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
		break;
	case 0x18 ... 0x1d:
	      sbb:		/* sbb */
		emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
		break;
	case 0x20 ... 0x25:
	      and:		/* and */
		emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
		break;
	case 0x28 ... 0x2d:
	      sub:		/* sub */
		emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
		break;
	case 0x30 ... 0x35:
	      xor:		/* xor */
		emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
		break;
	case 0x38 ... 0x3d:
	      cmp:		/* cmp */
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		break;
	case 0x40 ... 0x47: /* inc r16/r32 */
		emulate_1op("inc", c->dst, ctxt->eflags);
		break;
	case 0x48 ... 0x4f: /* dec r16/r32 */
		emulate_1op("dec", c->dst, ctxt->eflags);
		break;
	case 0x50 ... 0x57:  /* push reg */
		c->dst.type  = OP_MEM;
		c->dst.bytes = c->op_bytes;
		c->dst.val = c->src.val;
		register_address_increment(c, &c->regs[VCPU_REGS_RSP],
					   -c->op_bytes);
		c->dst.ptr = (void *) register_address(
			c, ss_base(ctxt), c->regs[VCPU_REGS_RSP]);
		break;
	case 0x58 ... 0x5f: /* pop reg */
	pop_instruction:
		if ((rc = ops->read_std(register_address(c, ss_base(ctxt),
			c->regs[VCPU_REGS_RSP]), c->dst.ptr,
			c->op_bytes, ctxt->vcpu)) != 0)
			goto done;

		register_address_increment(c, &c->regs[VCPU_REGS_RSP],
					   c->op_bytes);
		c->dst.type = OP_NONE;	/* value already written above */
		break;
	case 0x63:		/* movsxd (x86/64); arpl not emulated */
		if (ctxt->mode != X86EMUL_MODE_PROT64)
			goto cannot_emulate;
		c->dst.val = (s32) c->src.val;
		break;
	case 0x68: /* push imm */
	case 0x6a: /* push imm8 */
		emulate_push(ctxt);
		break;
	case 0x6c:		/* insb */
	case 0x6d:		/* insw/insd */
		 if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				1,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c, es_base(ctxt),
						 c->regs[VCPU_REGS_RDI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1; /* PIO needs userspace completion */
		}
		return 0;
	case 0x6e:		/* outsb */
	case 0x6f:		/* outsw/outsd */
		if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
				0,
				(c->d & ByteOp) ? 1 : c->op_bytes,
				c->rep_prefix ?
				address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
				(ctxt->eflags & EFLG_DF),
				register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
				c->rep_prefix,
				c->regs[VCPU_REGS_RDX]) == 0) {
			c->eip = saved_eip;
			return -1; /* PIO needs userspace completion */
		}
		return 0;
	case 0x70 ... 0x7f: /* jcc (short) */ {
		int rel = insn_fetch(s8, 1, c->eip);

		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		break;
	}
	case 0x80 ... 0x83:	/* Grp1: ALU op with immediate */
		switch (c->modrm_reg) {
		case 0:
			goto add;
		case 1:
			goto or;
		case 2:
			goto adc;
		case 3:
			goto sbb;
		case 4:
			goto and;
		case 5:
			goto sub;
		case 6:
			goto xor;
		case 7:
			goto cmp;
		}
		break;
	case 0x84 ... 0x85: /* test */
		emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
		break;
	case 0x86 ... 0x87:	/* xchg */
	xchg:
		/* Write back the register operand (dst is written later). */
		switch (c->dst.bytes) {
		case 1:
			*(u8 *) c->src.ptr = (u8) c->dst.val;
			break;
		case 2:
			*(u16 *) c->src.ptr = (u16) c->dst.val;
			break;
		case 4:
			*c->src.ptr = (u32) c->dst.val;
			break;
		case 8:
			*c->src.ptr = c->dst.val;
			break;
		}
		/*
		 * Write back the memory destination with implicit LOCK
		 * prefix (xchg with memory is always locked).
		 */
		c->dst.val = c->src.val;
		c->lock_prefix = 1;
		break;
	case 0x88 ... 0x8b:	/* mov */
		goto mov;
	case 0x8c: { /* mov r/m, sreg */
		struct kvm_segment segreg;

		if (c->modrm_reg <= 5)
			kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
		else {
			printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
			       c->modrm);
			goto cannot_emulate;
		}
		c->dst.val = segreg.selector;
		break;
	}
	case 0x8d: /* lea r16/r32, m */
		c->dst.val = c->modrm_ea;
		break;
	case 0x8e: { /* mov seg, r/m16 */
		uint16_t sel;
		int type_bits;
		int err;

		sel = c->src.val;
		if (c->modrm_reg <= 5) {
			/* code segment (reg 1 == CS) needs type bits 9 */
			type_bits = (c->modrm_reg == 1) ? 9 : 1;
			err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
							  type_bits, c->modrm_reg);
		} else {
			printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
					c->modrm);
			goto cannot_emulate;
		}

		if (err < 0)
			goto cannot_emulate;

		c->dst.type = OP_NONE;  /* segment loaded above */
		break;
	}
	case 0x8f:		/* pop (sole member of Grp1a) */
		rc = emulate_grp1a(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0x90: /* nop / xchg r8,rax */
		if (!(c->rex_prefix & 1)) { /* plain nop */
			c->dst.type = OP_NONE;  /* no writeback */
			break;
		}
		/* fallthrough: with REX.B this is xchg rax, r8 */
	case 0x91 ... 0x97: /* xchg reg,rax */
		c->src.type = c->dst.type = OP_REG;
		c->src.bytes = c->dst.bytes = c->op_bytes;
		c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
		c->src.val = *(c->src.ptr);
		goto xchg;
	case 0x9c: /* pushf */
		c->src.val =  (unsigned long) ctxt->eflags;
		emulate_push(ctxt);
		break;
	case 0x9d: /* popf */
		c->dst.ptr = (unsigned long *) &ctxt->eflags;
		goto pop_instruction;
	case 0xa0 ... 0xa1:	/* mov AL/eAX, moffs */
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		c->dst.val = c->src.val;
		break;
	case 0xa2 ... 0xa3:	/* mov moffs, AL/eAX */
		c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
		break;
	case 0xa4 ... 0xa5:	/* movs */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated(register_address(c,
					   seg_override_base(ctxt, c),
					c->regs[VCPU_REGS_RSI]),
					&c->dst.val,
					c->dst.bytes, ctxt->vcpu)) != 0)
			goto done;
		/* DF selects the direction for both index registers. */
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xa6 ... 0xa7:	/* cmps */
		c->src.type = OP_NONE; /* flags only; disable writeback */
		c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->src.ptr = (unsigned long *)register_address(c,
				       seg_override_base(ctxt, c),
						   c->regs[VCPU_REGS_RSI]);
		if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
						&c->src.val,
						c->src.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		c->dst.type = OP_NONE; /* flags only; disable writeback */
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
						&c->dst.val,
						c->dst.bytes,
						ctxt->vcpu)) != 0)
			goto done;

		DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);

		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);

		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
								  : c->src.bytes);
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
								  : c->dst.bytes);

		break;
	case 0xaa ... 0xab:	/* stos */
		c->dst.type = OP_MEM;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)register_address(c,
						   es_base(ctxt),
						   c->regs[VCPU_REGS_RDI]);
		c->dst.val = c->regs[VCPU_REGS_RAX];
		register_address_increment(c, &c->regs[VCPU_REGS_RDI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xac ... 0xad:	/* lods */
		c->dst.type = OP_REG;
		c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
		c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		if ((rc = ops->read_emulated(register_address(c,
						 seg_override_base(ctxt, c),
						 c->regs[VCPU_REGS_RSI]),
						 &c->dst.val,
						 c->dst.bytes,
						 ctxt->vcpu)) != 0)
			goto done;
		register_address_increment(c, &c->regs[VCPU_REGS_RSI],
				       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
							   : c->dst.bytes);
		break;
	case 0xae ... 0xaf:	/* scas */
		DPRINTF("Urk! I don't handle SCAS.\n");
		goto cannot_emulate;
	case 0xb0 ... 0xbf: /* mov r, imm */
		goto mov;
	case 0xc0 ... 0xc1:	/* Grp2: shifts/rotates by imm8 */
		emulate_grp2(ctxt);
		break;
	case 0xc3: /* ret */
		c->dst.ptr = &c->eip;
		goto pop_instruction;
	case 0xc6 ... 0xc7:	/* mov (sole member of Grp11) */
	mov:
		c->dst.val = c->src.val;
		break;
	case 0xd0 ... 0xd1:	/* Grp2: shift/rotate by 1 */
		c->src.val = 1;
		emulate_grp2(ctxt);
		break;
	case 0xd2 ... 0xd3:	/* Grp2: shift/rotate by CL */
		c->src.val = c->regs[VCPU_REGS_RCX];
		emulate_grp2(ctxt);
		break;
	case 0xe4: 	/* inb imm8 */
	case 0xe5: 	/* in imm8 */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 1;
		goto do_io;
	case 0xe6: /* outb imm8 */
	case 0xe7: /* out imm8 */
		port = insn_fetch(u8, 1, c->eip);
		io_dir_in = 0;
		goto do_io;
	case 0xe8: /* call (near) */ {
		long int rel;
		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		default:
			DPRINTF("Call: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		c->src.val = (unsigned long) c->eip;
		jmp_rel(c, rel);
		/* the push uses address size for the return address */
		c->op_bytes = c->ad_bytes;
		emulate_push(ctxt);
		break;
	}
	case 0xe9: /* jmp rel */
		goto jmp;
	case 0xea: /* jmp far, absolute */ {
		uint32_t eip;
		uint16_t sel;

		switch (c->op_bytes) {
		case 2:
			eip = insn_fetch(u16, 2, c->eip);
			break;
		case 4:
			eip = insn_fetch(u32, 4, c->eip);
			break;
		default:
			DPRINTF("jmp far: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		sel = insn_fetch(u16, 2, c->eip);
		if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
			DPRINTF("jmp far: Failed to load CS descriptor\n");
			goto cannot_emulate;
		}

		c->eip = eip;
		break;
	}
	case 0xeb: /* jmp rel short */
	      jmp:
		jmp_rel(c, c->src.val);
		c->dst.type = OP_NONE; /* Disable writeback. */
		break;
	case 0xec: /* in al,dx */
	case 0xed: /* in (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 1;
		goto do_io;
	case 0xee: /* out al,dx */
	case 0xef: /* out (e/r)ax,dx */
		port = c->regs[VCPU_REGS_RDX];
		io_dir_in = 0;
	do_io:	if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
				   (c->d & ByteOp) ? 1 : c->op_bytes,
				   port) != 0) {
			c->eip = saved_eip;
			goto cannot_emulate;
		}
		return 0;
	case 0xf4:              /* hlt */
		ctxt->vcpu->arch.halt_request = 1;
		break;
	case 0xf5:	/* cmc: complement carry flag */
		/* only affects CF; no writeback */
		ctxt->eflags ^= EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xf6 ... 0xf7:	/* Grp3: test/not/neg/mul/div */
		rc = emulate_grp3(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	case 0xf8: /* clc */
		ctxt->eflags &= ~EFLG_CF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfa: /* cli */
		ctxt->eflags &= ~X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfb: /* sti */
		ctxt->eflags |= X86_EFLAGS_IF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfc: /* cld */
		ctxt->eflags &= ~EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfd: /* std */
		ctxt->eflags |= EFLG_DF;
		c->dst.type = OP_NONE;	/* Disable writeback. */
		break;
	case 0xfe ... 0xff:	/* Grp4/Grp5 */
		rc = emulate_grp45(ctxt, ops);
		if (rc != 0)
			goto done;
		break;
	}

writeback:
	rc = writeback(ctxt, ops);
	if (rc != 0)
		goto done;

	/* Commit the shadow register state and the new rip. */
	memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
	kvm_rip_write(ctxt->vcpu, c->eip);

done:
	if (rc == X86EMUL_UNHANDLEABLE) {
		c->eip = saved_eip;
		return -1;
	}
	return 0;

twobyte_insn:
	switch (c->b) {
	case 0x01: /* lgdt, lidt, lmsw (Grp7) */
		switch (c->modrm_reg) {
			u16 size;
			unsigned long address;

		case 0: /* vmcall */
			if (c->modrm_mod != 3 || c->modrm_rm != 1)
				goto cannot_emulate;

			rc = kvm_fix_hypercall(ctxt->vcpu);
			if (rc)
				goto done;

			/* Let the processor re-execute the fixed hypercall. */
			c->eip = kvm_rip_read(ctxt->vcpu);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 2: /* lgdt */
			rc = read_descriptor(ctxt, ops, c->src.ptr,
					     &size, &address, c->op_bytes);
			if (rc)
				goto done;
			realmode_lgdt(ctxt->vcpu, size, address);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 3: /* lidt/vmmcall */
			if (c->modrm_mod == 3 && c->modrm_rm == 1) {
				rc = kvm_fix_hypercall(ctxt->vcpu);
				if (rc)
					goto done;
				kvm_emulate_hypercall(ctxt->vcpu);
			} else {
				rc = read_descriptor(ctxt, ops, c->src.ptr,
						     &size, &address,
						     c->op_bytes);
				if (rc)
					goto done;
				realmode_lidt(ctxt->vcpu, size, address);
			}
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		case 4: /* smsw */
			c->dst.bytes = 2;
			c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
			break;
		case 6: /* lmsw */
			realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
				      &ctxt->eflags);
			c->dst.type = OP_NONE;
			break;
		case 7: /* invlpg */
			emulate_invlpg(ctxt->vcpu, memop);
			/* Disable writeback. */
			c->dst.type = OP_NONE;
			break;
		default:
			goto cannot_emulate;
		}
		break;
	case 0x06: /* clts */
		emulate_clts(ctxt->vcpu);
		c->dst.type = OP_NONE;
		break;
	case 0x08:		/* invd */
	case 0x09:		/* wbinvd */
	case 0x0d:		/* GrpP (prefetch) */
	case 0x18:		/* Grp16 (prefetch/nop) */
		/* Treated as nops: no writeback. */
		c->dst.type = OP_NONE;
		break;
	case 0x20: /* mov cr, reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		c->regs[c->modrm_rm] =
				realmode_get_cr(ctxt->vcpu, c->modrm_reg);
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x21: /* mov from dr to reg */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x22: /* mov reg, cr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		realmode_set_cr(ctxt->vcpu,
				c->modrm_reg, c->modrm_val, &ctxt->eflags);
		c->dst.type = OP_NONE;
		break;
	case 0x23: /* mov from reg to dr */
		if (c->modrm_mod != 3)
			goto cannot_emulate;
		rc = emulator_set_dr(ctxt, c->modrm_reg,
				     c->regs[c->modrm_rm]);
		if (rc)
			goto cannot_emulate;
		c->dst.type = OP_NONE;	/* no writeback */
		break;
	case 0x30:
		/* wrmsr: EDX:EAX -> MSR[ECX]; failure injects #GP */
		msr_data = (u32)c->regs[VCPU_REGS_RAX]
			| ((u64)c->regs[VCPU_REGS_RDX] << 32);
		rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x32:
		/* rdmsr: MSR[ECX] -> EDX:EAX; failure injects #GP */
		rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
		if (rc) {
			kvm_inject_gp(ctxt->vcpu, 0);
			c->eip = kvm_rip_read(ctxt->vcpu);
		} else {
			c->regs[VCPU_REGS_RAX] = (u32)msr_data;
			c->regs[VCPU_REGS_RDX] = msr_data >> 32;
		}
		rc = X86EMUL_CONTINUE;
		c->dst.type = OP_NONE;
		break;
	case 0x40 ... 0x4f:	/* cmov */
		c->dst.val = c->dst.orig_val = c->src.val;
		if (!test_cc(c->b, ctxt->eflags))
			c->dst.type = OP_NONE; /* no writeback */
		break;
	case 0x80 ... 0x8f: /* jnz rel, etc*/ {
		long int rel;

		switch (c->op_bytes) {
		case 2:
			rel = insn_fetch(s16, 2, c->eip);
			break;
		case 4:
			rel = insn_fetch(s32, 4, c->eip);
			break;
		case 8:
			rel = insn_fetch(s64, 8, c->eip);
			break;
		default:
			DPRINTF("jnz: Invalid op_bytes\n");
			goto cannot_emulate;
		}
		if (test_cc(c->b, ctxt->eflags))
			jmp_rel(c, rel);
		c->dst.type = OP_NONE;
		break;
	}
	case 0xa3:
	      bt:		/* bt */
		c->dst.type = OP_NONE;
		/* only subword offset into the addressed word */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
		break;
	case 0xab:
	      bts:		/* bts */
		/* only subword offset into the addressed word */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
		break;
	case 0xae:              /* clflush / fences: treated as nop */
		break;
	case 0xb0 ... 0xb1:	/* cmpxchg */
		/*
		 * Save real source value, then compare EAX against
		 * destination; on match the source (register) value is
		 * stored, otherwise the destination is copied into EAX
		 * by redirecting the writeback to the RAX register.
		 */
		c->src.orig_val = c->src.val;
		c->src.val = c->regs[VCPU_REGS_RAX];
		emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
		if (ctxt->eflags & EFLG_ZF) {
			/* Success: write back to memory. */
			c->dst.val = c->src.orig_val;
		} else {
			/* Failure: write the value we saw to EAX. */
			c->dst.type = OP_REG;
			c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
		}
		break;
	case 0xb3:
	      btr:		/* btr */
		/* only subword offset into the addressed word */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
		break;
	case 0xb6 ... 0xb7:	/* movzx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
						       : (u16) c->src.val;
		break;
	case 0xba:		/* Grp8: bt/bts/btr/btc with imm8 */
		/* reg values 4..7 select bt..btc via the low two bits */
		switch (c->modrm_reg & 3) {
		case 0:
			goto bt;
		case 1:
			goto bts;
		case 2:
			goto btr;
		case 3:
			goto btc;
		}
		break;
	case 0xbb:
	      btc:		/* btc */
		/* only subword offset into the addressed word */
		c->src.val &= (c->dst.bytes << 3) - 1;
		emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
		break;
	case 0xbe ... 0xbf:	/* movsx */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
							(s16) c->src.val;
		break;
	case 0xc3:		/* movnti */
		c->dst.bytes = c->op_bytes;
		c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
							(u64) c->src.val;
		break;
	case 0xc7:		/* Grp9 (cmpxchg8b) */
		rc = emulate_grp9(ctxt, ops, memop);
		if (rc != 0)
			goto done;
		c->dst.type = OP_NONE;
		break;
	}
	goto writeback;

cannot_emulate:
	DPRINTF("Cannot emulate %02x\n", c->b);
	c->eip = saved_eip;
	return -1;
}
2080