linux/arch/x86/kvm/x86_emulate.c
   1/******************************************************************************
   2 * x86_emulate.c
   3 *
   4 * Generic x86 (32-bit and 64-bit) instruction decoder and emulator.
   5 *
   6 * Copyright (c) 2005 Keir Fraser
   7 *
   8 * Linux coding style, mod r/m decoder, segment base fixes, real-mode
   9 * privileged instructions:
  10 *
  11 * Copyright (C) 2006 Qumranet
  12 *
  13 *   Avi Kivity <avi@qumranet.com>
  14 *   Yaniv Kamay <yaniv@qumranet.com>
  15 *
  16 * This work is licensed under the terms of the GNU GPL, version 2.  See
  17 * the COPYING file in the top-level directory.
  18 *
  19 * From: xen-unstable 10676:af9809f51f81a3c43f276f00c81a52ef558afda4
  20 */
  21
  22#ifndef __KERNEL__
  23#include <stdio.h>
  24#include <stdint.h>
  25#include <public/xen.h>
  26#define DPRINTF(_f, _a ...) printf(_f , ## _a)
  27#else
  28#include <linux/kvm_host.h>
  29#include "kvm_cache_regs.h"
  30#define DPRINTF(x...) do {} while (0)
  31#endif
  32#include <linux/module.h>
  33#include <asm/kvm_x86_emulate.h>
  34
  35/*
  36 * Opcode effective-address decode tables.
  37 * Note that we only emulate instructions that have at least one memory
  38 * operand (excluding implicit stack references). We assume that stack
  39 * references and instruction fetches will never occur in special memory
  40 * areas that require emulation. So, for example, 'mov <imm>,<reg>' need
  41 * not be handled.
  42 */
  43
  44/* Operand sizes: 8-bit operands or specified/overridden size. */
  45#define ByteOp      (1<<0)      /* 8-bit operands. */
  46/* Destination operand type. */
  47#define ImplicitOps (1<<1)      /* Implicit in opcode. No generic decode. */
  48#define DstReg      (2<<1)      /* Register operand. */
  49#define DstMem      (3<<1)      /* Memory operand. */
  50#define DstAcc      (4<<1)      /* Destination Accumulator */
  51#define DstMask     (7<<1)
  52/* Source operand type. */
  53#define SrcNone     (0<<4)      /* No source operand. */
  54#define SrcImplicit (0<<4)      /* Source operand is implicit in the opcode. */
  55#define SrcReg      (1<<4)      /* Register operand. */
  56#define SrcMem      (2<<4)      /* Memory operand. */
  57#define SrcMem16    (3<<4)      /* Memory operand (16-bit). */
  58#define SrcMem32    (4<<4)      /* Memory operand (32-bit). */
  59#define SrcImm      (5<<4)      /* Immediate operand. */
  60#define SrcImmByte  (6<<4)      /* 8-bit sign-extended immediate operand. */
  61#define SrcMask     (7<<4)
  62/* Generic ModRM decode. */
  63#define ModRM       (1<<7)
  64/* Destination is only written; never read. */
  65#define Mov         (1<<8)
  66#define BitOp       (1<<9)
  67#define MemAbs      (1<<10)      /* Memory operand is absolute displacement */
  68#define String      (1<<12)     /* String instruction (rep capable) */
  69#define Stack       (1<<13)     /* Stack instruction (push/pop) */
  70#define Group       (1<<14)     /* Bits 3:5 of modrm byte extend opcode */
  71#define GroupDual   (1<<15)     /* Alternate decoding of mod == 3 */
  72#define GroupMask   0xff        /* Group number stored in bits 0:7 */
  73
  74enum {
  75        Group1_80, Group1_81, Group1_82, Group1_83,
  76        Group1A, Group3_Byte, Group3, Group4, Group5, Group7,
  77};
  78
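     /*
      * Example: entry 0x00 below, "ByteOp | DstMem | SrcReg | ModRM", describes
      * "add r/m8, r8": a ModRM byte follows the opcode, the source is the
      * register named by ModRM.reg, and the destination is the byte-sized
      * register-or-memory operand named by ModRM.mod/rm.
      */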
  79static u16 opcode_table[256] = {
  80        /* 0x00 - 0x07 */
  81        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  82        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  83        0, 0, 0, 0,
  84        /* 0x08 - 0x0F */
  85        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  86        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  87        0, 0, 0, 0,
  88        /* 0x10 - 0x17 */
  89        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  90        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  91        0, 0, 0, 0,
  92        /* 0x18 - 0x1F */
  93        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  94        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  95        0, 0, 0, 0,
  96        /* 0x20 - 0x27 */
  97        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
  98        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
  99        DstAcc | SrcImmByte, DstAcc | SrcImm, 0, 0,
 100        /* 0x28 - 0x2F */
 101        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
 102        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
 103        0, 0, 0, 0,
 104        /* 0x30 - 0x37 */
 105        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
 106        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
 107        0, 0, 0, 0,
 108        /* 0x38 - 0x3F */
 109        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
 110        ByteOp | DstReg | SrcMem | ModRM, DstReg | SrcMem | ModRM,
 111        ByteOp | DstAcc | SrcImm, DstAcc | SrcImm,
 112        0, 0,
 113        /* 0x40 - 0x47 */
 114        DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
 115        /* 0x48 - 0x4F */
 116        DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
 117        /* 0x50 - 0x57 */
 118        SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
 119        SrcReg | Stack, SrcReg | Stack, SrcReg | Stack, SrcReg | Stack,
 120        /* 0x58 - 0x5F */
 121        DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
 122        DstReg | Stack, DstReg | Stack, DstReg | Stack, DstReg | Stack,
 123        /* 0x60 - 0x67 */
 124        0, 0, 0, DstReg | SrcMem32 | ModRM | Mov /* movsxd (x86/64) */ ,
 125        0, 0, 0, 0,
 126        /* 0x68 - 0x6F */
 127        SrcImm | Mov | Stack, 0, SrcImmByte | Mov | Stack, 0,
 128        SrcNone  | ByteOp  | ImplicitOps, SrcNone  | ImplicitOps, /* insb, insw/insd */
 129        SrcNone  | ByteOp  | ImplicitOps, SrcNone  | ImplicitOps, /* outsb, outsw/outsd */
 130        /* 0x70 - 0x77 */
 131        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
 132        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
 133        /* 0x78 - 0x7F */
 134        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
 135        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
 136        /* 0x80 - 0x87 */
 137        Group | Group1_80, Group | Group1_81,
 138        Group | Group1_82, Group | Group1_83,
 139        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
 140        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM,
 141        /* 0x88 - 0x8F */
 142        ByteOp | DstMem | SrcReg | ModRM | Mov, DstMem | SrcReg | ModRM | Mov,
 143        ByteOp | DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
 144        DstMem | SrcReg | ModRM | Mov, ModRM | DstReg,
 145        DstReg | SrcMem | ModRM | Mov, Group | Group1A,
 146        /* 0x90 - 0x97 */
 147        DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg, DstReg,
 148        /* 0x98 - 0x9F */
 149        0, 0, 0, 0, ImplicitOps | Stack, ImplicitOps | Stack, 0, 0,
 150        /* 0xA0 - 0xA7 */
 151        ByteOp | DstReg | SrcMem | Mov | MemAbs, DstReg | SrcMem | Mov | MemAbs,
 152        ByteOp | DstMem | SrcReg | Mov | MemAbs, DstMem | SrcReg | Mov | MemAbs,
 153        ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
 154        ByteOp | ImplicitOps | String, ImplicitOps | String,
 155        /* 0xA8 - 0xAF */
 156        0, 0, ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
 157        ByteOp | ImplicitOps | Mov | String, ImplicitOps | Mov | String,
 158        ByteOp | ImplicitOps | String, ImplicitOps | String,
 159        /* 0xB0 - 0xB7 */
 160        ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
 161        ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
 162        ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
 163        ByteOp | DstReg | SrcImm | Mov, ByteOp | DstReg | SrcImm | Mov,
 164        /* 0xB8 - 0xBF */
 165        DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
 166        DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
 167        DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
 168        DstReg | SrcImm | Mov, DstReg | SrcImm | Mov,
 169        /* 0xC0 - 0xC7 */
 170        ByteOp | DstMem | SrcImm | ModRM, DstMem | SrcImmByte | ModRM,
 171        0, ImplicitOps | Stack, 0, 0,
 172        ByteOp | DstMem | SrcImm | ModRM | Mov, DstMem | SrcImm | ModRM | Mov,
 173        /* 0xC8 - 0xCF */
 174        0, 0, 0, 0, 0, 0, 0, 0,
 175        /* 0xD0 - 0xD7 */
 176        ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
 177        ByteOp | DstMem | SrcImplicit | ModRM, DstMem | SrcImplicit | ModRM,
 178        0, 0, 0, 0,
 179        /* 0xD8 - 0xDF */
 180        0, 0, 0, 0, 0, 0, 0, 0,
 181        /* 0xE0 - 0xE7 */
 182        0, 0, 0, 0,
 183        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
 184        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
 185        /* 0xE8 - 0xEF */
 186        ImplicitOps | Stack, SrcImm | ImplicitOps,
 187        ImplicitOps, SrcImmByte | ImplicitOps,
 188        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
 189        SrcNone | ByteOp | ImplicitOps, SrcNone | ImplicitOps,
 190        /* 0xF0 - 0xF7 */
 191        0, 0, 0, 0,
 192        ImplicitOps, ImplicitOps, Group | Group3_Byte, Group | Group3,
 193        /* 0xF8 - 0xFF */
 194        ImplicitOps, 0, ImplicitOps, ImplicitOps,
 195        ImplicitOps, ImplicitOps, Group | Group4, Group | Group5,
 196};
 197
 198static u16 twobyte_table[256] = {
 199        /* 0x00 - 0x0F */
 200        0, Group | GroupDual | Group7, 0, 0, 0, 0, ImplicitOps, 0,
 201        ImplicitOps, ImplicitOps, 0, 0, 0, ImplicitOps | ModRM, 0, 0,
 202        /* 0x10 - 0x1F */
 203        0, 0, 0, 0, 0, 0, 0, 0, ImplicitOps | ModRM, 0, 0, 0, 0, 0, 0, 0,
 204        /* 0x20 - 0x2F */
 205        ModRM | ImplicitOps, ModRM, ModRM | ImplicitOps, ModRM, 0, 0, 0, 0,
 206        0, 0, 0, 0, 0, 0, 0, 0,
 207        /* 0x30 - 0x3F */
 208        ImplicitOps, 0, ImplicitOps, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
 209        /* 0x40 - 0x47 */
 210        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
 211        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
 212        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
 213        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
 214        /* 0x48 - 0x4F */
 215        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
 216        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
 217        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
 218        DstReg | SrcMem | ModRM | Mov, DstReg | SrcMem | ModRM | Mov,
 219        /* 0x50 - 0x5F */
 220        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
 221        /* 0x60 - 0x6F */
 222        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
 223        /* 0x70 - 0x7F */
 224        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
 225        /* 0x80 - 0x8F */
 226        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
 227        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
 228        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
 229        ImplicitOps, ImplicitOps, ImplicitOps, ImplicitOps,
 230        /* 0x90 - 0x9F */
 231        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
 232        /* 0xA0 - 0xA7 */
 233        0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, 0, 0,
 234        /* 0xA8 - 0xAF */
 235        0, 0, 0, DstMem | SrcReg | ModRM | BitOp, 0, 0, ModRM, 0,
 236        /* 0xB0 - 0xB7 */
 237        ByteOp | DstMem | SrcReg | ModRM, DstMem | SrcReg | ModRM, 0,
 238            DstMem | SrcReg | ModRM | BitOp,
 239        0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
 240            DstReg | SrcMem16 | ModRM | Mov,
 241        /* 0xB8 - 0xBF */
 242        0, 0, DstMem | SrcImmByte | ModRM, DstMem | SrcReg | ModRM | BitOp,
 243        0, 0, ByteOp | DstReg | SrcMem | ModRM | Mov,
 244            DstReg | SrcMem16 | ModRM | Mov,
 245        /* 0xC0 - 0xCF */
 246        0, 0, 0, DstMem | SrcReg | ModRM | Mov, 0, 0, 0, ImplicitOps | ModRM,
 247        0, 0, 0, 0, 0, 0, 0, 0,
 248        /* 0xD0 - 0xDF */
 249        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
 250        /* 0xE0 - 0xEF */
 251        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
 252        /* 0xF0 - 0xFF */
 253        0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
 254};
 255
 256static u16 group_table[] = {
 257        [Group1_80*8] =
 258        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
 259        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
 260        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
 261        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
 262        [Group1_81*8] =
 263        DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
 264        DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
 265        DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
 266        DstMem | SrcImm | ModRM, DstMem | SrcImm | ModRM,
 267        [Group1_82*8] =
 268        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
 269        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
 270        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
 271        ByteOp | DstMem | SrcImm | ModRM, ByteOp | DstMem | SrcImm | ModRM,
 272        [Group1_83*8] =
 273        DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
 274        DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
 275        DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
 276        DstMem | SrcImmByte | ModRM, DstMem | SrcImmByte | ModRM,
 277        [Group1A*8] =
 278        DstMem | SrcNone | ModRM | Mov | Stack, 0, 0, 0, 0, 0, 0, 0,
 279        [Group3_Byte*8] =
 280        ByteOp | SrcImm | DstMem | ModRM, 0,
 281        ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
 282        0, 0, 0, 0,
 283        [Group3*8] =
 284        DstMem | SrcImm | ModRM, 0,
 285        DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
 286        0, 0, 0, 0,
 287        [Group4*8] =
 288        ByteOp | DstMem | SrcNone | ModRM, ByteOp | DstMem | SrcNone | ModRM,
 289        0, 0, 0, 0, 0, 0,
 290        [Group5*8] =
 291        DstMem | SrcNone | ModRM, DstMem | SrcNone | ModRM,
 292        SrcMem | ModRM | Stack, 0,
 293        SrcMem | ModRM | Stack, 0, SrcMem | ModRM | Stack, 0,
 294        [Group7*8] =
 295        0, 0, ModRM | SrcMem, ModRM | SrcMem,
 296        SrcNone | ModRM | DstMem | Mov, 0,
 297        SrcMem16 | ModRM | Mov, SrcMem | ModRM | ByteOp,
 298};
 299
 300static u16 group2_table[] = {
 301        [Group7*8] =
 302        SrcNone | ModRM, 0, 0, 0,
 303        SrcNone | ModRM | DstMem | Mov, 0,
 304        SrcMem16 | ModRM | Mov, 0,
 305};
 306
 307/* EFLAGS bit definitions. */
 308#define EFLG_OF (1<<11)
 309#define EFLG_DF (1<<10)
 310#define EFLG_SF (1<<7)
 311#define EFLG_ZF (1<<6)
 312#define EFLG_AF (1<<4)
 313#define EFLG_PF (1<<2)
 314#define EFLG_CF (1<<0)
 315
 316/*
 317 * Instruction emulation:
 318 * Most instructions are emulated directly via a fragment of inline assembly
 319 * code. This allows us to save/restore EFLAGS and thus very easily pick up
 320 * any modified flags.
 321 */
 322
 323#if defined(CONFIG_X86_64)
 324#define _LO32 "k"               /* force 32-bit operand */
 325#define _STK  "%%rsp"           /* stack pointer */
 326#elif defined(__i386__)
 327#define _LO32 ""                /* force 32-bit operand */
 328#define _STK  "%%esp"           /* stack pointer */
 329#endif
 330
 331/*
 332 * These EFLAGS bits are restored from saved value during emulation, and
 333 * any changes are written back to the saved value after emulation.
 334 */
 335#define EFLAGS_MASK (EFLG_OF|EFLG_SF|EFLG_ZF|EFLG_AF|EFLG_PF|EFLG_CF)
 336
 337/* Before executing instruction: restore necessary bits in EFLAGS. */
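     /*
      * How the macro below works: the saved flags are pushed twice.  The top
      * copy is masked down to (_sav & _msk); the EFLAGS image pushed by
      * "pushf" is masked to (EFLAGS & ~_msk), OR-ed into it and loaded back
      * with "popf".  The other copy, BITS_PER_LONG/4 bytes (two words) away
      * from the stack pointer, has the _msk bits cleared and is finally
      * popped back into _sav.
      */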
 338#define _PRE_EFLAGS(_sav, _msk, _tmp)                                   \
 339        /* EFLAGS = (_sav & _msk) | (EFLAGS & ~_msk); _sav &= ~_msk; */ \
 340        "movl %"_sav",%"_LO32 _tmp"; "                                  \
 341        "push %"_tmp"; "                                                \
 342        "push %"_tmp"; "                                                \
 343        "movl %"_msk",%"_LO32 _tmp"; "                                  \
 344        "andl %"_LO32 _tmp",("_STK"); "                                 \
 345        "pushf; "                                                       \
 346        "notl %"_LO32 _tmp"; "                                          \
 347        "andl %"_LO32 _tmp",("_STK"); "                                 \
 348        "andl %"_LO32 _tmp","__stringify(BITS_PER_LONG/4)"("_STK"); "   \
 349        "pop  %"_tmp"; "                                                \
 350        "orl  %"_LO32 _tmp",("_STK"); "                                 \
 351        "popf; "                                                        \
 352        "pop  %"_sav"; "
 353
 354/* After executing instruction: write-back necessary bits in EFLAGS. */
 355#define _POST_EFLAGS(_sav, _msk, _tmp) \
 356        /* _sav |= EFLAGS & _msk; */            \
 357        "pushf; "                               \
 358        "pop  %"_tmp"; "                        \
 359        "andl %"_msk",%"_LO32 _tmp"; "          \
 360        "orl  %"_LO32 _tmp",%"_sav"; "
 361
 362/* Raw emulation: instruction has two explicit operands. */
 363#define __emulate_2op_nobyte(_op,_src,_dst,_eflags,_wx,_wy,_lx,_ly,_qx,_qy) \
 364        do {                                                                \
 365                unsigned long _tmp;                                         \
 366                                                                            \
 367                switch ((_dst).bytes) {                                     \
 368                case 2:                                                     \
 369                        __asm__ __volatile__ (                              \
 370                                _PRE_EFLAGS("0", "4", "2")                  \
 371                                _op"w %"_wx"3,%1; "                         \
 372                                _POST_EFLAGS("0", "4", "2")                 \
 373                                : "=m" (_eflags), "=m" ((_dst).val),        \
 374                                  "=&r" (_tmp)                              \
 375                                : _wy ((_src).val), "i" (EFLAGS_MASK));     \
 376                        break;                                              \
 377                case 4:                                                     \
 378                        __asm__ __volatile__ (                              \
 379                                _PRE_EFLAGS("0", "4", "2")                  \
 380                                _op"l %"_lx"3,%1; "                         \
 381                                _POST_EFLAGS("0", "4", "2")                 \
 382                                : "=m" (_eflags), "=m" ((_dst).val),        \
 383                                  "=&r" (_tmp)                              \
 384                                : _ly ((_src).val), "i" (EFLAGS_MASK));     \
 385                        break;                                              \
 386                case 8:                                                     \
 387                        __emulate_2op_8byte(_op, _src, _dst,                \
 388                                            _eflags, _qx, _qy);             \
 389                        break;                                              \
 390                }                                                           \
 391        } while (0)
 392
 393#define __emulate_2op(_op,_src,_dst,_eflags,_bx,_by,_wx,_wy,_lx,_ly,_qx,_qy) \
 394        do {                                                                 \
 395                unsigned long __tmp;                                         \
 396                switch ((_dst).bytes) {                                      \
 397                case 1:                                                      \
 398                        __asm__ __volatile__ (                               \
 399                                _PRE_EFLAGS("0", "4", "2")                   \
 400                                _op"b %"_bx"3,%1; "                          \
 401                                _POST_EFLAGS("0", "4", "2")                  \
 402                                : "=m" (_eflags), "=m" ((_dst).val),         \
 403                                  "=&r" (__tmp)                              \
 404                                : _by ((_src).val), "i" (EFLAGS_MASK));      \
 405                        break;                                               \
 406                default:                                                     \
 407                        __emulate_2op_nobyte(_op, _src, _dst, _eflags,       \
 408                                             _wx, _wy, _lx, _ly, _qx, _qy);  \
 409                        break;                                               \
 410                }                                                            \
 411        } while (0)
 412
 413/* Source operand is byte-sized and may be restricted to just %cl. */
 414#define emulate_2op_SrcB(_op, _src, _dst, _eflags)                      \
 415        __emulate_2op(_op, _src, _dst, _eflags,                         \
 416                      "b", "c", "b", "c", "b", "c", "b", "c")
 417
 418/* Source operand is byte, word, long or quad sized. */
 419#define emulate_2op_SrcV(_op, _src, _dst, _eflags)                      \
 420        __emulate_2op(_op, _src, _dst, _eflags,                         \
 421                      "b", "q", "w", "r", _LO32, "r", "", "r")
 422
 423/* Source operand is word, long or quad sized. */
 424#define emulate_2op_SrcV_nobyte(_op, _src, _dst, _eflags)               \
 425        __emulate_2op_nobyte(_op, _src, _dst, _eflags,                  \
 426                             "w", "r", _LO32, "r", "", "r")
 427
 428/* Instruction has only one explicit operand (no source operand). */
 429#define emulate_1op(_op, _dst, _eflags)                                    \
 430        do {                                                            \
 431                unsigned long _tmp;                                     \
 432                                                                        \
 433                switch ((_dst).bytes) {                                 \
 434                case 1:                                                 \
 435                        __asm__ __volatile__ (                          \
 436                                _PRE_EFLAGS("0", "3", "2")              \
 437                                _op"b %1; "                             \
 438                                _POST_EFLAGS("0", "3", "2")             \
 439                                : "=m" (_eflags), "=m" ((_dst).val),    \
 440                                  "=&r" (_tmp)                          \
 441                                : "i" (EFLAGS_MASK));                   \
 442                        break;                                          \
 443                case 2:                                                 \
 444                        __asm__ __volatile__ (                          \
 445                                _PRE_EFLAGS("0", "3", "2")              \
 446                                _op"w %1; "                             \
 447                                _POST_EFLAGS("0", "3", "2")             \
 448                                : "=m" (_eflags), "=m" ((_dst).val),    \
 449                                  "=&r" (_tmp)                          \
 450                                : "i" (EFLAGS_MASK));                   \
 451                        break;                                          \
 452                case 4:                                                 \
 453                        __asm__ __volatile__ (                          \
 454                                _PRE_EFLAGS("0", "3", "2")              \
 455                                _op"l %1; "                             \
 456                                _POST_EFLAGS("0", "3", "2")             \
 457                                : "=m" (_eflags), "=m" ((_dst).val),    \
 458                                  "=&r" (_tmp)                          \
 459                                : "i" (EFLAGS_MASK));                   \
 460                        break;                                          \
 461                case 8:                                                 \
 462                        __emulate_1op_8byte(_op, _dst, _eflags);        \
 463                        break;                                          \
 464                }                                                       \
 465        } while (0)
 466
 467/* Emulate an instruction with quadword operands (x86/64 only). */
 468#if defined(CONFIG_X86_64)
 469#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)           \
 470        do {                                                              \
 471                __asm__ __volatile__ (                                    \
 472                        _PRE_EFLAGS("0", "4", "2")                        \
 473                        _op"q %"_qx"3,%1; "                               \
 474                        _POST_EFLAGS("0", "4", "2")                       \
 475                        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
 476                        : _qy ((_src).val), "i" (EFLAGS_MASK));         \
 477        } while (0)
 478
 479#define __emulate_1op_8byte(_op, _dst, _eflags)                           \
 480        do {                                                              \
 481                __asm__ __volatile__ (                                    \
 482                        _PRE_EFLAGS("0", "3", "2")                        \
 483                        _op"q %1; "                                       \
 484                        _POST_EFLAGS("0", "3", "2")                       \
 485                        : "=m" (_eflags), "=m" ((_dst).val), "=&r" (_tmp) \
 486                        : "i" (EFLAGS_MASK));                             \
 487        } while (0)
 488
 489#elif defined(__i386__)
 490#define __emulate_2op_8byte(_op, _src, _dst, _eflags, _qx, _qy)
 491#define __emulate_1op_8byte(_op, _dst, _eflags)
 492#endif                          /* __i386__ */
 493
 494/* Fetch next part of the instruction being emulated. */
 495#define insn_fetch(_type, _size, _eip)                                  \
 496({      unsigned long _x;                                               \
 497        rc = do_insn_fetch(ctxt, ops, (_eip), &_x, (_size));            \
 498        if (rc != 0)                                                    \
 499                goto done;                                              \
 500        (_eip) += (_size);                                              \
 501        (_type)_x;                                                      \
 502})
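     /*
      * Note: insn_fetch() is a statement expression; it relies on the local
      * variables "rc", "ctxt" and "ops" and on a "done" label in the caller,
      * so it can only be used from the decode routines below.
      */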
 503
 504static inline unsigned long ad_mask(struct decode_cache *c)
 505{
 506        return (1UL << (c->ad_bytes << 3)) - 1;
 507}
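     /*
      * ad_bytes == 2 yields 0xffff and ad_bytes == 4 yields 0xffffffff.  The
      * callers below special-case ad_bytes == sizeof(unsigned long), so the
      * shift never reaches the full width of a long (which would be
      * undefined).
      */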
 508
 509/* Access/update address held in a register, based on addressing mode. */
 510static inline unsigned long
 511address_mask(struct decode_cache *c, unsigned long reg)
 512{
 513        if (c->ad_bytes == sizeof(unsigned long))
 514                return reg;
 515        else
 516                return reg & ad_mask(c);
 517}
 518
 519static inline unsigned long
 520register_address(struct decode_cache *c, unsigned long base, unsigned long reg)
 521{
 522        return base + address_mask(c, reg);
 523}
 524
 525static inline void
 526register_address_increment(struct decode_cache *c, unsigned long *reg, int inc)
 527{
 528        if (c->ad_bytes == sizeof(unsigned long))
 529                *reg += inc;
 530        else
 531                *reg = (*reg & ~ad_mask(c)) | ((*reg + inc) & ad_mask(c));
 532}
 533
 534static inline void jmp_rel(struct decode_cache *c, int rel)
 535{
 536        register_address_increment(c, &c->eip, rel);
 537}
 538
 539static void set_seg_override(struct decode_cache *c, int seg)
 540{
 541        c->has_seg_override = true;
 542        c->seg_override = seg;
 543}
 544
 545static unsigned long seg_base(struct x86_emulate_ctxt *ctxt, int seg)
 546{
 547        if (ctxt->mode == X86EMUL_MODE_PROT64 && seg < VCPU_SREG_FS)
 548                return 0;
 549
 550        return kvm_x86_ops->get_segment_base(ctxt->vcpu, seg);
 551}
 552
 553static unsigned long seg_override_base(struct x86_emulate_ctxt *ctxt,
 554                                       struct decode_cache *c)
 555{
 556        if (!c->has_seg_override)
 557                return 0;
 558
 559        return seg_base(ctxt, c->seg_override);
 560}
 561
 562static unsigned long es_base(struct x86_emulate_ctxt *ctxt)
 563{
 564        return seg_base(ctxt, VCPU_SREG_ES);
 565}
 566
 567static unsigned long ss_base(struct x86_emulate_ctxt *ctxt)
 568{
 569        return seg_base(ctxt, VCPU_SREG_SS);
 570}
 571
 572static int do_fetch_insn_byte(struct x86_emulate_ctxt *ctxt,
 573                              struct x86_emulate_ops *ops,
 574                              unsigned long linear, u8 *dest)
 575{
 576        struct fetch_cache *fc = &ctxt->decode.fetch;
 577        int rc;
 578        int size;
 579
 580        if (linear < fc->start || linear >= fc->end) {
 581                size = min(15UL, PAGE_SIZE - offset_in_page(linear));
 582                rc = ops->read_std(linear, fc->data, size, ctxt->vcpu);
 583                if (rc)
 584                        return rc;
 585                fc->start = linear;
 586                fc->end = linear + size;
 587        }
 588        *dest = fc->data[linear - fc->start];
 589        return 0;
 590}
 591
 592static int do_insn_fetch(struct x86_emulate_ctxt *ctxt,
 593                         struct x86_emulate_ops *ops,
 594                         unsigned long eip, void *dest, unsigned size)
 595{
 596        int rc = 0;
 597
 598        eip += ctxt->cs_base;
 599        while (size--) {
 600                rc = do_fetch_insn_byte(ctxt, ops, eip++, dest++);
 601                if (rc)
 602                        return rc;
 603        }
 604        return 0;
 605}
 606
 607/*
 608 * Given the 'reg' portion of a ModRM byte, and a register block, return a
 609 * pointer into the block that addresses the relevant register.
 610 * @highbyte_regs specifies whether to decode AH,CH,DH,BH.
 611 */
 612static void *decode_register(u8 modrm_reg, unsigned long *regs,
 613                             int highbyte_regs)
 614{
 615        void *p;
 616
 617        p = &regs[modrm_reg];
 618        if (highbyte_regs && modrm_reg >= 4 && modrm_reg < 8)
 619                p = (unsigned char *)&regs[modrm_reg & 3] + 1;
 620        return p;
 621}
 622
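     /*
      * Read a 16-bit limit plus a base address (a descriptor-table pointer,
      * as used by lgdt/lidt) from guest memory.  The bump from 2 to 3 bytes
      * presumably mirrors the 24-bit base loaded by the 16-bit operand-size
      * forms of those instructions.
      */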
 623static int read_descriptor(struct x86_emulate_ctxt *ctxt,
 624                           struct x86_emulate_ops *ops,
 625                           void *ptr,
 626                           u16 *size, unsigned long *address, int op_bytes)
 627{
 628        int rc;
 629
 630        if (op_bytes == 2)
 631                op_bytes = 3;
 632        *address = 0;
 633        rc = ops->read_std((unsigned long)ptr, (unsigned long *)size, 2,
 634                           ctxt->vcpu);
 635        if (rc)
 636                return rc;
 637        rc = ops->read_std((unsigned long)ptr + 2, address, op_bytes,
 638                           ctxt->vcpu);
 639        return rc;
 640}
 641
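     /*
      * Evaluate a Jcc/SETcc/CMOVcc condition code (the low nibble of the
      * opcode) against the given EFLAGS value; returns nonzero if it holds.
      */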
 642static int test_cc(unsigned int condition, unsigned int flags)
 643{
 644        int rc = 0;
 645
 646        switch ((condition & 15) >> 1) {
 647        case 0: /* o */
 648                rc |= (flags & EFLG_OF);
 649                break;
 650        case 1: /* b/c/nae */
 651                rc |= (flags & EFLG_CF);
 652                break;
 653        case 2: /* z/e */
 654                rc |= (flags & EFLG_ZF);
 655                break;
 656        case 3: /* be/na */
 657                rc |= (flags & (EFLG_CF|EFLG_ZF));
 658                break;
 659        case 4: /* s */
 660                rc |= (flags & EFLG_SF);
 661                break;
 662        case 5: /* p/pe */
 663                rc |= (flags & EFLG_PF);
 664                break;
 665        case 7: /* le/ng */
 666                rc |= (flags & EFLG_ZF);
 667                /* fall through */
 668        case 6: /* l/nge */
 669                rc |= (!(flags & EFLG_SF) != !(flags & EFLG_OF));
 670                break;
 671        }
 672
 673        /* Odd condition identifiers (lsb == 1) have inverted sense. */
 674        return (!!rc ^ (condition & 1));
 675}
 676
 677static void decode_register_operand(struct operand *op,
 678                                    struct decode_cache *c,
 679                                    int inhibit_bytereg)
 680{
 681        unsigned reg = c->modrm_reg;
 682        int highbyte_regs = c->rex_prefix == 0;
 683
 684        if (!(c->d & ModRM))
 685                reg = (c->b & 7) | ((c->rex_prefix & 1) << 3);
 686        op->type = OP_REG;
 687        if ((c->d & ByteOp) && !inhibit_bytereg) {
 688                op->ptr = decode_register(reg, c->regs, highbyte_regs);
 689                op->val = *(u8 *)op->ptr;
 690                op->bytes = 1;
 691        } else {
 692                op->ptr = decode_register(reg, c->regs, 0);
 693                op->bytes = c->op_bytes;
 694                switch (op->bytes) {
 695                case 2:
 696                        op->val = *(u16 *)op->ptr;
 697                        break;
 698                case 4:
 699                        op->val = *(u32 *)op->ptr;
 700                        break;
 701                case 8:
 702                        op->val = *(u64 *) op->ptr;
 703                        break;
 704                }
 705        }
 706        op->orig_val = op->val;
 707}
 708
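     /*
      * Decode the ModRM byte plus any SIB byte and displacement.  For a
      * register operand (mod == 3) this sets modrm_ptr/modrm_val; otherwise
      * the effective address is accumulated in modrm_ea.
      */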
 709static int decode_modrm(struct x86_emulate_ctxt *ctxt,
 710                        struct x86_emulate_ops *ops)
 711{
 712        struct decode_cache *c = &ctxt->decode;
 713        u8 sib;
 714        int index_reg = 0, base_reg = 0, scale;
 715        int rc = 0;
 716
 717        if (c->rex_prefix) {
 718                c->modrm_reg = (c->rex_prefix & 4) << 1;        /* REX.R */
 719                index_reg = (c->rex_prefix & 2) << 2; /* REX.X */
  720                c->modrm_rm = base_reg = (c->rex_prefix & 1) << 3; /* REX.B */
 721        }
 722
 723        c->modrm = insn_fetch(u8, 1, c->eip);
 724        c->modrm_mod |= (c->modrm & 0xc0) >> 6;
 725        c->modrm_reg |= (c->modrm & 0x38) >> 3;
 726        c->modrm_rm |= (c->modrm & 0x07);
 727        c->modrm_ea = 0;
 728        c->use_modrm_ea = 1;
 729
 730        if (c->modrm_mod == 3) {
 731                c->modrm_ptr = decode_register(c->modrm_rm,
 732                                               c->regs, c->d & ByteOp);
 733                c->modrm_val = *(unsigned long *)c->modrm_ptr;
 734                return rc;
 735        }
 736
 737        if (c->ad_bytes == 2) {
 738                unsigned bx = c->regs[VCPU_REGS_RBX];
 739                unsigned bp = c->regs[VCPU_REGS_RBP];
 740                unsigned si = c->regs[VCPU_REGS_RSI];
 741                unsigned di = c->regs[VCPU_REGS_RDI];
 742
 743                /* 16-bit ModR/M decode. */
 744                switch (c->modrm_mod) {
 745                case 0:
 746                        if (c->modrm_rm == 6)
 747                                c->modrm_ea += insn_fetch(u16, 2, c->eip);
 748                        break;
 749                case 1:
 750                        c->modrm_ea += insn_fetch(s8, 1, c->eip);
 751                        break;
 752                case 2:
 753                        c->modrm_ea += insn_fetch(u16, 2, c->eip);
 754                        break;
 755                }
 756                switch (c->modrm_rm) {
 757                case 0:
 758                        c->modrm_ea += bx + si;
 759                        break;
 760                case 1:
 761                        c->modrm_ea += bx + di;
 762                        break;
 763                case 2:
 764                        c->modrm_ea += bp + si;
 765                        break;
 766                case 3:
 767                        c->modrm_ea += bp + di;
 768                        break;
 769                case 4:
 770                        c->modrm_ea += si;
 771                        break;
 772                case 5:
 773                        c->modrm_ea += di;
 774                        break;
 775                case 6:
 776                        if (c->modrm_mod != 0)
 777                                c->modrm_ea += bp;
 778                        break;
 779                case 7:
 780                        c->modrm_ea += bx;
 781                        break;
 782                }
 783                if (c->modrm_rm == 2 || c->modrm_rm == 3 ||
 784                    (c->modrm_rm == 6 && c->modrm_mod != 0))
 785                        if (!c->has_seg_override)
 786                                set_seg_override(c, VCPU_SREG_SS);
 787                c->modrm_ea = (u16)c->modrm_ea;
 788        } else {
 789                /* 32/64-bit ModR/M decode. */
 790                if ((c->modrm_rm & 7) == 4) {
 791                        sib = insn_fetch(u8, 1, c->eip);
 792                        index_reg |= (sib >> 3) & 7;
 793                        base_reg |= sib & 7;
 794                        scale = sib >> 6;
 795
 796                        if ((base_reg & 7) == 5 && c->modrm_mod == 0)
 797                                c->modrm_ea += insn_fetch(s32, 4, c->eip);
 798                        else
 799                                c->modrm_ea += c->regs[base_reg];
 800                        if (index_reg != 4)
 801                                c->modrm_ea += c->regs[index_reg] << scale;
 802                } else if ((c->modrm_rm & 7) == 5 && c->modrm_mod == 0) {
 803                        if (ctxt->mode == X86EMUL_MODE_PROT64)
 804                                c->rip_relative = 1;
 805                } else
 806                        c->modrm_ea += c->regs[c->modrm_rm];
 807                switch (c->modrm_mod) {
 808                case 0:
 809                        if (c->modrm_rm == 5)
 810                                c->modrm_ea += insn_fetch(s32, 4, c->eip);
 811                        break;
 812                case 1:
 813                        c->modrm_ea += insn_fetch(s8, 1, c->eip);
 814                        break;
 815                case 2:
 816                        c->modrm_ea += insn_fetch(s32, 4, c->eip);
 817                        break;
 818                }
 819        }
 820done:
 821        return rc;
 822}
 823
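     /*
      * Decode an absolute (moffs-style) address, as used by the MemAbs
      * opcodes 0xa0-0xa3.
      */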
 824static int decode_abs(struct x86_emulate_ctxt *ctxt,
 825                      struct x86_emulate_ops *ops)
 826{
 827        struct decode_cache *c = &ctxt->decode;
 828        int rc = 0;
 829
 830        switch (c->ad_bytes) {
 831        case 2:
 832                c->modrm_ea = insn_fetch(u16, 2, c->eip);
 833                break;
 834        case 4:
 835                c->modrm_ea = insn_fetch(u32, 4, c->eip);
 836                break;
 837        case 8:
 838                c->modrm_ea = insn_fetch(u64, 8, c->eip);
 839                break;
 840        }
 841done:
 842        return rc;
 843}
 844
 845int
 846x86_decode_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
 847{
 848        struct decode_cache *c = &ctxt->decode;
 849        int rc = 0;
 850        int mode = ctxt->mode;
 851        int def_op_bytes, def_ad_bytes, group;
 852
 853        /* Shadow copy of register state. Committed on successful emulation. */
 854
 855        memset(c, 0, sizeof(struct decode_cache));
 856        c->eip = kvm_rip_read(ctxt->vcpu);
 857        ctxt->cs_base = seg_base(ctxt, VCPU_SREG_CS);
 858        memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
 859
 860        switch (mode) {
 861        case X86EMUL_MODE_REAL:
 862        case X86EMUL_MODE_PROT16:
 863                def_op_bytes = def_ad_bytes = 2;
 864                break;
 865        case X86EMUL_MODE_PROT32:
 866                def_op_bytes = def_ad_bytes = 4;
 867                break;
 868#ifdef CONFIG_X86_64
 869        case X86EMUL_MODE_PROT64:
 870                def_op_bytes = 4;
 871                def_ad_bytes = 8;
 872                break;
 873#endif
 874        default:
 875                return -1;
 876        }
 877
 878        c->op_bytes = def_op_bytes;
 879        c->ad_bytes = def_ad_bytes;
 880
 881        /* Legacy prefixes. */
 882        for (;;) {
 883                switch (c->b = insn_fetch(u8, 1, c->eip)) {
 884                case 0x66:      /* operand-size override */
 885                        /* switch between 2/4 bytes */
 886                        c->op_bytes = def_op_bytes ^ 6;
 887                        break;
 888                case 0x67:      /* address-size override */
 889                        if (mode == X86EMUL_MODE_PROT64)
 890                                /* switch between 4/8 bytes */
 891                                c->ad_bytes = def_ad_bytes ^ 12;
 892                        else
 893                                /* switch between 2/4 bytes */
 894                                c->ad_bytes = def_ad_bytes ^ 6;
 895                        break;
 896                case 0x26:      /* ES override */
 897                case 0x2e:      /* CS override */
 898                case 0x36:      /* SS override */
 899                case 0x3e:      /* DS override */
 900                        set_seg_override(c, (c->b >> 3) & 3);
 901                        break;
 902                case 0x64:      /* FS override */
 903                case 0x65:      /* GS override */
 904                        set_seg_override(c, c->b & 7);
 905                        break;
 906                case 0x40 ... 0x4f: /* REX */
 907                        if (mode != X86EMUL_MODE_PROT64)
 908                                goto done_prefixes;
 909                        c->rex_prefix = c->b;
 910                        continue;
 911                case 0xf0:      /* LOCK */
 912                        c->lock_prefix = 1;
 913                        break;
 914                case 0xf2:      /* REPNE/REPNZ */
 915                        c->rep_prefix = REPNE_PREFIX;
 916                        break;
 917                case 0xf3:      /* REP/REPE/REPZ */
 918                        c->rep_prefix = REPE_PREFIX;
 919                        break;
 920                default:
 921                        goto done_prefixes;
 922                }
 923
 924                /* Any legacy prefix after a REX prefix nullifies its effect. */
 925
 926                c->rex_prefix = 0;
 927        }
 928
 929done_prefixes:
 930
 931        /* REX prefix. */
 932        if (c->rex_prefix)
 933                if (c->rex_prefix & 8)
 934                        c->op_bytes = 8;        /* REX.W */
 935
 936        /* Opcode byte(s). */
 937        c->d = opcode_table[c->b];
 938        if (c->d == 0) {
 939                /* Two-byte opcode? */
 940                if (c->b == 0x0f) {
 941                        c->twobyte = 1;
 942                        c->b = insn_fetch(u8, 1, c->eip);
 943                        c->d = twobyte_table[c->b];
 944                }
 945        }
 946
 947        if (c->d & Group) {
 948                group = c->d & GroupMask;
 949                c->modrm = insn_fetch(u8, 1, c->eip);
 950                --c->eip;
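                     /* Rewind: the ModRM byte is fetched again by decode_modrm(). */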
 951
 952                group = (group << 3) + ((c->modrm >> 3) & 7);
 953                if ((c->d & GroupDual) && (c->modrm >> 6) == 3)
 954                        c->d = group2_table[group];
 955                else
 956                        c->d = group_table[group];
 957        }
 958
 959        /* Unrecognised? */
 960        if (c->d == 0) {
 961                DPRINTF("Cannot emulate %02x\n", c->b);
 962                return -1;
 963        }
 964
 965        if (mode == X86EMUL_MODE_PROT64 && (c->d & Stack))
 966                c->op_bytes = 8;
 967
 968        /* ModRM and SIB bytes. */
 969        if (c->d & ModRM)
 970                rc = decode_modrm(ctxt, ops);
 971        else if (c->d & MemAbs)
 972                rc = decode_abs(ctxt, ops);
 973        if (rc)
 974                goto done;
 975
 976        if (!c->has_seg_override)
 977                set_seg_override(c, VCPU_SREG_DS);
 978
 979        if (!(!c->twobyte && c->b == 0x8d))
 980                c->modrm_ea += seg_override_base(ctxt, c);
 981
 982        if (c->ad_bytes != 8)
 983                c->modrm_ea = (u32)c->modrm_ea;
 984        /*
 985         * Decode and fetch the source operand: register, memory
 986         * or immediate.
 987         */
 988        switch (c->d & SrcMask) {
 989        case SrcNone:
 990                break;
 991        case SrcReg:
 992                decode_register_operand(&c->src, c, 0);
 993                break;
 994        case SrcMem16:
 995                c->src.bytes = 2;
 996                goto srcmem_common;
 997        case SrcMem32:
 998                c->src.bytes = 4;
 999                goto srcmem_common;
1000        case SrcMem:
 1001                c->src.bytes = (c->d & ByteOp) ? 1 :
 1002                               c->op_bytes;
1003                /* Don't fetch the address for invlpg: it could be unmapped. */
1004                if (c->twobyte && c->b == 0x01 && c->modrm_reg == 7)
1005                        break;
1006        srcmem_common:
1007                /*
1008                 * For instructions with a ModR/M byte, switch to register
1009                 * access if Mod = 3.
1010                 */
1011                if ((c->d & ModRM) && c->modrm_mod == 3) {
1012                        c->src.type = OP_REG;
1013                        c->src.val = c->modrm_val;
1014                        c->src.ptr = c->modrm_ptr;
1015                        break;
1016                }
1017                c->src.type = OP_MEM;
1018                break;
1019        case SrcImm:
1020                c->src.type = OP_IMM;
1021                c->src.ptr = (unsigned long *)c->eip;
1022                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1023                if (c->src.bytes == 8)
1024                        c->src.bytes = 4;
1025                /* NB. Immediates are sign-extended as necessary. */
1026                switch (c->src.bytes) {
1027                case 1:
1028                        c->src.val = insn_fetch(s8, 1, c->eip);
1029                        break;
1030                case 2:
1031                        c->src.val = insn_fetch(s16, 2, c->eip);
1032                        break;
1033                case 4:
1034                        c->src.val = insn_fetch(s32, 4, c->eip);
1035                        break;
1036                }
1037                break;
1038        case SrcImmByte:
1039                c->src.type = OP_IMM;
1040                c->src.ptr = (unsigned long *)c->eip;
1041                c->src.bytes = 1;
1042                c->src.val = insn_fetch(s8, 1, c->eip);
1043                break;
1044        }
1045
1046        /* Decode and fetch the destination operand: register or memory. */
1047        switch (c->d & DstMask) {
1048        case ImplicitOps:
1049                /* Special instructions do their own operand decoding. */
1050                return 0;
1051        case DstReg:
1052                decode_register_operand(&c->dst, c,
1053                         c->twobyte && (c->b == 0xb6 || c->b == 0xb7));
1054                break;
1055        case DstMem:
1056                if ((c->d & ModRM) && c->modrm_mod == 3) {
1057                        c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1058                        c->dst.type = OP_REG;
1059                        c->dst.val = c->dst.orig_val = c->modrm_val;
1060                        c->dst.ptr = c->modrm_ptr;
1061                        break;
1062                }
1063                c->dst.type = OP_MEM;
1064                break;
1065        case DstAcc:
1066                c->dst.type = OP_REG;
1067                c->dst.bytes = c->op_bytes;
1068                c->dst.ptr = &c->regs[VCPU_REGS_RAX];
1069                switch (c->op_bytes) {
 1070                case 1:
 1071                        c->dst.val = *(u8 *)c->dst.ptr;
 1072                        break;
 1073                case 2:
 1074                        c->dst.val = *(u16 *)c->dst.ptr;
 1075                        break;
 1076                case 4:
 1077                        c->dst.val = *(u32 *)c->dst.ptr;
 1078                        break;
1079                }
1080                c->dst.orig_val = c->dst.val;
1081                break;
1082        }
1083
1084        if (c->rip_relative)
1085                c->modrm_ea += c->eip;
1086
1087done:
1088        return (rc == X86EMUL_UNHANDLEABLE) ? -1 : 0;
1089}
1090
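     /*
      * Queue a push: pre-decrement RSP and point the destination at the new
      * top of stack; the value itself is stored to SS:RSP by writeback().
      */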
1091static inline void emulate_push(struct x86_emulate_ctxt *ctxt)
1092{
1093        struct decode_cache *c = &ctxt->decode;
1094
1095        c->dst.type  = OP_MEM;
1096        c->dst.bytes = c->op_bytes;
1097        c->dst.val = c->src.val;
1098        register_address_increment(c, &c->regs[VCPU_REGS_RSP], -c->op_bytes);
1099        c->dst.ptr = (void *) register_address(c, ss_base(ctxt),
1100                                               c->regs[VCPU_REGS_RSP]);
1101}
1102
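     /*
      * Group 1A (opcode 0x8f /0): pop into r/m.  Read the value at SS:RSP
      * into the destination and post-increment RSP; writeback() stores it.
      */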
1103static inline int emulate_grp1a(struct x86_emulate_ctxt *ctxt,
1104                                struct x86_emulate_ops *ops)
1105{
1106        struct decode_cache *c = &ctxt->decode;
1107        int rc;
1108
1109        rc = ops->read_std(register_address(c, ss_base(ctxt),
1110                                            c->regs[VCPU_REGS_RSP]),
1111                           &c->dst.val, c->dst.bytes, ctxt->vcpu);
1112        if (rc != 0)
1113                return rc;
1114
1115        register_address_increment(c, &c->regs[VCPU_REGS_RSP], c->dst.bytes);
1116
1117        return 0;
1118}
1119
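     /*
      * Group 2: the rotate/shift family (opcodes 0xc0-0xc1 and 0xd0-0xd3),
      * selected by the ModRM reg field.
      */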
1120static inline void emulate_grp2(struct x86_emulate_ctxt *ctxt)
1121{
1122        struct decode_cache *c = &ctxt->decode;
1123        switch (c->modrm_reg) {
1124        case 0: /* rol */
1125                emulate_2op_SrcB("rol", c->src, c->dst, ctxt->eflags);
1126                break;
1127        case 1: /* ror */
1128                emulate_2op_SrcB("ror", c->src, c->dst, ctxt->eflags);
1129                break;
1130        case 2: /* rcl */
1131                emulate_2op_SrcB("rcl", c->src, c->dst, ctxt->eflags);
1132                break;
1133        case 3: /* rcr */
1134                emulate_2op_SrcB("rcr", c->src, c->dst, ctxt->eflags);
1135                break;
1136        case 4: /* sal/shl */
1137        case 6: /* sal/shl */
1138                emulate_2op_SrcB("sal", c->src, c->dst, ctxt->eflags);
1139                break;
1140        case 5: /* shr */
1141                emulate_2op_SrcB("shr", c->src, c->dst, ctxt->eflags);
1142                break;
1143        case 7: /* sar */
1144                emulate_2op_SrcB("sar", c->src, c->dst, ctxt->eflags);
1145                break;
1146        }
1147}
1148
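     /*
      * Group 3 (opcodes 0xf6/0xf7): only test, not and neg are handled here;
      * mul, imul, div and idiv take the "cannot emulate" path below.
      */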
1149static inline int emulate_grp3(struct x86_emulate_ctxt *ctxt,
1150                               struct x86_emulate_ops *ops)
1151{
1152        struct decode_cache *c = &ctxt->decode;
1153        int rc = 0;
1154
1155        switch (c->modrm_reg) {
1156        case 0 ... 1:   /* test */
1157                emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
1158                break;
1159        case 2: /* not */
1160                c->dst.val = ~c->dst.val;
1161                break;
1162        case 3: /* neg */
1163                emulate_1op("neg", c->dst, ctxt->eflags);
1164                break;
1165        default:
1166                DPRINTF("Cannot emulate %02x\n", c->b);
1167                rc = X86EMUL_UNHANDLEABLE;
1168                break;
1169        }
1170        return rc;
1171}
1172
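     /*
      * Groups 4 and 5 (opcodes 0xfe/0xff): inc, dec, near call, near jmp and
      * push on the r/m operand; the far call/jmp encodings are not handled.
      */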
1173static inline int emulate_grp45(struct x86_emulate_ctxt *ctxt,
1174                               struct x86_emulate_ops *ops)
1175{
1176        struct decode_cache *c = &ctxt->decode;
1177
1178        switch (c->modrm_reg) {
1179        case 0: /* inc */
1180                emulate_1op("inc", c->dst, ctxt->eflags);
1181                break;
1182        case 1: /* dec */
1183                emulate_1op("dec", c->dst, ctxt->eflags);
1184                break;
1185        case 2: /* call near abs */ {
1186                long int old_eip;
1187                old_eip = c->eip;
1188                c->eip = c->src.val;
1189                c->src.val = old_eip;
1190                emulate_push(ctxt);
1191                break;
1192        }
1193        case 4: /* jmp abs */
1194                c->eip = c->src.val;
1195                break;
1196        case 6: /* push */
1197                emulate_push(ctxt);
1198                break;
1199        }
1200        return 0;
1201}
1202
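     /*
      * Group 9 (0x0f 0xc7 /1): cmpxchg8b.  Compare EDX:EAX with the 64-bit
      * operand in memory; if they are equal, store ECX:EBX there and set ZF,
      * otherwise load the memory value into EDX:EAX and clear ZF.
      */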
1203static inline int emulate_grp9(struct x86_emulate_ctxt *ctxt,
1204                               struct x86_emulate_ops *ops,
1205                               unsigned long memop)
1206{
1207        struct decode_cache *c = &ctxt->decode;
1208        u64 old, new;
1209        int rc;
1210
1211        rc = ops->read_emulated(memop, &old, 8, ctxt->vcpu);
1212        if (rc != 0)
1213                return rc;
1214
1215        if (((u32) (old >> 0) != (u32) c->regs[VCPU_REGS_RAX]) ||
1216            ((u32) (old >> 32) != (u32) c->regs[VCPU_REGS_RDX])) {
1217
1218                c->regs[VCPU_REGS_RAX] = (u32) (old >> 0);
1219                c->regs[VCPU_REGS_RDX] = (u32) (old >> 32);
1220                ctxt->eflags &= ~EFLG_ZF;
1221
1222        } else {
1223                new = ((u64)c->regs[VCPU_REGS_RCX] << 32) |
1224                       (u32) c->regs[VCPU_REGS_RBX];
1225
1226                rc = ops->cmpxchg_emulated(memop, &old, &new, 8, ctxt->vcpu);
1227                if (rc != 0)
1228                        return rc;
1229                ctxt->eflags |= EFLG_ZF;
1230        }
1231        return 0;
1232}
1233
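     /*
      * Commit the destination operand: register destinations are written
      * straight into the shadow register file; memory destinations go through
      * write_emulated(), or through cmpxchg_emulated() when a LOCK prefix is
      * present.
      */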
1234static inline int writeback(struct x86_emulate_ctxt *ctxt,
1235                            struct x86_emulate_ops *ops)
1236{
1237        int rc;
1238        struct decode_cache *c = &ctxt->decode;
1239
1240        switch (c->dst.type) {
1241        case OP_REG:
1242                /* The 4-byte case *is* correct:
1243                 * in 64-bit mode we zero-extend.
1244                 */
1245                switch (c->dst.bytes) {
1246                case 1:
1247                        *(u8 *)c->dst.ptr = (u8)c->dst.val;
1248                        break;
1249                case 2:
1250                        *(u16 *)c->dst.ptr = (u16)c->dst.val;
1251                        break;
1252                case 4:
1253                        *c->dst.ptr = (u32)c->dst.val;
1254                        break;  /* 64b: zero-ext */
1255                case 8:
1256                        *c->dst.ptr = c->dst.val;
1257                        break;
1258                }
1259                break;
1260        case OP_MEM:
1261                if (c->lock_prefix)
1262                        rc = ops->cmpxchg_emulated(
1263                                        (unsigned long)c->dst.ptr,
1264                                        &c->dst.orig_val,
1265                                        &c->dst.val,
1266                                        c->dst.bytes,
1267                                        ctxt->vcpu);
1268                else
1269                        rc = ops->write_emulated(
1270                                        (unsigned long)c->dst.ptr,
1271                                        &c->dst.val,
1272                                        c->dst.bytes,
1273                                        ctxt->vcpu);
1274                if (rc != 0)
1275                        return rc;
1276                break;
1277        case OP_NONE:
1278                /* no writeback */
1279                break;
1280        default:
1281                break;
1282        }
1283        return 0;
1284}
1285
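    /*
     * Main emulation entry point: execute the instruction previously decoded
     * by x86_decode_insn() against a shadow copy of the guest registers, and
     * commit the registers and rip only when emulation succeeds.
     */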
1286int
1287x86_emulate_insn(struct x86_emulate_ctxt *ctxt, struct x86_emulate_ops *ops)
1288{
1289        unsigned long memop = 0;
1290        u64 msr_data;
1291        unsigned long saved_eip = 0;
1292        struct decode_cache *c = &ctxt->decode;
1293        unsigned int port;
1294        int io_dir_in;
1295        int rc = 0;
1296
1297        /* Shadow copy of register state. Committed on successful emulation.
1298         * NOTE: it is safe to copy the registers from the vcpu here because
1299         * x86_decode_insn() does not modify them.
1300         */
1301
1302        memcpy(c->regs, ctxt->vcpu->arch.regs, sizeof c->regs);
1303        saved_eip = c->eip;
1304
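            /* For ModRM memory operands and absolute-address forms (MemAbs),
             * memop holds the effective address computed during decode. */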
1305        if (((c->d & ModRM) && (c->modrm_mod != 3)) || (c->d & MemAbs))
1306                memop = c->modrm_ea;
1307
1308        if (c->rep_prefix && (c->d & String)) {
1309                /* All REP prefixes have the same first termination condition */
1310                if (c->regs[VCPU_REGS_RCX] == 0) {
1311                        kvm_rip_write(ctxt->vcpu, c->eip);
1312                        goto done;
1313                }
1314                /* The second termination condition applies only to REPE
1315                 * and REPNE. If the repeat prefix is REPE/REPZ or
1316                 * REPNE/REPNZ, check the corresponding termination
1317                 * condition:
1318                 *      - if REPE/REPZ and ZF = 0 then done
1319                 *      - if REPNE/REPNZ and ZF = 1 then done
1320                 */
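                    /* Only cmps (0xa6/0xa7) and scas (0xae/0xaf) update ZF. */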
1321                if ((c->b == 0xa6) || (c->b == 0xa7) ||
1322                                (c->b == 0xae) || (c->b == 0xaf)) {
1323                        if ((c->rep_prefix == REPE_PREFIX) &&
1324                                ((ctxt->eflags & EFLG_ZF) == 0)) {
1325                                        kvm_rip_write(ctxt->vcpu, c->eip);
1326                                        goto done;
1327                        }
1328                        if ((c->rep_prefix == REPNE_PREFIX) &&
1329                                ((ctxt->eflags & EFLG_ZF) == EFLG_ZF)) {
1330                                kvm_rip_write(ctxt->vcpu, c->eip);
1331                                goto done;
1332                        }
1333                }
1334                c->regs[VCPU_REGS_RCX]--;
1335                c->eip = kvm_rip_read(ctxt->vcpu);
1336        }
1337
1338        if (c->src.type == OP_MEM) {
1339                c->src.ptr = (unsigned long *)memop;
1340                c->src.val = 0;
1341                rc = ops->read_emulated((unsigned long)c->src.ptr,
1342                                        &c->src.val,
1343                                        c->src.bytes,
1344                                        ctxt->vcpu);
1345                if (rc != 0)
1346                        goto done;
1347                c->src.orig_val = c->src.val;
1348        }
1349
1350        if ((c->d & DstMask) == ImplicitOps)
1351                goto special_insn;
1352
1353
1354        if (c->dst.type == OP_MEM) {
1355                c->dst.ptr = (unsigned long *)memop;
1356                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1357                c->dst.val = 0;
1358                if (c->d & BitOp) {
1359                        unsigned long mask = ~(c->dst.bytes * 8 - 1);
1360
1361                        c->dst.ptr = (void *)c->dst.ptr +
1362                                                   (c->src.val & mask) / 8;
1363                }
1364                if (!(c->d & Mov) &&
1365                                   /* optimisation - avoid slow emulated read */
1366                    ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
1367                                           &c->dst.val,
1368                                          c->dst.bytes, ctxt->vcpu)) != 0))
1369                        goto done;
1370        }
1371        c->dst.orig_val = c->dst.val;
1372
1373special_insn:
1374
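            /* Instructions with only implicit operands branch here directly,
             * skipping the generic destination-operand fetch above. */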
1375        if (c->twobyte)
1376                goto twobyte_insn;
1377
1378        switch (c->b) {
1379        case 0x00 ... 0x05:
1380              add:              /* add */
1381                emulate_2op_SrcV("add", c->src, c->dst, ctxt->eflags);
1382                break;
1383        case 0x08 ... 0x0d:
1384              or:               /* or */
1385                emulate_2op_SrcV("or", c->src, c->dst, ctxt->eflags);
1386                break;
1387        case 0x10 ... 0x15:
1388              adc:              /* adc */
1389                emulate_2op_SrcV("adc", c->src, c->dst, ctxt->eflags);
1390                break;
1391        case 0x18 ... 0x1d:
1392              sbb:              /* sbb */
1393                emulate_2op_SrcV("sbb", c->src, c->dst, ctxt->eflags);
1394                break;
1395        case 0x20 ... 0x25:
1396              and:              /* and */
1397                emulate_2op_SrcV("and", c->src, c->dst, ctxt->eflags);
1398                break;
1399        case 0x28 ... 0x2d:
1400              sub:              /* sub */
1401                emulate_2op_SrcV("sub", c->src, c->dst, ctxt->eflags);
1402                break;
1403        case 0x30 ... 0x35:
1404              xor:              /* xor */
1405                emulate_2op_SrcV("xor", c->src, c->dst, ctxt->eflags);
1406                break;
1407        case 0x38 ... 0x3d:
1408              cmp:              /* cmp */
1409                emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
1410                break;
1411        case 0x40 ... 0x47: /* inc r16/r32 */
1412                emulate_1op("inc", c->dst, ctxt->eflags);
1413                break;
1414        case 0x48 ... 0x4f: /* dec r16/r32 */
1415                emulate_1op("dec", c->dst, ctxt->eflags);
1416                break;
1417        case 0x50 ... 0x57:  /* push reg */
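                    /* Pre-decrement RSP and make the new stack top the
                     * memory destination for the register value. */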
1418                c->dst.type  = OP_MEM;
1419                c->dst.bytes = c->op_bytes;
1420                c->dst.val = c->src.val;
1421                register_address_increment(c, &c->regs[VCPU_REGS_RSP],
1422                                           -c->op_bytes);
1423                c->dst.ptr = (void *) register_address(
1424                        c, ss_base(ctxt), c->regs[VCPU_REGS_RSP]);
1425                break;
1426        case 0x58 ... 0x5f: /* pop reg */
1427        pop_instruction:
1428                if ((rc = ops->read_std(register_address(c, ss_base(ctxt),
1429                        c->regs[VCPU_REGS_RSP]), c->dst.ptr,
1430                        c->op_bytes, ctxt->vcpu)) != 0)
1431                        goto done;
1432
1433                register_address_increment(c, &c->regs[VCPU_REGS_RSP],
1434                                           c->op_bytes);
1435                c->dst.type = OP_NONE;  /* Disable writeback. */
1436                break;
1437        case 0x63:              /* movsxd */
1438                if (ctxt->mode != X86EMUL_MODE_PROT64)
1439                        goto cannot_emulate;
1440                c->dst.val = (s32) c->src.val;
1441                break;
1442        case 0x68: /* push imm */
1443        case 0x6a: /* push imm8 */
1444                emulate_push(ctxt);
1445                break;
1446        case 0x6c:              /* insb */
1447        case 0x6d:              /* insw/insd */
1448                if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1449                                1,
1450                                (c->d & ByteOp) ? 1 : c->op_bytes,
1451                                c->rep_prefix ?
1452                                address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
1453                                (ctxt->eflags & EFLG_DF),
1454                                register_address(c, es_base(ctxt),
1455                                                 c->regs[VCPU_REGS_RDI]),
1456                                c->rep_prefix,
1457                                c->regs[VCPU_REGS_RDX]) == 0) {
1458                        c->eip = saved_eip;
1459                        return -1;
1460                }
1461                return 0;
1462        case 0x6e:              /* outsb */
1463        case 0x6f:              /* outsw/outsd */
1464                if (kvm_emulate_pio_string(ctxt->vcpu, NULL,
1465                                0,
1466                                (c->d & ByteOp) ? 1 : c->op_bytes,
1467                                c->rep_prefix ?
1468                                address_mask(c, c->regs[VCPU_REGS_RCX]) : 1,
1469                                (ctxt->eflags & EFLG_DF),
1470                                         register_address(c,
1471                                          seg_override_base(ctxt, c),
1472                                                 c->regs[VCPU_REGS_RSI]),
1473                                c->rep_prefix,
1474                                c->regs[VCPU_REGS_RDX]) == 0) {
1475                        c->eip = saved_eip;
1476                        return -1;
1477                }
1478                return 0;
1479        case 0x70 ... 0x7f: /* jcc (short) */ {
1480                int rel = insn_fetch(s8, 1, c->eip);
1481
1482                if (test_cc(c->b, ctxt->eflags))
1483                        jmp_rel(c, rel);
1484                break;
1485        }
1486        case 0x80 ... 0x83:     /* Grp1 */
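                    /* The ModRM reg field selects the ALU operation. */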
1487                switch (c->modrm_reg) {
1488                case 0:
1489                        goto add;
1490                case 1:
1491                        goto or;
1492                case 2:
1493                        goto adc;
1494                case 3:
1495                        goto sbb;
1496                case 4:
1497                        goto and;
1498                case 5:
1499                        goto sub;
1500                case 6:
1501                        goto xor;
1502                case 7:
1503                        goto cmp;
1504                }
1505                break;
1506        case 0x84 ... 0x85:
1507                emulate_2op_SrcV("test", c->src, c->dst, ctxt->eflags);
1508                break;
1509        case 0x86 ... 0x87:     /* xchg */
1510        xchg:
1511                /* Write back the register source. */
1512                switch (c->dst.bytes) {
1513                case 1:
1514                        *(u8 *) c->src.ptr = (u8) c->dst.val;
1515                        break;
1516                case 2:
1517                        *(u16 *) c->src.ptr = (u16) c->dst.val;
1518                        break;
1519                case 4:
1520                        *c->src.ptr = (u32) c->dst.val;
1521                        break;  /* 64b reg: zero-extend */
1522                case 8:
1523                        *c->src.ptr = c->dst.val;
1524                        break;
1525                }
1526                /*
1527                 * Write back the memory destination with implicit LOCK
1528                 * prefix.
1529                 */
1530                c->dst.val = c->src.val;
1531                c->lock_prefix = 1;
1532                break;
1533        case 0x88 ... 0x8b:     /* mov */
1534                goto mov;
1535        case 0x8c: { /* mov r/m, sreg */
1536                struct kvm_segment segreg;
1537
1538                if (c->modrm_reg <= 5)
1539                        kvm_get_segment(ctxt->vcpu, &segreg, c->modrm_reg);
1540                else {
1541                        printk(KERN_INFO "0x8c: Invalid segreg in modrm byte 0x%02x\n",
1542                               c->modrm);
1543                        goto cannot_emulate;
1544                }
1545                c->dst.val = segreg.selector;
1546                break;
1547        }
1548        case 0x8d: /* lea r16/r32, m */
1549                c->dst.val = c->modrm_ea;
1550                break;
1551        case 0x8e: { /* mov seg, r/m16 */
1552                uint16_t sel;
1553                int type_bits;
1554                int err;
1555
1556                sel = c->src.val;
1557                if (c->modrm_reg <= 5) {
1558                        type_bits = (c->modrm_reg == 1) ? 9 : 1;
1559                        err = kvm_load_segment_descriptor(ctxt->vcpu, sel,
1560                                                          type_bits, c->modrm_reg);
1561                } else {
1562                        printk(KERN_INFO "Invalid segreg in modrm byte 0x%02x\n",
1563                                        c->modrm);
1564                        goto cannot_emulate;
1565                }
1566
1567                if (err < 0)
1568                        goto cannot_emulate;
1569
1570                c->dst.type = OP_NONE;  /* Disable writeback. */
1571                break;
1572        }
1573        case 0x8f:              /* pop (sole member of Grp1a) */
1574                rc = emulate_grp1a(ctxt, ops);
1575                if (rc != 0)
1576                        goto done;
1577                break;
1578        case 0x90: /* nop / xchg r8,rax */
1579                if (!(c->rex_prefix & 1)) { /* nop */
1580                        c->dst.type = OP_NONE;
1581                        break;
1582                }
1583        case 0x91 ... 0x97: /* xchg reg,rax */
1584                c->src.type = c->dst.type = OP_REG;
1585                c->src.bytes = c->dst.bytes = c->op_bytes;
1586                c->src.ptr = (unsigned long *) &c->regs[VCPU_REGS_RAX];
1587                c->src.val = *(c->src.ptr);
1588                goto xchg;
1589        case 0x9c: /* pushf */
1590                c->src.val =  (unsigned long) ctxt->eflags;
1591                emulate_push(ctxt);
1592                break;
1593        case 0x9d: /* popf */
1594                c->dst.ptr = (unsigned long *) &ctxt->eflags;
1595                goto pop_instruction;
1596        case 0xa0 ... 0xa1:     /* mov */
1597                c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
1598                c->dst.val = c->src.val;
1599                break;
1600        case 0xa2 ... 0xa3:     /* mov */
1601                c->dst.val = (unsigned long)c->regs[VCPU_REGS_RAX];
1602                break;
1603        case 0xa4 ... 0xa5:     /* movs */
1604                c->dst.type = OP_MEM;
1605                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1606                c->dst.ptr = (unsigned long *)register_address(c,
1607                                                   es_base(ctxt),
1608                                                   c->regs[VCPU_REGS_RDI]);
1609                if ((rc = ops->read_emulated(register_address(c,
1610                                           seg_override_base(ctxt, c),
1611                                        c->regs[VCPU_REGS_RSI]),
1612                                        &c->dst.val,
1613                                        c->dst.bytes, ctxt->vcpu)) != 0)
1614                        goto done;
1615                register_address_increment(c, &c->regs[VCPU_REGS_RSI],
1616                                       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1617                                                           : c->dst.bytes);
1618                register_address_increment(c, &c->regs[VCPU_REGS_RDI],
1619                                       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1620                                                           : c->dst.bytes);
1621                break;
1622        case 0xa6 ... 0xa7:     /* cmps */
1623                c->src.type = OP_NONE; /* Disable writeback. */
1624                c->src.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1625                c->src.ptr = (unsigned long *)register_address(c,
1626                                       seg_override_base(ctxt, c),
1627                                                   c->regs[VCPU_REGS_RSI]);
1628                if ((rc = ops->read_emulated((unsigned long)c->src.ptr,
1629                                                &c->src.val,
1630                                                c->src.bytes,
1631                                                ctxt->vcpu)) != 0)
1632                        goto done;
1633
1634                c->dst.type = OP_NONE; /* Disable writeback. */
1635                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1636                c->dst.ptr = (unsigned long *)register_address(c,
1637                                                   es_base(ctxt),
1638                                                   c->regs[VCPU_REGS_RDI]);
1639                if ((rc = ops->read_emulated((unsigned long)c->dst.ptr,
1640                                                &c->dst.val,
1641                                                c->dst.bytes,
1642                                                ctxt->vcpu)) != 0)
1643                        goto done;
1644
1645                DPRINTF("cmps: mem1=0x%p mem2=0x%p\n", c->src.ptr, c->dst.ptr);
1646
1647                emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
1648
1649                register_address_increment(c, &c->regs[VCPU_REGS_RSI],
1650                                       (ctxt->eflags & EFLG_DF) ? -c->src.bytes
1651                                                                  : c->src.bytes);
1652                register_address_increment(c, &c->regs[VCPU_REGS_RDI],
1653                                       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1654                                                                  : c->dst.bytes);
1655
1656                break;
1657        case 0xaa ... 0xab:     /* stos */
1658                c->dst.type = OP_MEM;
1659                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1660                c->dst.ptr = (unsigned long *)register_address(c,
1661                                                   es_base(ctxt),
1662                                                   c->regs[VCPU_REGS_RDI]);
1663                c->dst.val = c->regs[VCPU_REGS_RAX];
1664                register_address_increment(c, &c->regs[VCPU_REGS_RDI],
1665                                       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1666                                                           : c->dst.bytes);
1667                break;
1668        case 0xac ... 0xad:     /* lods */
1669                c->dst.type = OP_REG;
1670                c->dst.bytes = (c->d & ByteOp) ? 1 : c->op_bytes;
1671                c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
1672                if ((rc = ops->read_emulated(register_address(c,
1673                                                 seg_override_base(ctxt, c),
1674                                                 c->regs[VCPU_REGS_RSI]),
1675                                                 &c->dst.val,
1676                                                 c->dst.bytes,
1677                                                 ctxt->vcpu)) != 0)
1678                        goto done;
1679                register_address_increment(c, &c->regs[VCPU_REGS_RSI],
1680                                       (ctxt->eflags & EFLG_DF) ? -c->dst.bytes
1681                                                           : c->dst.bytes);
1682                break;
1683        case 0xae ... 0xaf:     /* scas */
1684                DPRINTF("Urk! I don't handle SCAS.\n");
1685                goto cannot_emulate;
1686        case 0xb0 ... 0xbf: /* mov r, imm */
1687                goto mov;
1688        case 0xc0 ... 0xc1:
1689                emulate_grp2(ctxt);
1690                break;
1691        case 0xc3: /* ret */
1692                c->dst.ptr = &c->eip;
1693                goto pop_instruction;
1694        case 0xc6 ... 0xc7:     /* mov (sole member of Grp11) */
1695        mov:
1696                c->dst.val = c->src.val;
1697                break;
1698        case 0xd0 ... 0xd1:     /* Grp2 */
1699                c->src.val = 1;
1700                emulate_grp2(ctxt);
1701                break;
1702        case 0xd2 ... 0xd3:     /* Grp2 */
1703                c->src.val = c->regs[VCPU_REGS_RCX];
1704                emulate_grp2(ctxt);
1705                break;
1706        case 0xe4:      /* inb */
1707        case 0xe5:      /* in */
1708                port = insn_fetch(u8, 1, c->eip);
1709                io_dir_in = 1;
1710                goto do_io;
1711        case 0xe6: /* outb */
1712        case 0xe7: /* out */
1713                port = insn_fetch(u8, 1, c->eip);
1714                io_dir_in = 0;
1715                goto do_io;
1716        case 0xe8: /* call (near) */ {
1717                long int rel;
1718                switch (c->op_bytes) {
1719                case 2:
1720                        rel = insn_fetch(s16, 2, c->eip);
1721                        break;
1722                case 4:
1723                        rel = insn_fetch(s32, 4, c->eip);
1724                        break;
1725                default:
1726                        DPRINTF("Call: Invalid op_bytes\n");
1727                        goto cannot_emulate;
1728                }
1729                c->src.val = (unsigned long) c->eip;
1730                jmp_rel(c, rel);
1731                c->op_bytes = c->ad_bytes;
1732                emulate_push(ctxt);
1733                break;
1734        }
1735        case 0xe9: /* jmp rel */
1736                goto jmp;
1737        case 0xea: /* jmp far */ {
1738                uint32_t eip;
1739                uint16_t sel;
1740
1741                switch (c->op_bytes) {
1742                case 2:
1743                        eip = insn_fetch(u16, 2, c->eip);
1744                        break;
1745                case 4:
1746                        eip = insn_fetch(u32, 4, c->eip);
1747                        break;
1748                default:
1749                        DPRINTF("jmp far: Invalid op_bytes\n");
1750                        goto cannot_emulate;
1751                }
1752                sel = insn_fetch(u16, 2, c->eip);
1753                if (kvm_load_segment_descriptor(ctxt->vcpu, sel, 9, VCPU_SREG_CS) < 0) {
1754                        DPRINTF("jmp far: Failed to load CS descriptor\n");
1755                        goto cannot_emulate;
1756                }
1757
1758                c->eip = eip;
1759                break;
1760        }
1761        case 0xeb:
1762              jmp:              /* jmp rel short */
1763                jmp_rel(c, c->src.val);
1764                c->dst.type = OP_NONE; /* Disable writeback. */
1765                break;
1766        case 0xec: /* in al,dx */
1767        case 0xed: /* in (e/r)ax,dx */
1768                port = c->regs[VCPU_REGS_RDX];
1769                io_dir_in = 1;
1770                goto do_io;
1771        case 0xee: /* out al,dx */
1772        case 0xef: /* out (e/r)ax,dx */
1773                port = c->regs[VCPU_REGS_RDX];
1774                io_dir_in = 0;
1775        do_io:  if (kvm_emulate_pio(ctxt->vcpu, NULL, io_dir_in,
1776                                   (c->d & ByteOp) ? 1 : c->op_bytes,
1777                                   port) != 0) {
1778                        c->eip = saved_eip;
1779                        goto cannot_emulate;
1780                }
1781                return 0;
1782        case 0xf4:              /* hlt */
1783                ctxt->vcpu->arch.halt_request = 1;
1784                break;
1785        case 0xf5:      /* cmc */
1786                /* complement carry flag from eflags reg */
1787                ctxt->eflags ^= EFLG_CF;
1788                c->dst.type = OP_NONE;  /* Disable writeback. */
1789                break;
1790        case 0xf6 ... 0xf7:     /* Grp3 */
1791                rc = emulate_grp3(ctxt, ops);
1792                if (rc != 0)
1793                        goto done;
1794                break;
1795        case 0xf8: /* clc */
1796                ctxt->eflags &= ~EFLG_CF;
1797                c->dst.type = OP_NONE;  /* Disable writeback. */
1798                break;
1799        case 0xfa: /* cli */
1800                ctxt->eflags &= ~X86_EFLAGS_IF;
1801                c->dst.type = OP_NONE;  /* Disable writeback. */
1802                break;
1803        case 0xfb: /* sti */
1804                ctxt->eflags |= X86_EFLAGS_IF;
1805                c->dst.type = OP_NONE;  /* Disable writeback. */
1806                break;
1807        case 0xfc: /* cld */
1808                ctxt->eflags &= ~EFLG_DF;
1809                c->dst.type = OP_NONE;  /* Disable writeback. */
1810                break;
1811        case 0xfd: /* std */
1812                ctxt->eflags |= EFLG_DF;
1813                c->dst.type = OP_NONE;  /* Disable writeback. */
1814                break;
1815        case 0xfe ... 0xff:     /* Grp4/Grp5 */
1816                rc = emulate_grp45(ctxt, ops);
1817                if (rc != 0)
1818                        goto done;
1819                break;
1820        }
1821
1822writeback:
1823        rc = writeback(ctxt, ops);
1824        if (rc != 0)
1825                goto done;
1826
1827        /* Commit shadow register state. */
1828        memcpy(ctxt->vcpu->arch.regs, c->regs, sizeof c->regs);
1829        kvm_rip_write(ctxt->vcpu, c->eip);
1830
1831done:
1832        if (rc == X86EMUL_UNHANDLEABLE) {
1833                c->eip = saved_eip;
1834                return -1;
1835        }
1836        return 0;
1837
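    /* Two-byte (0x0f-prefixed) opcodes. */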
1838twobyte_insn:
1839        switch (c->b) {
1840        case 0x01: /* lgdt, lidt, lmsw */
1841                switch (c->modrm_reg) {
1842                        u16 size;
1843                        unsigned long address;
1844
1845                case 0: /* vmcall */
1846                        if (c->modrm_mod != 3 || c->modrm_rm != 1)
1847                                goto cannot_emulate;
1848
1849                        rc = kvm_fix_hypercall(ctxt->vcpu);
1850                        if (rc)
1851                                goto done;
1852
1853                        /* Let the processor re-execute the fixed hypercall */
1854                        c->eip = kvm_rip_read(ctxt->vcpu);
1855                        /* Disable writeback. */
1856                        c->dst.type = OP_NONE;
1857                        break;
1858                case 2: /* lgdt */
1859                        rc = read_descriptor(ctxt, ops, c->src.ptr,
1860                                             &size, &address, c->op_bytes);
1861                        if (rc)
1862                                goto done;
1863                        realmode_lgdt(ctxt->vcpu, size, address);
1864                        /* Disable writeback. */
1865                        c->dst.type = OP_NONE;
1866                        break;
1867                case 3: /* lidt/vmmcall */
1868                        if (c->modrm_mod == 3 && c->modrm_rm == 1) {
1869                                rc = kvm_fix_hypercall(ctxt->vcpu);
1870                                if (rc)
1871                                        goto done;
1872                                kvm_emulate_hypercall(ctxt->vcpu);
1873                        } else {
1874                                rc = read_descriptor(ctxt, ops, c->src.ptr,
1875                                                     &size, &address,
1876                                                     c->op_bytes);
1877                                if (rc)
1878                                        goto done;
1879                                realmode_lidt(ctxt->vcpu, size, address);
1880                        }
1881                        /* Disable writeback. */
1882                        c->dst.type = OP_NONE;
1883                        break;
1884                case 4: /* smsw */
1885                        c->dst.bytes = 2;
1886                        c->dst.val = realmode_get_cr(ctxt->vcpu, 0);
1887                        break;
1888                case 6: /* lmsw */
1889                        realmode_lmsw(ctxt->vcpu, (u16)c->src.val,
1890                                      &ctxt->eflags);
1891                        c->dst.type = OP_NONE;
1892                        break;
1893                case 7: /* invlpg */
1894                        emulate_invlpg(ctxt->vcpu, memop);
1895                        /* Disable writeback. */
1896                        c->dst.type = OP_NONE;
1897                        break;
1898                default:
1899                        goto cannot_emulate;
1900                }
1901                break;
1902        case 0x06:
1903                emulate_clts(ctxt->vcpu);
1904                c->dst.type = OP_NONE;
1905                break;
1906        case 0x08:              /* invd */
1907        case 0x09:              /* wbinvd */
1908        case 0x0d:              /* GrpP (prefetch) */
1909        case 0x18:              /* Grp16 (prefetch/nop) */
1910                c->dst.type = OP_NONE;
1911                break;
1912        case 0x20: /* mov cr, reg */
1913                if (c->modrm_mod != 3)
1914                        goto cannot_emulate;
1915                c->regs[c->modrm_rm] =
1916                                realmode_get_cr(ctxt->vcpu, c->modrm_reg);
1917                c->dst.type = OP_NONE;  /* no writeback */
1918                break;
1919        case 0x21: /* mov from dr to reg */
1920                if (c->modrm_mod != 3)
1921                        goto cannot_emulate;
1922                rc = emulator_get_dr(ctxt, c->modrm_reg, &c->regs[c->modrm_rm]);
1923                if (rc)
1924                        goto cannot_emulate;
1925                c->dst.type = OP_NONE;  /* no writeback */
1926                break;
1927        case 0x22: /* mov reg, cr */
1928                if (c->modrm_mod != 3)
1929                        goto cannot_emulate;
1930                realmode_set_cr(ctxt->vcpu,
1931                                c->modrm_reg, c->modrm_val, &ctxt->eflags);
1932                c->dst.type = OP_NONE;
1933                break;
1934        case 0x23: /* mov from reg to dr */
1935                if (c->modrm_mod != 3)
1936                        goto cannot_emulate;
1937                rc = emulator_set_dr(ctxt, c->modrm_reg,
1938                                     c->regs[c->modrm_rm]);
1939                if (rc)
1940                        goto cannot_emulate;
1941                c->dst.type = OP_NONE;  /* no writeback */
1942                break;
1943        case 0x30:
1944                /* wrmsr */
1945                msr_data = (u32)c->regs[VCPU_REGS_RAX]
1946                        | ((u64)c->regs[VCPU_REGS_RDX] << 32);
1947                rc = kvm_set_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], msr_data);
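                    /* On failure, inject #GP and restore eip so rip is not
                     * advanced past the faulting instruction. */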
1948                if (rc) {
1949                        kvm_inject_gp(ctxt->vcpu, 0);
1950                        c->eip = kvm_rip_read(ctxt->vcpu);
1951                }
1952                rc = X86EMUL_CONTINUE;
1953                c->dst.type = OP_NONE;
1954                break;
1955        case 0x32:
1956                /* rdmsr */
1957                rc = kvm_get_msr(ctxt->vcpu, c->regs[VCPU_REGS_RCX], &msr_data);
1958                if (rc) {
1959                        kvm_inject_gp(ctxt->vcpu, 0);
1960                        c->eip = kvm_rip_read(ctxt->vcpu);
1961                } else {
1962                        c->regs[VCPU_REGS_RAX] = (u32)msr_data;
1963                        c->regs[VCPU_REGS_RDX] = msr_data >> 32;
1964                }
1965                rc = X86EMUL_CONTINUE;
1966                c->dst.type = OP_NONE;
1967                break;
1968        case 0x40 ... 0x4f:     /* cmov */
1969                c->dst.val = c->dst.orig_val = c->src.val;
1970                if (!test_cc(c->b, ctxt->eflags))
1971                        c->dst.type = OP_NONE; /* no writeback */
1972                break;
1973        case 0x80 ... 0x8f: /* jnz rel, etc*/ {
1974                long int rel;
1975
1976                switch (c->op_bytes) {
1977                case 2:
1978                        rel = insn_fetch(s16, 2, c->eip);
1979                        break;
1980                case 4:
1981                        rel = insn_fetch(s32, 4, c->eip);
1982                        break;
1983                case 8:
1984                        rel = insn_fetch(s64, 8, c->eip);
1985                        break;
1986                default:
1987                        DPRINTF("jnz: Invalid op_bytes\n");
1988                        goto cannot_emulate;
1989                }
1990                if (test_cc(c->b, ctxt->eflags))
1991                        jmp_rel(c, rel);
1992                c->dst.type = OP_NONE;
1993                break;
1994        }
1995        case 0xa3:
1996              bt:               /* bt */
1997                c->dst.type = OP_NONE;
1998                /* only subword offset */
1999                c->src.val &= (c->dst.bytes << 3) - 1;
2000                emulate_2op_SrcV_nobyte("bt", c->src, c->dst, ctxt->eflags);
2001                break;
2002        case 0xab:
2003              bts:              /* bts */
2004                /* only subword offset */
2005                c->src.val &= (c->dst.bytes << 3) - 1;
2006                emulate_2op_SrcV_nobyte("bts", c->src, c->dst, ctxt->eflags);
2007                break;
2008        case 0xae:              /* clflush */
2009                break;
2010        case 0xb0 ... 0xb1:     /* cmpxchg */
2011                /*
2012                 * Save real source value, then compare EAX against
2013                 * destination.
2014                 */
2015                c->src.orig_val = c->src.val;
2016                c->src.val = c->regs[VCPU_REGS_RAX];
2017                emulate_2op_SrcV("cmp", c->src, c->dst, ctxt->eflags);
2018                if (ctxt->eflags & EFLG_ZF) {
2019                        /* Success: write back to memory. */
2020                        c->dst.val = c->src.orig_val;
2021                } else {
2022                        /* Failure: write the value we saw to EAX. */
2023                        c->dst.type = OP_REG;
2024                        c->dst.ptr = (unsigned long *)&c->regs[VCPU_REGS_RAX];
2025                }
2026                break;
2027        case 0xb3:
2028              btr:              /* btr */
2029                /* only subword offset */
2030                c->src.val &= (c->dst.bytes << 3) - 1;
2031                emulate_2op_SrcV_nobyte("btr", c->src, c->dst, ctxt->eflags);
2032                break;
2033        case 0xb6 ... 0xb7:     /* movzx */
2034                c->dst.bytes = c->op_bytes;
2035                c->dst.val = (c->d & ByteOp) ? (u8) c->src.val
2036                                                       : (u16) c->src.val;
2037                break;
2038        case 0xba:              /* Grp8 */
2039                switch (c->modrm_reg & 3) {
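                    /* The low two bits of the reg field select bt/bts/btr/btc. */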
2040                case 0:
2041                        goto bt;
2042                case 1:
2043                        goto bts;
2044                case 2:
2045                        goto btr;
2046                case 3:
2047                        goto btc;
2048                }
2049                break;
2050        case 0xbb:
2051              btc:              /* btc */
2052                /* only subword offset */
2053                c->src.val &= (c->dst.bytes << 3) - 1;
2054                emulate_2op_SrcV_nobyte("btc", c->src, c->dst, ctxt->eflags);
2055                break;
2056        case 0xbe ... 0xbf:     /* movsx */
2057                c->dst.bytes = c->op_bytes;
2058                c->dst.val = (c->d & ByteOp) ? (s8) c->src.val :
2059                                                        (s16) c->src.val;
2060                break;
2061        case 0xc3:              /* movnti */
2062                c->dst.bytes = c->op_bytes;
2063                c->dst.val = (c->op_bytes == 4) ? (u32) c->src.val :
2064                                                        (u64) c->src.val;
2065                break;
2066        case 0xc7:              /* Grp9 (cmpxchg8b) */
2067                rc = emulate_grp9(ctxt, ops, memop);
2068                if (rc != 0)
2069                        goto done;
2070                c->dst.type = OP_NONE;
2071                break;
2072        }
2073        goto writeback;
2074
2075cannot_emulate:
2076        DPRINTF("Cannot emulate %02x\n", c->b);
2077        c->eip = saved_eip;
2078        return -1;
2079}
2080