Disable the debug registers on 486+.
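For context, the (dr[7] & 0xFF) test that gates the fast memory paths throughout this diff reads the low byte of DR7, i.e. the L0/G0 through L3/G3 breakpoint-enable bits, so the checked slow path is taken whenever any hardware breakpoint is armed. A minimal sketch, assuming a helper of this shape (the name is hypothetical, not part of the commit):

/* Sketch only: true when any of the eight DR7 enable bits (L0,G0 .. L3,G3) is set. */
static inline int
debug_breakpoints_armed(const uint32_t *dr)
{
    return (dr[7] & 0xFF) != 0;
}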
@@ -50,80 +50,80 @@
|
||||
# define do_mmut_ww(s, a, b) do_mmutranslate_2386((s) + (a), b, 2, 1)
|
||||
# define do_mmut_wl(s, a, b) do_mmutranslate_2386((s) + (a), b, 4, 1)
|
||||
#else
|
||||
# define readmemb_n(s, a, b) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF)) ? readmembl_no_mmut((s) + (a), b) : *(uint8_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))))
|
||||
# define readmemw_n(s, a, b) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF) || (((s) + (a)) & 1)) ? readmemwl_no_mmut((s) + (a), b) : *(uint16_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uint32_t) ((s) + (a))))
|
||||
# define readmeml_n(s, a, b) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF) || (((s) + (a)) & 3)) ? readmemll_no_mmut((s) + (a), b) : *(uint32_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uint32_t) ((s) + (a))))
|
||||
# define readmemb(s, a) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF)) ? readmembl((s) + (a)) : *(uint8_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))))
|
||||
# define readmemw(s, a) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF) || (((s) + (a)) & 1)) ? readmemwl((s) + (a)) : *(uint16_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uint32_t) ((s) + (a))))
|
||||
# define readmeml(s, a) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF) || (((s) + (a)) & 3)) ? readmemll((s) + (a)) : *(uint32_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uint32_t) ((s) + (a))))
|
||||
# define readmemq(s, a) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF) || (((s) + (a)) & 7)) ? readmemql((s) + (a)) : *(uint64_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))))
|
||||
# define readmemb_n(s, a, b) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF) ? readmembl_no_mmut((s) + (a), b) : *(uint8_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))))
|
||||
# define readmemw_n(s, a, b) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1)) ? readmemwl_no_mmut((s) + (a), b) : *(uint16_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uint32_t) ((s) + (a))))
|
||||
# define readmeml_n(s, a, b) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3)) ? readmemll_no_mmut((s) + (a), b) : *(uint32_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uint32_t) ((s) + (a))))
|
||||
# define readmemb(s, a) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF) ? readmembl((s) + (a)) : *(uint8_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))))
|
||||
# define readmemw(s, a) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1)) ? readmemwl((s) + (a)) : *(uint16_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uint32_t) ((s) + (a))))
|
||||
# define readmeml(s, a) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3)) ? readmemll((s) + (a)) : *(uint32_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uint32_t) ((s) + (a))))
|
||||
# define readmemq(s, a) ((readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 7)) ? readmemql((s) + (a)) : *(uint64_t *) (readlookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))))
|
||||
|
||||
# define writememb_n(s, a, b, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF)) \
|
||||
writemembl_no_mmut((s) + (a), b, v); \
|
||||
else \
|
||||
# define writememb_n(s, a, b, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF) \
|
||||
writemembl_no_mmut((s) + (a), b, v); \
|
||||
else \
|
||||
*(uint8_t *) (writelookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))) = v
|
||||
# define writememw_n(s, a, b, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1) || (dr[7] & 0xFF)) \
|
||||
writememwl_no_mmut((s) + (a), b, v); \
|
||||
else \
|
||||
# define writememw_n(s, a, b, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1)) \
|
||||
writememwl_no_mmut((s) + (a), b, v); \
|
||||
else \
|
||||
*(uint16_t *) (writelookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))) = v
|
||||
# define writememl_n(s, a, b, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3) || (dr[7] & 0xFF)) \
|
||||
writememll_no_mmut((s) + (a), b, v); \
|
||||
else \
|
||||
# define writememl_n(s, a, b, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3)) \
|
||||
writememll_no_mmut((s) + (a), b, v); \
|
||||
else \
|
||||
*(uint32_t *) (writelookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))) = v
|
||||
# define writememb(s, a, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF)) \
|
||||
writemembl((s) + (a), v); \
|
||||
else \
|
||||
# define writememb(s, a, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF) \
|
||||
writemembl((s) + (a), v); \
|
||||
else \
|
||||
*(uint8_t *) (writelookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))) = v
|
||||
# define writememw(s, a, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1) || (dr[7] & 0xFF)) \
|
||||
writememwl((s) + (a), v); \
|
||||
else \
|
||||
# define writememw(s, a, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1)) \
|
||||
writememwl((s) + (a), v); \
|
||||
else \
|
||||
*(uint16_t *) (writelookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))) = v
|
||||
# define writememl(s, a, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3) || (dr[7] & 0xFF)) \
|
||||
writememll((s) + (a), v); \
|
||||
else \
|
||||
# define writememl(s, a, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3)) \
|
||||
writememll((s) + (a), v); \
|
||||
else \
|
||||
*(uint32_t *) (writelookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))) = v
|
||||
# define writememq(s, a, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 7) || (dr[7] & 0xFF)) \
|
||||
writememql((s) + (a), v); \
|
||||
else \
|
||||
# define writememq(s, a, v) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 7)) \
|
||||
writememql((s) + (a), v); \
|
||||
else \
|
||||
*(uint64_t *) (writelookup2[(uint32_t) ((s) + (a)) >> 12] + (uintptr_t) ((s) + (a))) = v
|
||||
|
||||
# define do_mmut_rb(s, a, b) \
|
||||
if (readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF)) \
|
||||
# define do_mmut_rb(s, a, b) \
|
||||
if (readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF) \
|
||||
do_mmutranslate((s) + (a), b, 1, 0)
|
||||
# define do_mmut_rw(s, a, b) \
|
||||
if (readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1) || (dr[7] & 0xFF)) \
|
||||
# define do_mmut_rw(s, a, b) \
|
||||
if (readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1)) \
|
||||
do_mmutranslate((s) + (a), b, 2, 0)
|
||||
# define do_mmut_rl(s, a, b) \
|
||||
if (readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3) || (dr[7] & 0xFF)) \
|
||||
# define do_mmut_rl(s, a, b) \
|
||||
if (readlookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3)) \
|
||||
do_mmutranslate((s) + (a), b, 4, 0)
|
||||
# define do_mmut_rb2(s, a, b) \
|
||||
old_rl2 = readlookup2[(uint32_t) ((s) + (a)) >> 12]; \
|
||||
if (old_rl2 == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF)) \
|
||||
# define do_mmut_rb2(s, a, b) \
|
||||
old_rl2 = readlookup2[(uint32_t) ((s) + (a)) >> 12]; \
|
||||
if (old_rl2 == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF) \
|
||||
do_mmutranslate((s) + (a), b, 1, 0)
|
||||
# define do_mmut_rw2(s, a, b) \
|
||||
old_rl2 = readlookup2[(uint32_t) ((s) + (a)) >> 12]; \
|
||||
if (old_rl2 == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1) || (dr[7] & 0xFF)) \
|
||||
# define do_mmut_rw2(s, a, b) \
|
||||
old_rl2 = readlookup2[(uint32_t) ((s) + (a)) >> 12]; \
|
||||
if (old_rl2 == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1)) \
|
||||
do_mmutranslate((s) + (a), b, 2, 0)
|
||||
# define do_mmut_rl2(s, a, b) \
|
||||
old_rl2 = readlookup2[(uint32_t) ((s) + (a)) >> 12]; \
|
||||
if (old_rl2 == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3) || (dr[7] & 0xFF)) \
|
||||
# define do_mmut_rl2(s, a, b) \
|
||||
old_rl2 = readlookup2[(uint32_t) ((s) + (a)) >> 12]; \
|
||||
if (old_rl2 == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3)) \
|
||||
do_mmutranslate((s) + (a), b, 4, 0)
|
||||
|
||||
# define do_mmut_wb(s, a, b) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (dr[7] & 0xFF)) \
|
||||
# define do_mmut_wb(s, a, b) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF) \
|
||||
do_mmutranslate((s) + (a), b, 1, 1)
|
||||
# define do_mmut_ww(s, a, b) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1) || (dr[7] & 0xFF)) \
|
||||
# define do_mmut_ww(s, a, b) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 1)) \
|
||||
do_mmutranslate((s) + (a), b, 2, 1)
|
||||
# define do_mmut_wl(s, a, b) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3) || (dr[7] & 0xFF)) \
|
||||
# define do_mmut_wl(s, a, b) \
|
||||
if (writelookup2[(uint32_t) ((s) + (a)) >> 12] == (uintptr_t) LOOKUP_INV || (s) == 0xFFFFFFFF || (((s) + (a)) & 3)) \
|
||||
do_mmutranslate((s) + (a), b, 4, 1)
|
||||
#endif
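These macros all share one shape: go through the host pointer cached in readlookup2/writelookup2 when it is valid, and fall back to the full, checked access path otherwise. A condensed sketch of the byte-read case, for the variant that still carries the debug-register term (illustrative only, not the literal macro expansion; names follow the surrounding code):

/* Sketch of the readmemb pattern above. */
static inline uint8_t
readmemb_sketch(uint32_t seg_base, uint32_t ofs)
{
    uint32_t  addr = seg_base + ofs;
    uintptr_t host = readlookup2[addr >> 12];

    if (host == (uintptr_t) LOOKUP_INV || seg_base == 0xFFFFFFFF || (dr[7] & 0xFF))
        return readmembl(addr);                    /* slow path: MMU translation and debug checks */

    return *(uint8_t *) (host + (uintptr_t) addr); /* fast path: direct host memory access */
}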
|
||||
|
||||
|
@@ -223,7 +223,7 @@ fetch_ea_16_long(uint32_t rmdat)
|
||||
|
||||
#include "386_ops.h"
|
||||
|
||||
#define CACHE_ON() (!(cr0 & (1 << 30)) && !(cpu_state.flags & T_FLAG) && !(dr[7] & 0xFF))
|
||||
#define CACHE_ON() (!(cr0 & (1 << 30)) && !(cpu_state.flags & T_FLAG))
|
||||
|
||||
#ifdef USE_DYNAREC
|
||||
int32_t cycles_main = 0;
|
||||
@@ -268,7 +268,7 @@ exec386_dynarec_int(void)
|
||||
cpu_block_end = 0;
|
||||
x86_was_reset = 0;
|
||||
|
||||
if (trap & 2) {
|
||||
if (trap == 2) {
|
||||
/* Handle the T bit in the new TSS first. */
|
||||
CPU_BLOCK_END();
|
||||
goto block_ended;
|
||||
@@ -285,11 +285,6 @@ exec386_dynarec_int(void)
|
||||
cpu_state.ea_seg = &cpu_state.seg_ds;
|
||||
cpu_state.ssegs = 0;
|
||||
|
||||
if (UNLIKELY(cpu_386_check_instruction_fault())) {
|
||||
x86gen();
|
||||
goto block_ended;
|
||||
}
|
||||
|
||||
fetchdat = fastreadl_fetch(cs + cpu_state.pc);
|
||||
# ifdef ENABLE_386_DYNAREC_LOG
|
||||
if (in_smm)
|
||||
@@ -300,7 +295,7 @@ exec386_dynarec_int(void)
|
||||
opcode = fetchdat & 0xFF;
|
||||
fetchdat >>= 8;
|
||||
|
||||
trap |= !!(cpu_state.flags & T_FLAG);
|
||||
trap = cpu_state.flags & T_FLAG;
|
||||
|
||||
cpu_state.pc++;
|
||||
x86_opcodes[(opcode | cpu_state.op32) & 0x3ff](fetchdat);
|
||||
@@ -311,14 +306,6 @@ exec386_dynarec_int(void)
|
||||
cpu_state.pc &= 0xffff;
|
||||
# endif
|
||||
|
||||
if (!cpu_state.abrt) {
|
||||
if (!rf_flag_no_clear) {
|
||||
cpu_state.eflags &= ~RF_FLAG;
|
||||
}
|
||||
|
||||
rf_flag_no_clear = 0;
|
||||
}
|
||||
|
||||
if (((cs + cpu_state.pc) >> 12) != pccache)
|
||||
CPU_BLOCK_END();
|
||||
|
||||
@@ -342,10 +329,7 @@ exec386_dynarec_int(void)
|
||||
|
||||
block_ended:
|
||||
if (!cpu_state.abrt && trap) {
|
||||
//pclog("Debug trap 0x%X\n", trap);
|
||||
if (trap & 2) dr[6] |= 0x8000;
|
||||
if (trap & 1) dr[6] |= 0x4000;
|
||||
|
||||
dr[6] |= (trap == 2) ? 0x8000 : 0x4000;
|
||||
trap = 0;
|
||||
# ifndef USE_NEW_DYNAREC
|
||||
oldcs = CS;
|
||||
@@ -857,11 +841,6 @@ exec386(int32_t cycs)
|
||||
cpu_state.ea_seg = &cpu_state.seg_ds;
|
||||
cpu_state.ssegs = 0;
|
||||
|
||||
if (UNLIKELY(cpu_386_check_instruction_fault())) {
|
||||
x86gen();
|
||||
goto block_ended;
|
||||
}
|
||||
|
||||
fetchdat = fastreadl_fetch(cs + cpu_state.pc);
|
||||
|
||||
if (!cpu_state.abrt) {
|
||||
@@ -871,7 +850,7 @@ exec386(int32_t cycs)
|
||||
#endif
|
||||
opcode = fetchdat & 0xFF;
|
||||
fetchdat >>= 8;
|
||||
trap |= !!(cpu_state.flags & T_FLAG);
|
||||
trap = cpu_state.flags & T_FLAG;
|
||||
|
||||
cpu_state.pc++;
|
||||
x86_opcodes[(opcode | cpu_state.op32) & 0x3ff](fetchdat);
|
||||
@@ -891,7 +870,6 @@ exec386(int32_t cycs)
|
||||
if (cpu_end_block_after_ins)
|
||||
cpu_end_block_after_ins--;
|
||||
|
||||
block_ended:
|
||||
if (cpu_state.abrt) {
|
||||
flags_rebuild();
|
||||
tempi = cpu_state.abrt & ABRT_MASK;
|
||||
@@ -916,15 +894,12 @@ block_ended:
|
||||
}
|
||||
} else if (trap) {
|
||||
flags_rebuild();
|
||||
if (trap & 1)
|
||||
dr[6] |= 0x4000;
|
||||
if (trap & 2)
|
||||
dr[6] |= 0x8000;
|
||||
trap = 0;
|
||||
#ifndef USE_NEW_DYNAREC
|
||||
oldcs = CS;
|
||||
#endif
|
||||
cpu_state.oldpc = cpu_state.pc;
|
||||
dr[6] |= 0x4000;
|
||||
x86_int(1);
|
||||
}
|
||||
|
||||
|
@@ -181,7 +181,11 @@ extern void x386_dynarec_log(const char *fmt, ...);
|
||||
#ifndef OPS_286_386
|
||||
# include "x86_ops_cyrix.h"
|
||||
#endif
|
||||
#include "x86_ops_flag.h"
|
||||
#ifdef OPS_286_386
|
||||
# include "x86_ops_flag_2386.h"
|
||||
#else
|
||||
# include "x86_ops_flag.h"
|
||||
#endif
|
||||
#include "x86_ops_fpu.h"
|
||||
#include "x86_ops_inc_dec.h"
|
||||
#include "x86_ops_int.h"
|
||||
@@ -200,7 +204,11 @@ extern void x386_dynarec_log(const char *fmt, ...);
|
||||
# include "x86_ops_mmx_shift.h"
|
||||
#endif
|
||||
#include "x86_ops_mov.h"
|
||||
#include "x86_ops_mov_ctrl.h"
|
||||
#ifdef OPS_286_386
|
||||
# include "x86_ops_mov_ctrl_2386.h"
|
||||
#else
|
||||
# include "x86_ops_mov_ctrl.h"
|
||||
#endif
|
||||
#include "x86_ops_mov_seg.h"
|
||||
#include "x86_ops_movx.h"
|
||||
#ifndef OPS_286_386
|
||||
@@ -218,7 +226,11 @@ extern void x386_dynarec_log(const char *fmt, ...);
|
||||
# include "x86_ops_rep.h"
|
||||
# endif
|
||||
#endif
|
||||
#include "x86_ops_ret.h"
|
||||
#ifdef OPS_286_386
|
||||
# include "x86_ops_ret_2386.h"
|
||||
#else
|
||||
# include "x86_ops_ret.h"
|
||||
#endif
|
||||
#include "x86_ops_set.h"
|
||||
#include "x86_ops_stack.h"
|
||||
#ifdef OPS_286_386
|
||||
|
@@ -178,7 +178,6 @@ opPOPF_186(uint32_t fetchdat)
|
||||
else
|
||||
cpu_state.flags = (cpu_state.flags & 0x3200) | (tempw & 0x4dd5) | 2;
|
||||
flags_extract();
|
||||
rf_flag_no_clear = 1;
|
||||
|
||||
CLOCK_CYCLES(5);
|
||||
PREFETCH_RUN(5, 1, -1, 1, 0, 0, 0, 0);
|
||||
@@ -212,7 +211,6 @@ opPOPF_286(uint32_t fetchdat)
|
||||
else
|
||||
cpu_state.flags = (cpu_state.flags & 0x3200) | (tempw & 0x4dd5) | 2;
|
||||
flags_extract();
|
||||
rf_flag_no_clear = 1;
|
||||
|
||||
CLOCK_CYCLES(5);
|
||||
PREFETCH_RUN(5, 1, -1, 1, 0, 0, 0, 0);
|
||||
@@ -266,7 +264,6 @@ opPOPF(uint32_t fetchdat)
|
||||
cpu_state.flags = (cpu_state.flags & 0x3200) | (tempw & 0x4dd5) | 2;
|
||||
}
|
||||
flags_extract();
|
||||
rf_flag_no_clear = 1;
|
||||
|
||||
CLOCK_CYCLES(5);
|
||||
PREFETCH_RUN(5, 1, -1, 1, 0, 0, 0, 0);
|
||||
@@ -310,7 +307,6 @@ opPOPFD(uint32_t fetchdat)
|
||||
cpu_state.eflags = (templ >> 16) & 3;
|
||||
|
||||
flags_extract();
|
||||
rf_flag_no_clear = 1;
|
||||
|
||||
CLOCK_CYCLES(5);
|
||||
PREFETCH_RUN(5, 1, -1, 0, 1, 0, 0, 0);
|
||||
|
323 src/cpu/x86_ops_flag_2386.h (new file)
@@ -0,0 +1,323 @@
|
||||
static int
|
||||
opCMC(uint32_t fetchdat)
|
||||
{
|
||||
flags_rebuild();
|
||||
cpu_state.flags ^= C_FLAG;
|
||||
CLOCK_CYCLES(2);
|
||||
PREFETCH_RUN(2, 1, -1, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
opCLC(uint32_t fetchdat)
|
||||
{
|
||||
flags_rebuild();
|
||||
cpu_state.flags &= ~C_FLAG;
|
||||
CLOCK_CYCLES(2);
|
||||
PREFETCH_RUN(2, 1, -1, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opCLD(uint32_t fetchdat)
|
||||
{
|
||||
cpu_state.flags &= ~D_FLAG;
|
||||
CLOCK_CYCLES(2);
|
||||
PREFETCH_RUN(2, 1, -1, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opCLI(uint32_t fetchdat)
|
||||
{
|
||||
if (!IOPLp) {
|
||||
if ((!(cpu_state.eflags & VM_FLAG) && (cr4 & CR4_PVI)) || ((cpu_state.eflags & VM_FLAG) && (cr4 & CR4_VME))) {
|
||||
cpu_state.eflags &= ~VIF_FLAG;
|
||||
} else {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
} else
|
||||
cpu_state.flags &= ~I_FLAG;
|
||||
|
||||
CLOCK_CYCLES(3);
|
||||
PREFETCH_RUN(3, 1, -1, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
opSTC(uint32_t fetchdat)
|
||||
{
|
||||
flags_rebuild();
|
||||
cpu_state.flags |= C_FLAG;
|
||||
CLOCK_CYCLES(2);
|
||||
PREFETCH_RUN(2, 1, -1, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opSTD(uint32_t fetchdat)
|
||||
{
|
||||
cpu_state.flags |= D_FLAG;
|
||||
CLOCK_CYCLES(2);
|
||||
PREFETCH_RUN(2, 1, -1, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opSTI(uint32_t fetchdat)
|
||||
{
|
||||
if (!IOPLp) {
|
||||
if ((!(cpu_state.eflags & VM_FLAG) && (cr4 & CR4_PVI)) || ((cpu_state.eflags & VM_FLAG) && (cr4 & CR4_VME))) {
|
||||
if (cpu_state.eflags & VIP_FLAG) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
} else
|
||||
cpu_state.eflags |= VIF_FLAG;
|
||||
} else {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
} else
|
||||
cpu_state.flags |= I_FLAG;
|
||||
|
||||
/*First instruction after STI will always execute, regardless of whether
|
||||
there is a pending interrupt*/
|
||||
cpu_end_block_after_ins = 2;
|
||||
|
||||
CLOCK_CYCLES(2);
|
||||
PREFETCH_RUN(2, 1, -1, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
opSAHF(uint32_t fetchdat)
|
||||
{
|
||||
flags_rebuild();
|
||||
cpu_state.flags = (cpu_state.flags & 0xff00) | (AH & 0xd5) | 2;
|
||||
CLOCK_CYCLES(3);
|
||||
PREFETCH_RUN(3, 1, -1, 0, 0, 0, 0, 0);
|
||||
|
||||
#if (defined(USE_DYNAREC) && defined(USE_NEW_DYNAREC))
|
||||
codegen_flags_changed = 0;
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opLAHF(uint32_t fetchdat)
|
||||
{
|
||||
flags_rebuild();
|
||||
AH = cpu_state.flags & 0xff;
|
||||
CLOCK_CYCLES(3);
|
||||
PREFETCH_RUN(3, 1, -1, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
opPUSHF(uint32_t fetchdat)
|
||||
{
|
||||
if ((cpu_state.eflags & VM_FLAG) && (IOPL < 3)) {
|
||||
if (cr4 & CR4_VME) {
|
||||
uint16_t temp;
|
||||
|
||||
flags_rebuild();
|
||||
temp = (cpu_state.flags & ~I_FLAG) | 0x3000;
|
||||
if (cpu_state.eflags & VIF_FLAG)
|
||||
temp |= I_FLAG;
|
||||
PUSH_W(temp);
|
||||
} else {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
} else {
|
||||
flags_rebuild();
|
||||
PUSH_W(cpu_state.flags);
|
||||
}
|
||||
CLOCK_CYCLES(4);
|
||||
PREFETCH_RUN(4, 1, -1, 0, 0, 1, 0, 0);
|
||||
return cpu_state.abrt;
|
||||
}
|
||||
static int
|
||||
opPUSHFD(uint32_t fetchdat)
|
||||
{
|
||||
uint16_t tempw;
|
||||
if ((cpu_state.eflags & VM_FLAG) && (IOPL < 3)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
if (cpu_CR4_mask & CR4_VME)
|
||||
tempw = cpu_state.eflags & 0x3c;
|
||||
else if (CPUID)
|
||||
tempw = cpu_state.eflags & 0x24;
|
||||
else
|
||||
tempw = cpu_state.eflags & 4;
|
||||
flags_rebuild();
|
||||
PUSH_L(cpu_state.flags | (tempw << 16));
|
||||
CLOCK_CYCLES(4);
|
||||
PREFETCH_RUN(4, 1, -1, 0, 0, 0, 1, 0);
|
||||
return cpu_state.abrt;
|
||||
}
|
||||
|
||||
static int
|
||||
opPOPF_186(uint32_t fetchdat)
|
||||
{
|
||||
uint16_t tempw;
|
||||
|
||||
if ((cpu_state.eflags & VM_FLAG) && (IOPL < 3)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
|
||||
tempw = POP_W();
|
||||
if (cpu_state.abrt)
|
||||
return 1;
|
||||
|
||||
if (!(msw & 1))
|
||||
cpu_state.flags = (cpu_state.flags & 0x7000) | (tempw & 0x0fd5) | 2;
|
||||
else if (!(CPL))
|
||||
cpu_state.flags = (tempw & 0x7fd5) | 2;
|
||||
else if (IOPLp)
|
||||
cpu_state.flags = (cpu_state.flags & 0x3000) | (tempw & 0x4fd5) | 2;
|
||||
else
|
||||
cpu_state.flags = (cpu_state.flags & 0x3200) | (tempw & 0x4dd5) | 2;
|
||||
flags_extract();
|
||||
rf_flag_no_clear = 1;
|
||||
|
||||
CLOCK_CYCLES(5);
|
||||
PREFETCH_RUN(5, 1, -1, 1, 0, 0, 0, 0);
|
||||
|
||||
#if (defined(USE_DYNAREC) && defined(USE_NEW_DYNAREC))
|
||||
codegen_flags_changed = 0;
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opPOPF_286(uint32_t fetchdat)
|
||||
{
|
||||
uint16_t tempw;
|
||||
|
||||
if ((cpu_state.eflags & VM_FLAG) && (IOPL < 3)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
|
||||
tempw = POP_W();
|
||||
if (cpu_state.abrt)
|
||||
return 1;
|
||||
|
||||
if (!(msw & 1))
|
||||
cpu_state.flags = (cpu_state.flags & 0x7000) | (tempw & 0x0fd5) | 2;
|
||||
else if (!(CPL))
|
||||
cpu_state.flags = (tempw & 0x7fd5) | 2;
|
||||
else if (IOPLp)
|
||||
cpu_state.flags = (cpu_state.flags & 0x3000) | (tempw & 0x4fd5) | 2;
|
||||
else
|
||||
cpu_state.flags = (cpu_state.flags & 0x3200) | (tempw & 0x4dd5) | 2;
|
||||
flags_extract();
|
||||
rf_flag_no_clear = 1;
|
||||
|
||||
CLOCK_CYCLES(5);
|
||||
PREFETCH_RUN(5, 1, -1, 1, 0, 0, 0, 0);
|
||||
|
||||
#if (defined(USE_DYNAREC) && defined(USE_NEW_DYNAREC))
|
||||
codegen_flags_changed = 0;
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opPOPF(uint32_t fetchdat)
|
||||
{
|
||||
uint16_t tempw;
|
||||
|
||||
if ((cpu_state.eflags & VM_FLAG) && (IOPL < 3)) {
|
||||
if (cr4 & CR4_VME) {
|
||||
uint32_t old_esp = ESP;
|
||||
|
||||
tempw = POP_W();
|
||||
if (cpu_state.abrt) {
|
||||
|
||||
ESP = old_esp;
|
||||
return 1;
|
||||
}
|
||||
|
||||
if ((tempw & T_FLAG) || ((tempw & I_FLAG) && (cpu_state.eflags & VIP_FLAG))) {
|
||||
ESP = old_esp;
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
if (tempw & I_FLAG)
|
||||
cpu_state.eflags |= VIF_FLAG;
|
||||
else
|
||||
cpu_state.eflags &= ~VIF_FLAG;
|
||||
cpu_state.flags = (cpu_state.flags & 0x3200) | (tempw & 0x4dd5) | 2;
|
||||
} else {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
} else {
|
||||
tempw = POP_W();
|
||||
if (cpu_state.abrt)
|
||||
return 1;
|
||||
|
||||
if (!(CPL) || !(msw & 1))
|
||||
cpu_state.flags = (tempw & 0x7fd5) | 2;
|
||||
else if (IOPLp)
|
||||
cpu_state.flags = (cpu_state.flags & 0x3000) | (tempw & 0x4fd5) | 2;
|
||||
else
|
||||
cpu_state.flags = (cpu_state.flags & 0x3200) | (tempw & 0x4dd5) | 2;
|
||||
}
|
||||
flags_extract();
|
||||
rf_flag_no_clear = 1;
|
||||
|
||||
CLOCK_CYCLES(5);
|
||||
PREFETCH_RUN(5, 1, -1, 1, 0, 0, 0, 0);
|
||||
|
||||
#if (defined(USE_DYNAREC) && defined(USE_NEW_DYNAREC))
|
||||
codegen_flags_changed = 0;
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opPOPFD(uint32_t fetchdat)
|
||||
{
|
||||
uint32_t templ;
|
||||
|
||||
if ((cpu_state.eflags & VM_FLAG) && (IOPL < 3)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
|
||||
templ = POP_L();
|
||||
if (cpu_state.abrt)
|
||||
return 1;
|
||||
|
||||
if (!(CPL) || !(msw & 1))
|
||||
cpu_state.flags = (templ & 0x7fd5) | 2;
|
||||
else if (IOPLp)
|
||||
cpu_state.flags = (cpu_state.flags & 0x3000) | (templ & 0x4fd5) | 2;
|
||||
else
|
||||
cpu_state.flags = (cpu_state.flags & 0x3200) | (templ & 0x4dd5) | 2;
|
||||
|
||||
templ &= (is486 || isibm486) ? 0x3c0000 : 0;
|
||||
templ |= ((cpu_state.eflags & 3) << 16);
|
||||
if (cpu_CR4_mask & CR4_VME)
|
||||
cpu_state.eflags = (templ >> 16) & 0x3f;
|
||||
else if (CPUID)
|
||||
cpu_state.eflags = (templ >> 16) & 0x27;
|
||||
else if (is486 || isibm486)
|
||||
cpu_state.eflags = (templ >> 16) & 7;
|
||||
else
|
||||
cpu_state.eflags = (templ >> 16) & 3;
|
||||
|
||||
flags_extract();
|
||||
rf_flag_no_clear = 1;
|
||||
|
||||
CLOCK_CYCLES(5);
|
||||
PREFETCH_RUN(5, 1, -1, 0, 1, 0, 0, 0);
|
||||
|
||||
#if (defined(USE_DYNAREC) && defined(USE_NEW_DYNAREC))
|
||||
codegen_flags_changed = 0;
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
|
@@ -87,12 +87,6 @@ opMOV_r_DRx_a16(uint32_t fetchdat)
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_16(fetchdat);
|
||||
if (cpu_reg == 4 || cpu_reg == 5) {
|
||||
if (cr4 & 0x8)
|
||||
x86illegal();
|
||||
else
|
||||
cpu_reg += 2;
|
||||
}
|
||||
cpu_state.regs[cpu_rm].l = dr[cpu_reg] | (cpu_reg == 6 ? 0xffff0ff0u : 0);
|
||||
CLOCK_CYCLES(6);
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 0);
|
||||
@@ -106,12 +100,6 @@ opMOV_r_DRx_a32(uint32_t fetchdat)
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_32(fetchdat);
|
||||
if (cpu_reg == 4 || cpu_reg == 5) {
|
||||
if (cr4 & 0x8)
|
||||
x86illegal();
|
||||
else
|
||||
cpu_reg += 2;
|
||||
}
|
||||
cpu_state.regs[cpu_rm].l = dr[cpu_reg] | (cpu_reg == 6 ? 0xffff0ff0u : 0);
|
||||
CLOCK_CYCLES(6);
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 1);
|
||||
@@ -240,23 +228,10 @@ opMOV_DRx_r_a16(uint32_t fetchdat)
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
if ((dr[6] & 0x2000) && !(cpu_state.eflags & RF_FLAG)) {
|
||||
dr[7] |= 0x2000;
|
||||
dr[6] &= ~0x2000;
|
||||
x86gen();
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_16(fetchdat);
|
||||
if (cpu_reg == 4 || cpu_reg == 5) {
|
||||
if (cr4 & 0x8)
|
||||
x86illegal();
|
||||
else
|
||||
cpu_reg += 2;
|
||||
}
|
||||
dr[cpu_reg] = cpu_state.regs[cpu_rm].l;
|
||||
CLOCK_CYCLES(6);
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 0);
|
||||
CPU_BLOCK_END();
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
@@ -267,16 +242,9 @@ opMOV_DRx_r_a32(uint32_t fetchdat)
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_16(fetchdat);
|
||||
if (cpu_reg == 4 || cpu_reg == 5) {
|
||||
if (cr4 & 0x8)
|
||||
x86illegal();
|
||||
else
|
||||
cpu_reg += 2;
|
||||
}
|
||||
dr[cpu_reg] = cpu_state.regs[cpu_rm].l;
|
||||
CLOCK_CYCLES(6);
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 1);
|
||||
CPU_BLOCK_END();
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
414 src/cpu/x86_ops_mov_ctrl_2386.h (new file)
@@ -0,0 +1,414 @@
|
||||
static int
|
||||
opMOV_r_CRx_a16(uint32_t fetchdat)
|
||||
{
|
||||
if ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_16(fetchdat);
|
||||
switch (cpu_reg) {
|
||||
case 0:
|
||||
cpu_state.regs[cpu_rm].l = cr0;
|
||||
if (is486 || isibm486)
|
||||
cpu_state.regs[cpu_rm].l |= 0x10; /*ET hardwired on 486*/
|
||||
else {
|
||||
if (is386)
|
||||
cpu_state.regs[cpu_rm].l |= 0x7fffffe0;
|
||||
else
|
||||
cpu_state.regs[cpu_rm].l |= 0x7ffffff0;
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
cpu_state.regs[cpu_rm].l = cr2;
|
||||
break;
|
||||
case 3:
|
||||
cpu_state.regs[cpu_rm].l = cr3;
|
||||
break;
|
||||
case 4:
|
||||
if (cpu_has_feature(CPU_FEATURE_CR4)) {
|
||||
cpu_state.regs[cpu_rm].l = cr4;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
cpu_state.pc = cpu_state.oldpc;
|
||||
x86illegal();
|
||||
break;
|
||||
}
|
||||
CLOCK_CYCLES(6);
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opMOV_r_CRx_a32(uint32_t fetchdat)
|
||||
{
|
||||
if ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_32(fetchdat);
|
||||
switch (cpu_reg) {
|
||||
case 0:
|
||||
cpu_state.regs[cpu_rm].l = cr0;
|
||||
if (is486 || isibm486)
|
||||
cpu_state.regs[cpu_rm].l |= 0x10; /*ET hardwired on 486*/
|
||||
else {
|
||||
if (is386)
|
||||
cpu_state.regs[cpu_rm].l |= 0x7fffffe0;
|
||||
else
|
||||
cpu_state.regs[cpu_rm].l |= 0x7ffffff0;
|
||||
}
|
||||
break;
|
||||
case 2:
|
||||
cpu_state.regs[cpu_rm].l = cr2;
|
||||
break;
|
||||
case 3:
|
||||
cpu_state.regs[cpu_rm].l = cr3;
|
||||
break;
|
||||
case 4:
|
||||
if (cpu_has_feature(CPU_FEATURE_CR4)) {
|
||||
cpu_state.regs[cpu_rm].l = cr4;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
cpu_state.pc = cpu_state.oldpc;
|
||||
x86illegal();
|
||||
break;
|
||||
}
|
||||
CLOCK_CYCLES(6);
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 1);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
opMOV_r_DRx_a16(uint32_t fetchdat)
|
||||
{
|
||||
if ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_16(fetchdat);
|
||||
if (cpu_reg == 4 || cpu_reg == 5) {
|
||||
if (cr4 & 0x8)
|
||||
x86illegal();
|
||||
else
|
||||
cpu_reg += 2;
|
||||
}
|
||||
cpu_state.regs[cpu_rm].l = dr[cpu_reg] | (cpu_reg == 6 ? 0xffff0ff0u : 0);
|
||||
CLOCK_CYCLES(6);
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opMOV_r_DRx_a32(uint32_t fetchdat)
|
||||
{
|
||||
if ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_32(fetchdat);
|
||||
if (cpu_reg == 4 || cpu_reg == 5) {
|
||||
if (cr4 & 0x8)
|
||||
x86illegal();
|
||||
else
|
||||
cpu_reg += 2;
|
||||
}
|
||||
cpu_state.regs[cpu_rm].l = dr[cpu_reg] | (cpu_reg == 6 ? 0xffff0ff0u : 0);
|
||||
CLOCK_CYCLES(6);
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 1);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
opMOV_CRx_r_a16(uint32_t fetchdat)
|
||||
{
|
||||
uint32_t old_cr0 = cr0;
|
||||
|
||||
if ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_16(fetchdat);
|
||||
switch (cpu_reg) {
|
||||
case 0:
|
||||
if ((cpu_state.regs[cpu_rm].l ^ cr0) & 0x80000001)
|
||||
flushmmucache();
|
||||
/* Make sure CPL = 0 when switching from real mode to protected mode. */
|
||||
if ((cpu_state.regs[cpu_rm].l & 0x01) && !(cr0 & 0x01))
|
||||
cpu_state.seg_cs.access &= 0x9f;
|
||||
cr0 = cpu_state.regs[cpu_rm].l;
|
||||
if (cpu_16bitbus)
|
||||
cr0 |= 0x10;
|
||||
if (!(cr0 & 0x80000000))
|
||||
mmu_perm = 4;
|
||||
if (hascache && !(cr0 & (1 << 30)))
|
||||
cpu_cache_int_enabled = 1;
|
||||
else
|
||||
cpu_cache_int_enabled = 0;
|
||||
if (hascache && ((cr0 ^ old_cr0) & (1 << 30)))
|
||||
cpu_update_waitstates();
|
||||
if (cr0 & 1)
|
||||
cpu_cur_status |= CPU_STATUS_PMODE;
|
||||
else
|
||||
cpu_cur_status &= ~CPU_STATUS_PMODE;
|
||||
break;
|
||||
case 2:
|
||||
cr2 = cpu_state.regs[cpu_rm].l;
|
||||
break;
|
||||
case 3:
|
||||
cr3 = cpu_state.regs[cpu_rm].l;
|
||||
flushmmucache();
|
||||
break;
|
||||
case 4:
|
||||
if (cpu_has_feature(CPU_FEATURE_CR4)) {
|
||||
if (((cpu_state.regs[cpu_rm].l ^ cr4) & cpu_CR4_mask) & (CR4_PAE | CR4_PGE))
|
||||
flushmmucache();
|
||||
cr4 = cpu_state.regs[cpu_rm].l & cpu_CR4_mask;
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
cpu_state.pc = cpu_state.oldpc;
|
||||
x86illegal();
|
||||
break;
|
||||
}
|
||||
CLOCK_CYCLES(10);
|
||||
PREFETCH_RUN(10, 2, rmdat, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opMOV_CRx_r_a32(uint32_t fetchdat)
|
||||
{
|
||||
uint32_t old_cr0 = cr0;
|
||||
|
||||
if ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_32(fetchdat);
|
||||
switch (cpu_reg) {
|
||||
case 0:
|
||||
if ((cpu_state.regs[cpu_rm].l ^ cr0) & 0x80000001)
|
||||
flushmmucache();
|
||||
/* Make sure CPL = 0 when switching from real mode to protected mode. */
|
||||
if ((cpu_state.regs[cpu_rm].l & 0x01) && !(cr0 & 0x01))
|
||||
cpu_state.seg_cs.access &= 0x9f;
|
||||
cr0 = cpu_state.regs[cpu_rm].l;
|
||||
if (cpu_16bitbus)
|
||||
cr0 |= 0x10;
|
||||
if (!(cr0 & 0x80000000))
|
||||
mmu_perm = 4;
|
||||
if (hascache && !(cr0 & (1 << 30)))
|
||||
cpu_cache_int_enabled = 1;
|
||||
else
|
||||
cpu_cache_int_enabled = 0;
|
||||
if (hascache && ((cr0 ^ old_cr0) & (1 << 30)))
|
||||
cpu_update_waitstates();
|
||||
if (cr0 & 1)
|
||||
cpu_cur_status |= CPU_STATUS_PMODE;
|
||||
else
|
||||
cpu_cur_status &= ~CPU_STATUS_PMODE;
|
||||
break;
|
||||
case 2:
|
||||
cr2 = cpu_state.regs[cpu_rm].l;
|
||||
break;
|
||||
case 3:
|
||||
cr3 = cpu_state.regs[cpu_rm].l;
|
||||
flushmmucache();
|
||||
break;
|
||||
case 4:
|
||||
if (cpu_has_feature(CPU_FEATURE_CR4)) {
|
||||
if (((cpu_state.regs[cpu_rm].l ^ cr4) & cpu_CR4_mask) & (CR4_PAE | CR4_PGE))
|
||||
flushmmucache();
|
||||
cr4 = cpu_state.regs[cpu_rm].l & cpu_CR4_mask;
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
cpu_state.pc = cpu_state.oldpc;
|
||||
x86illegal();
|
||||
break;
|
||||
}
|
||||
CLOCK_CYCLES(10);
|
||||
PREFETCH_RUN(10, 2, rmdat, 0, 0, 0, 0, 1);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
opMOV_DRx_r_a16(uint32_t fetchdat)
|
||||
{
|
||||
if ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
if ((dr[6] & 0x2000) && !(cpu_state.eflags & RF_FLAG)) {
|
||||
dr[7] |= 0x2000;
|
||||
dr[6] &= ~0x2000;
|
||||
x86gen();
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_16(fetchdat);
|
||||
if (cpu_reg == 4 || cpu_reg == 5) {
|
||||
if (cr4 & 0x8)
|
||||
x86illegal();
|
||||
else
|
||||
cpu_reg += 2;
|
||||
}
|
||||
dr[cpu_reg] = cpu_state.regs[cpu_rm].l;
|
||||
CLOCK_CYCLES(6);
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 0);
|
||||
CPU_BLOCK_END();
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opMOV_DRx_r_a32(uint32_t fetchdat)
|
||||
{
|
||||
if ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_16(fetchdat);
|
||||
if (cpu_reg == 4 || cpu_reg == 5) {
|
||||
if (cr4 & 0x8)
|
||||
x86illegal();
|
||||
else
|
||||
cpu_reg += 2;
|
||||
}
|
||||
dr[cpu_reg] = cpu_state.regs[cpu_rm].l;
|
||||
CLOCK_CYCLES(6);
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 1);
|
||||
CPU_BLOCK_END();
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void
|
||||
opMOV_r_TRx(void)
|
||||
{
|
||||
#if 0
|
||||
uint32_t base;
|
||||
|
||||
base = _tr[4] & 0xfffff800;
|
||||
#endif
|
||||
|
||||
switch (cpu_reg) {
|
||||
case 3:
|
||||
#if 0
|
||||
pclog("[R] %08X cache = %08X\n", base + cache_index, _tr[3]);
|
||||
#endif
|
||||
_tr[3] = *(uint32_t *) &(_cache[cache_index]);
|
||||
cache_index = (cache_index + 4) & 0xf;
|
||||
break;
|
||||
}
|
||||
cpu_state.regs[cpu_rm].l = _tr[cpu_reg];
|
||||
CLOCK_CYCLES(6);
|
||||
}
|
||||
static int
|
||||
opMOV_r_TRx_a16(uint32_t fetchdat)
|
||||
{
|
||||
if ((cpu_s->cpu_type == CPU_PENTIUM) || ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1))) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_16(fetchdat);
|
||||
opMOV_r_TRx();
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opMOV_r_TRx_a32(uint32_t fetchdat)
|
||||
{
|
||||
if ((cpu_s->cpu_type == CPU_PENTIUM) || ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1))) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_32(fetchdat);
|
||||
opMOV_r_TRx();
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 1);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void
|
||||
opMOV_TRx_r(void)
|
||||
{
|
||||
uint32_t base;
|
||||
int i;
|
||||
int ctl;
|
||||
|
||||
_tr[cpu_reg] = cpu_state.regs[cpu_rm].l;
|
||||
base = _tr[4] & 0xfffff800;
|
||||
ctl = _tr[5] & 3;
|
||||
switch (cpu_reg) {
|
||||
case 3:
|
||||
#if 0
|
||||
pclog("[W] %08X cache = %08X\n", base + cache_index, _tr[3]);
|
||||
#endif
|
||||
*(uint32_t *) &(_cache[cache_index]) = _tr[3];
|
||||
cache_index = (cache_index + 4) & 0xf;
|
||||
break;
|
||||
case 4:
|
||||
#if 0
|
||||
if (!(cr0 & 1) && !(_tr[5] & (1 << 19)))
|
||||
pclog("TAG = %08X, DEST = %08X\n", base, base + cache_index - 16);
|
||||
#endif
|
||||
break;
|
||||
case 5:
|
||||
#if 0
|
||||
pclog("[16] EXT = %i (%i), SET = %04X\n", !!(_tr[5] & (1 << 19)), _tr[5] & 0x03, _tr[5] & 0x7f0);
|
||||
#endif
|
||||
if (!(_tr[5] & (1 << 19))) {
|
||||
switch (ctl) {
|
||||
case 0:
|
||||
#if 0
|
||||
pclog(" Cache fill or read...\n", base);
|
||||
#endif
|
||||
break;
|
||||
case 1:
|
||||
base += (_tr[5] & 0x7f0);
|
||||
#if 0
|
||||
pclog(" Writing 16 bytes to %08X...\n", base);
|
||||
#endif
|
||||
for (i = 0; i < 16; i += 4)
|
||||
mem_writel_phys(base + i, *(uint32_t *) &(_cache[i]));
|
||||
break;
|
||||
case 2:
|
||||
base += (_tr[5] & 0x7f0);
|
||||
#if 0
|
||||
pclog(" Reading 16 bytes from %08X...\n", base);
|
||||
#endif
|
||||
for (i = 0; i < 16; i += 4)
|
||||
*(uint32_t *) &(_cache[i]) = mem_readl_phys(base + i);
|
||||
break;
|
||||
case 3:
|
||||
#if 0
|
||||
pclog(" Cache invalidate/flush...\n", base);
|
||||
#endif
|
||||
break;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
CLOCK_CYCLES(6);
|
||||
}
|
||||
static int
|
||||
opMOV_TRx_r_a16(uint32_t fetchdat)
|
||||
{
|
||||
if ((cpu_s->cpu_type == CPU_PENTIUM) || ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1))) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_16(fetchdat);
|
||||
opMOV_TRx_r();
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 0);
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opMOV_TRx_r_a32(uint32_t fetchdat)
|
||||
{
|
||||
if ((cpu_s->cpu_type == CPU_PENTIUM) || ((CPL || (cpu_state.eflags & VM_FLAG)) && (cr0 & 1))) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
fetch_ea_32(fetchdat);
|
||||
opMOV_TRx_r();
|
||||
PREFETCH_RUN(6, 2, rmdat, 0, 0, 0, 0, 1);
|
||||
return 0;
|
||||
}
|
@@ -135,7 +135,6 @@ opIRET_186(uint32_t fetchdat)
|
||||
}
|
||||
flags_extract();
|
||||
nmi_enable = 1;
|
||||
rf_flag_no_clear = 1;
|
||||
CPU_BLOCK_END();
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 1, -1, 2, 0, 0, 0, 0);
|
||||
@@ -176,7 +175,6 @@ opIRET_286(uint32_t fetchdat)
|
||||
}
|
||||
flags_extract();
|
||||
nmi_enable = 1;
|
||||
rf_flag_no_clear = 1;
|
||||
CPU_BLOCK_END();
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 1, -1, 2, 0, 0, 0, 0);
|
||||
@@ -245,7 +243,6 @@ opIRET(uint32_t fetchdat)
|
||||
}
|
||||
flags_extract();
|
||||
nmi_enable = 1;
|
||||
rf_flag_no_clear = 1;
|
||||
CPU_BLOCK_END();
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 1, -1, 2, 0, 0, 0, 0);
|
||||
@@ -288,7 +285,6 @@ opIRETD(uint32_t fetchdat)
|
||||
}
|
||||
flags_extract();
|
||||
nmi_enable = 1;
|
||||
rf_flag_no_clear = 1;
|
||||
CPU_BLOCK_END();
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 1, -1, 0, 2, 0, 0, 1);
|
||||
|
297 src/cpu/x86_ops_ret_2386.h (new file)
@@ -0,0 +1,297 @@
|
||||
#ifdef USE_NEW_DYNAREC
|
||||
# define CPU_SET_OXPC
|
||||
#else
|
||||
# define CPU_SET_OXPC oxpc = cpu_state.pc;
|
||||
#endif
|
||||
|
||||
#define RETF_a16(stack_offset) \
|
||||
if ((msw & 1) && !(cpu_state.eflags & VM_FLAG)) { \
|
||||
op_pmoderetf(0, stack_offset); \
|
||||
return 1; \
|
||||
} \
|
||||
CPU_SET_OXPC \
|
||||
if (stack32) { \
|
||||
cpu_state.pc = readmemw(ss, ESP); \
|
||||
op_loadcs(readmemw(ss, ESP + 2)); \
|
||||
} else { \
|
||||
cpu_state.pc = readmemw(ss, SP); \
|
||||
op_loadcs(readmemw(ss, SP + 2)); \
|
||||
} \
|
||||
if (cpu_state.abrt) \
|
||||
return 1; \
|
||||
if (stack32) \
|
||||
ESP += 4 + stack_offset; \
|
||||
else \
|
||||
SP += 4 + stack_offset; \
|
||||
cycles -= timing_retf_rm;
|
||||
|
||||
#define RETF_a32(stack_offset) \
|
||||
if ((msw & 1) && !(cpu_state.eflags & VM_FLAG)) { \
|
||||
op_pmoderetf(1, stack_offset); \
|
||||
return 1; \
|
||||
} \
|
||||
CPU_SET_OXPC \
|
||||
if (stack32) { \
|
||||
cpu_state.pc = readmeml(ss, ESP); \
|
||||
op_loadcs(readmeml(ss, ESP + 4) & 0xffff); \
|
||||
} else { \
|
||||
cpu_state.pc = readmeml(ss, SP); \
|
||||
op_loadcs(readmeml(ss, SP + 4) & 0xffff); \
|
||||
} \
|
||||
if (cpu_state.abrt) \
|
||||
return 1; \
|
||||
if (stack32) \
|
||||
ESP += 8 + stack_offset; \
|
||||
else \
|
||||
SP += 8 + stack_offset; \
|
||||
cycles -= timing_retf_rm;
|
||||
|
||||
static int
|
||||
opRETF_a16(uint32_t fetchdat)
|
||||
{
|
||||
int cycles_old = cycles;
|
||||
UN_USED(cycles_old);
|
||||
|
||||
CPU_BLOCK_END();
|
||||
RETF_a16(0);
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 1, -1, 2, 0, 0, 0, 0);
|
||||
PREFETCH_FLUSH();
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opRETF_a32(uint32_t fetchdat)
|
||||
{
|
||||
int cycles_old = cycles;
|
||||
UN_USED(cycles_old);
|
||||
|
||||
CPU_BLOCK_END();
|
||||
RETF_a32(0);
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 1, -1, 0, 2, 0, 0, 1);
|
||||
PREFETCH_FLUSH();
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
opRETF_a16_imm(uint32_t fetchdat)
|
||||
{
|
||||
uint16_t offset = getwordf();
|
||||
int cycles_old = cycles;
|
||||
UN_USED(cycles_old);
|
||||
|
||||
CPU_BLOCK_END();
|
||||
RETF_a16(offset);
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 3, -1, 2, 0, 0, 0, 0);
|
||||
PREFETCH_FLUSH();
|
||||
return 0;
|
||||
}
|
||||
static int
|
||||
opRETF_a32_imm(uint32_t fetchdat)
|
||||
{
|
||||
uint16_t offset = getwordf();
|
||||
int cycles_old = cycles;
|
||||
UN_USED(cycles_old);
|
||||
|
||||
CPU_BLOCK_END();
|
||||
RETF_a32(offset);
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 3, -1, 0, 2, 0, 0, 1);
|
||||
PREFETCH_FLUSH();
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
opIRET_186(uint32_t fetchdat)
|
||||
{
|
||||
int cycles_old = cycles;
|
||||
UN_USED(cycles_old);
|
||||
|
||||
if ((cr0 & 1) && (cpu_state.eflags & VM_FLAG) && (IOPL != 3)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
if (msw & 1) {
|
||||
optype = IRET;
|
||||
op_pmodeiret(0);
|
||||
optype = 0;
|
||||
} else {
|
||||
uint16_t new_cs;
|
||||
CPU_SET_OXPC
|
||||
if (stack32) {
|
||||
cpu_state.pc = readmemw(ss, ESP);
|
||||
new_cs = readmemw(ss, ESP + 2);
|
||||
cpu_state.flags = (cpu_state.flags & 0x7000) | (readmemw(ss, ESP + 4) & 0xffd5) | 2;
|
||||
ESP += 6;
|
||||
} else {
|
||||
cpu_state.pc = readmemw(ss, SP);
|
||||
new_cs = readmemw(ss, ((SP + 2) & 0xffff));
|
||||
cpu_state.flags = (cpu_state.flags & 0x7000) | (readmemw(ss, ((SP + 4) & 0xffff)) & 0x0fd5) | 2;
|
||||
SP += 6;
|
||||
}
|
||||
op_loadcs(new_cs);
|
||||
cycles -= timing_iret_rm;
|
||||
}
|
||||
flags_extract();
|
||||
nmi_enable = 1;
|
||||
rf_flag_no_clear = 1;
|
||||
CPU_BLOCK_END();
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 1, -1, 2, 0, 0, 0, 0);
|
||||
PREFETCH_FLUSH();
|
||||
return cpu_state.abrt;
|
||||
}
|
||||
|
||||
static int
|
||||
opIRET_286(uint32_t fetchdat)
|
||||
{
|
||||
int cycles_old = cycles;
|
||||
UN_USED(cycles_old);
|
||||
|
||||
if ((cr0 & 1) && (cpu_state.eflags & VM_FLAG) && (IOPL != 3)) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
if (msw & 1) {
|
||||
optype = IRET;
|
||||
op_pmodeiret(0);
|
||||
optype = 0;
|
||||
} else {
|
||||
uint16_t new_cs;
|
||||
CPU_SET_OXPC
|
||||
if (stack32) {
|
||||
cpu_state.pc = readmemw(ss, ESP);
|
||||
new_cs = readmemw(ss, ESP + 2);
|
||||
cpu_state.flags = (cpu_state.flags & 0x7000) | (readmemw(ss, ESP + 4) & 0xffd5) | 2;
|
||||
ESP += 6;
|
||||
} else {
|
||||
cpu_state.pc = readmemw(ss, SP);
|
||||
new_cs = readmemw(ss, ((SP + 2) & 0xffff));
|
||||
cpu_state.flags = (cpu_state.flags & 0x7000) | (readmemw(ss, ((SP + 4) & 0xffff)) & 0x0fd5) | 2;
|
||||
SP += 6;
|
||||
}
|
||||
op_loadcs(new_cs);
|
||||
cycles -= timing_iret_rm;
|
||||
}
|
||||
flags_extract();
|
||||
nmi_enable = 1;
|
||||
rf_flag_no_clear = 1;
|
||||
CPU_BLOCK_END();
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 1, -1, 2, 0, 0, 0, 0);
|
||||
PREFETCH_FLUSH();
|
||||
return cpu_state.abrt;
|
||||
}
|
||||
|
||||
static int
|
||||
opIRET(uint32_t fetchdat)
|
||||
{
|
||||
int cycles_old = cycles;
|
||||
UN_USED(cycles_old);
|
||||
|
||||
if ((cr0 & 1) && (cpu_state.eflags & VM_FLAG) && (IOPL != 3)) {
|
||||
if (cr4 & CR4_VME) {
|
||||
uint16_t new_pc;
|
||||
uint16_t new_cs;
|
||||
uint16_t new_flags;
|
||||
|
||||
new_pc = readmemw(ss, SP);
|
||||
new_cs = readmemw(ss, ((SP + 2) & 0xffff));
|
||||
new_flags = readmemw(ss, ((SP + 4) & 0xffff));
|
||||
if (cpu_state.abrt)
|
||||
return 1;
|
||||
|
||||
if ((new_flags & T_FLAG) || ((new_flags & I_FLAG) && (cpu_state.eflags & VIP_FLAG))) {
|
||||
x86gpf(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
SP += 6;
|
||||
if (new_flags & I_FLAG)
|
||||
cpu_state.eflags |= VIF_FLAG;
|
||||
else
|
||||
cpu_state.eflags &= ~VIF_FLAG;
|
||||
cpu_state.flags = (cpu_state.flags & 0x3300) | (new_flags & 0x4cd5) | 2;
|
||||
op_loadcs(new_cs);
|
||||
cpu_state.pc = new_pc;
|
||||
|
||||
cycles -= timing_iret_rm;
|
||||
} else {
|
||||
x86gpf_expected(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
} else {
|
||||
if (msw & 1) {
|
||||
optype = IRET;
|
||||
op_pmodeiret(0);
|
||||
optype = 0;
|
||||
} else {
|
||||
uint16_t new_cs;
|
||||
CPU_SET_OXPC
|
||||
if (stack32) {
|
||||
cpu_state.pc = readmemw(ss, ESP);
|
||||
new_cs = readmemw(ss, ESP + 2);
|
||||
cpu_state.flags = (readmemw(ss, ESP + 4) & 0xffd5) | 2;
|
||||
ESP += 6;
|
||||
} else {
|
||||
cpu_state.pc = readmemw(ss, SP);
|
||||
new_cs = readmemw(ss, ((SP + 2) & 0xffff));
|
||||
cpu_state.flags = (readmemw(ss, ((SP + 4) & 0xffff)) & 0xffd5) | 2;
|
||||
SP += 6;
|
||||
}
|
||||
op_loadcs(new_cs);
|
||||
cycles -= timing_iret_rm;
|
||||
}
|
||||
}
|
||||
flags_extract();
|
||||
nmi_enable = 1;
|
||||
rf_flag_no_clear = 1;
|
||||
CPU_BLOCK_END();
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 1, -1, 2, 0, 0, 0, 0);
|
||||
PREFETCH_FLUSH();
|
||||
return cpu_state.abrt;
|
||||
}
|
||||
|
||||
static int
|
||||
opIRETD(uint32_t fetchdat)
|
||||
{
|
||||
int cycles_old = cycles;
|
||||
UN_USED(cycles_old);
|
||||
|
||||
if ((cr0 & 1) && (cpu_state.eflags & VM_FLAG) && (IOPL != 3)) {
|
||||
x86gpf_expected(NULL, 0);
|
||||
return 1;
|
||||
}
|
||||
if (msw & 1) {
|
||||
optype = IRET;
|
||||
op_pmodeiret(1);
|
||||
optype = 0;
|
||||
} else {
|
||||
uint16_t new_cs;
|
||||
CPU_SET_OXPC
|
||||
if (stack32) {
|
||||
cpu_state.pc = readmeml(ss, ESP);
|
||||
new_cs = readmemw(ss, ESP + 4);
|
||||
cpu_state.flags = (readmemw(ss, ESP + 8) & 0xffd5) | 2;
|
||||
cpu_state.eflags = readmemw(ss, ESP + 10);
|
||||
ESP += 12;
|
||||
} else {
|
||||
cpu_state.pc = readmeml(ss, SP);
|
||||
new_cs = readmemw(ss, ((SP + 4) & 0xffff));
|
||||
cpu_state.flags = (readmemw(ss, (SP + 8) & 0xffff) & 0xffd5) | 2;
|
||||
cpu_state.eflags = readmemw(ss, (SP + 10) & 0xffff);
|
||||
SP += 12;
|
||||
}
|
||||
op_loadcs(new_cs);
|
||||
cycles -= timing_iret_rm;
|
||||
}
|
||||
flags_extract();
|
||||
nmi_enable = 1;
|
||||
rf_flag_no_clear = 1;
|
||||
CPU_BLOCK_END();
|
||||
|
||||
PREFETCH_RUN(cycles_old - cycles, 1, -1, 0, 2, 0, 0, 1);
|
||||
PREFETCH_FLUSH();
|
||||
return cpu_state.abrt;
|
||||
}
|
@@ -161,54 +161,6 @@ mem_log(const char *fmt, ...)
|
||||
# define mem_log(fmt, ...)
|
||||
#endif
|
||||
|
||||
/* Set trap for data address breakpoints. */
|
||||
void
|
||||
mem_debug_check_addr(uint32_t addr, int write)
|
||||
{
|
||||
int i = 0;
|
||||
int set_trap = 0;
|
||||
|
||||
if (!(dr[7] & 0xFF))
|
||||
return;
|
||||
|
||||
for (i = 0; i < 4; i++) {
|
||||
uint32_t dr_addr = dr[i];
|
||||
int breakpoint_enabled = !!(dr[7] & (0x3 << (2 * i)));
|
||||
int len_type_pair = ((dr[7] >> 16) & (0xF << (4 * i))) >> (4 * i);
|
||||
if (!breakpoint_enabled)
|
||||
continue;
|
||||
if (!write && (len_type_pair & 3) != 3)
|
||||
continue;
|
||||
if ((len_type_pair & 3) != 1)
|
||||
continue;
|
||||
|
||||
switch ((len_type_pair >> 2) & 3)
|
||||
{
|
||||
case 0x00:
|
||||
if (dr_addr == addr) {
|
||||
set_trap = 1;
|
||||
dr[6] |= (1 << i);
|
||||
}
|
||||
break;
|
||||
case 0x01:
|
||||
if ((dr_addr & ~1) == addr || ((dr_addr & ~1) + 1) == (addr + 1)) {
|
||||
set_trap = 1;
|
||||
dr[6] |= (1 << i);
|
||||
}
|
||||
break;
|
||||
case 0x03:
|
||||
dr_addr &= ~3;
|
||||
if (addr >= dr_addr && addr < (dr_addr + 4)) {
|
||||
set_trap = 1;
|
||||
dr[6] |= (1 << i);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (set_trap)
|
||||
trap |= 4;
|
||||
}
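For reference, the DR7 fields this function decodes for breakpoint i can be restated as follows; the helper is illustrative only and is not part of the patch:

/* Illustrative DR7 field decode for hardware breakpoint i; not in the patch. */
static inline void
dr7_fields(uint32_t dr7, int i, int *enable, int *rw, int *len)
{
    *enable = (dr7 >> (2 * i)) & 3;      /* L(i)/G(i) local/global enable bits       */
    *rw     = (dr7 >> (16 + 4 * i)) & 3; /* 01 = data write, 11 = data read or write */
    *len    = (dr7 >> (18 + 4 * i)) & 3; /* 00 = 1 byte, 01 = 2 bytes, 11 = 4 bytes  */
}

This matches the cases handled above: data breakpoints only, with 1-, 2- and 4-byte lengths (LEN encodings 0, 1 and 3).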
|
||||
|
||||
int
|
||||
mem_addr_is_ram(uint32_t addr)
|
||||
{
|
||||
@@ -838,7 +790,6 @@ readmembl(uint32_t addr)
|
||||
uint64_t a;
|
||||
|
||||
GDBSTUB_MEM_ACCESS(addr, GDBSTUB_MEM_READ, 1);
|
||||
mem_debug_check_addr(addr, 0);
|
||||
|
||||
addr64 = (uint64_t) addr;
|
||||
mem_logical_addr = addr;
|
||||
@@ -868,7 +819,6 @@ writemembl(uint32_t addr, uint8_t val)
|
||||
uint64_t a;
|
||||
|
||||
GDBSTUB_MEM_ACCESS(addr, GDBSTUB_MEM_WRITE, 1);
|
||||
mem_debug_check_addr(addr, 1);
|
||||
|
||||
addr64 = (uint64_t) addr;
|
||||
mem_logical_addr = addr;
|
||||
@@ -955,8 +905,6 @@ readmemwl(uint32_t addr)
|
||||
|
||||
addr64a[0] = addr;
|
||||
addr64a[1] = addr + 1;
|
||||
mem_debug_check_addr(addr, 0);
|
||||
mem_debug_check_addr(addr + 1, 0);
|
||||
GDBSTUB_MEM_ACCESS_FAST(addr64a, GDBSTUB_MEM_READ, 2);
|
||||
|
||||
mem_logical_addr = addr;
|
||||
@@ -1015,8 +963,6 @@ writememwl(uint32_t addr, uint16_t val)
|
||||
|
||||
addr64a[0] = addr;
|
||||
addr64a[1] = addr + 1;
|
||||
mem_debug_check_addr(addr, 1);
|
||||
mem_debug_check_addr(addr + 1, 1);
|
||||
GDBSTUB_MEM_ACCESS_FAST(addr64a, GDBSTUB_MEM_WRITE, 2);
|
||||
|
||||
mem_logical_addr = addr;
|
||||
@@ -1193,10 +1139,8 @@ readmemll(uint32_t addr)
|
||||
int i;
|
||||
uint64_t a = 0x0000000000000000ULL;
|
||||
|
||||
for (i = 0; i < 4; i++) {
|
||||
for (i = 0; i < 4; i++)
|
||||
addr64a[i] = (uint64_t) (addr + i);
|
||||
mem_debug_check_addr(addr + i, 0);
|
||||
}
|
||||
GDBSTUB_MEM_ACCESS_FAST(addr64a, GDBSTUB_MEM_READ, 4);
|
||||
|
||||
mem_logical_addr = addr;
|
||||
@@ -1269,10 +1213,8 @@ writememll(uint32_t addr, uint32_t val)
|
||||
int i;
|
||||
uint64_t a = 0x0000000000000000ULL;
|
||||
|
||||
for (i = 0; i < 4; i++) {
|
||||
for (i = 0; i < 4; i++)
|
||||
addr64a[i] = (uint64_t) (addr + i);
|
||||
mem_debug_check_addr(addr + i, 1);
|
||||
}
|
||||
GDBSTUB_MEM_ACCESS_FAST(addr64a, GDBSTUB_MEM_WRITE, 4);
|
||||
|
||||
mem_logical_addr = addr;
|
||||
@@ -1475,10 +1417,8 @@ readmemql(uint32_t addr)
|
||||
int i;
|
||||
uint64_t a = 0x0000000000000000ULL;
|
||||
|
||||
for (i = 0; i < 8; i++) {
|
||||
for (i = 0; i < 8; i++)
|
||||
addr64a[i] = (uint64_t) (addr + i);
|
||||
mem_debug_check_addr(addr + i, 0);
|
||||
}
|
||||
GDBSTUB_MEM_ACCESS_FAST(addr64a, GDBSTUB_MEM_READ, 8);
|
||||
|
||||
mem_logical_addr = addr;
|
||||
@@ -1543,10 +1483,8 @@ writememql(uint32_t addr, uint64_t val)
|
||||
int i;
|
||||
uint64_t a = 0x0000000000000000ULL;
|
||||
|
||||
for (i = 0; i < 8; i++) {
|
||||
for (i = 0; i < 8; i++)
|
||||
addr64a[i] = (uint64_t) (addr + i);
|
||||
mem_debug_check_addr(addr + i, 1);
|
||||
}
|
||||
GDBSTUB_MEM_ACCESS_FAST(addr64a, GDBSTUB_MEM_WRITE, 8);
|
||||
|
||||
mem_logical_addr = addr;
|
||||
@@ -1644,10 +1582,8 @@ do_mmutranslate(uint32_t addr, uint32_t *a64, int num, int write)
|
||||
uint32_t last_addr = addr + (num - 1);
|
||||
uint64_t a = 0x0000000000000000ULL;
|
||||
|
||||
mem_debug_check_addr(addr, write);
|
||||
for (i = 0; i < num; i++) {
|
||||
for (i = 0; i < num; i++)
|
||||
a64[i] = (uint64_t) addr;
|
||||
}
|
||||
|
||||
for (i = 0; i < num; i++) {
|
||||
if (cr0 >> 31) {
|
||||
|
@@ -39,6 +39,54 @@
|
||||
#include <86box/rom.h>
|
||||
#include <86box/gdbstub.h>
|
||||
|
||||
/* Set trap for data address breakpoints. */
|
||||
void
|
||||
mem_debug_check_addr(uint32_t addr, int write)
|
||||
{
|
||||
int i = 0;
|
||||
int set_trap = 0;
|
||||
|
||||
if (!(dr[7] & 0xFF))
|
||||
return;
|
||||
|
||||
for (i = 0; i < 4; i++) {
|
||||
uint32_t dr_addr = dr[i];
|
||||
int breakpoint_enabled = !!(dr[7] & (0x3 << (2 * i)));
|
||||
int len_type_pair = ((dr[7] >> 16) & (0xF << (4 * i))) >> (4 * i);
|
||||
if (!breakpoint_enabled)
|
||||
continue;
|
||||
if (!write && (len_type_pair & 3) != 3)
|
||||
continue;
|
||||
if ((len_type_pair & 3) != 1)
|
||||
continue;
|
||||
|
||||
switch ((len_type_pair >> 2) & 3)
|
||||
{
|
||||
case 0x00:
|
||||
if (dr_addr == addr) {
|
||||
set_trap = 1;
|
||||
dr[6] |= (1 << i);
|
||||
}
|
||||
break;
|
||||
case 0x01:
|
||||
if ((dr_addr & ~1) == addr || ((dr_addr & ~1) + 1) == (addr + 1)) {
|
||||
set_trap = 1;
|
||||
dr[6] |= (1 << i);
|
||||
}
|
||||
break;
|
||||
case 0x03:
|
||||
dr_addr &= ~3;
|
||||
if (addr >= dr_addr && addr < (dr_addr + 4)) {
|
||||
set_trap = 1;
|
||||
dr[6] |= (1 << i);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (set_trap)
|
||||
trap |= 4;
|
||||
}
|
||||
|
||||
uint8_t
|
||||
mem_readb_map(uint32_t addr)
|
||||
{