Large changes to the configuration file format again (old configuration files might break; be careful);

Applied the mainline PCem slight CPU emulation speedup commit;
Added emulation of removable SCSI hard disks;
CD-ROM image handler now uses C FILE's (with the 64-bit size calls) instead of C++ iostreams, ISO images bigger than 2 GB should work properly again;
Split RLL/ESDI and XT IDE disks to their own bus types;
Turned status bar pane meanings and hard disk and CD-ROM bus numbers into #define's;
Other miscellaneous cleanups.
This commit is contained in:
OBattler
2017-05-27 03:53:32 +02:00
parent 94680da416
commit a36720f174
56 changed files with 4736 additions and 2682 deletions

View File

@@ -866,6 +866,10 @@ static void CHECK_SEG_READ(x86seg *seg)
return;
if (seg->checked)
return;
if ((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS))
return;
if ((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS))
return;
if (IS_32_ADDR(&seg->base))
{
@@ -900,6 +904,10 @@ static void CHECK_SEG_WRITE(x86seg *seg)
return;
if (seg->checked)
return;
if ((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS))
return;
if ((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS))
return;
if (IS_32_ADDR(&seg->base))
{
@@ -926,6 +934,11 @@ static void CHECK_SEG_WRITE(x86seg *seg)
}
static void CHECK_SEG_LIMITS(x86seg *seg, int end_offset)
{
if ((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS))
return;
if ((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS))
return;
if (IS_32_ADDR(&seg->base))
{
addbyte(0xb8 | REG_ESI); /*MOV ESI, &addr*/
@@ -962,7 +975,13 @@ static void CHECK_SEG_LIMITS(x86seg *seg, int end_offset)
static void MEM_LOAD_ADDR_EA_B(x86seg *seg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -1030,7 +1049,13 @@ static void MEM_LOAD_ADDR_EA_B(x86seg *seg)
}
static void MEM_LOAD_ADDR_EA_W(x86seg *seg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -1114,7 +1139,13 @@ static void MEM_LOAD_ADDR_EA_W_OFFSET(x86seg *seg, int offset)
}
static void MEM_LOAD_ADDR_EA_L(x86seg *seg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -1190,7 +1221,13 @@ static void MEM_LOAD_ADDR_EA_L(x86seg *seg)
}
static void MEM_LOAD_ADDR_EA_Q(x86seg *seg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -1309,7 +1346,13 @@ static void MEM_STORE_ADDR_EA_B(x86seg *seg, int host_reg)
addbyte(8);
host_reg = 8;
}
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -1388,7 +1431,13 @@ static void MEM_STORE_ADDR_EA_B(x86seg *seg, int host_reg)
}
static void MEM_STORE_ADDR_EA_W(x86seg *seg, int host_reg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -1478,7 +1527,13 @@ static void MEM_STORE_ADDR_EA_W(x86seg *seg, int host_reg)
}
static void MEM_STORE_ADDR_EA_L(x86seg *seg, int host_reg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -1566,7 +1621,13 @@ static void MEM_STORE_ADDR_EA_L(x86seg *seg, int host_reg)
}
static void MEM_STORE_ADDR_EA_Q(x86seg *seg, int host_reg, int host_reg2)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -5252,7 +5313,13 @@ static void MEM_CHECK_WRITE(x86seg *seg)
CHECK_SEG_WRITE(seg);
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ESI, ESI*/
addbyte(0xf6);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOV ESI, seg->base*/
addbyte(0x34);
@@ -5299,12 +5366,16 @@ static void MEM_CHECK_WRITE(x86seg *seg)
addbyte(0xc1); /*SHR EDI, 12*/
addbyte(0xef);
addbyte(12);
addbyte(0x83); /*CMP ESI, -1*/
addbyte(0xfe);
addbyte(-1);
addbyte(0x74); /*JE slowpath*/
jump3 = &codeblock[block_current].data[block_pos];
addbyte(0);
if (!((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) &&
!((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x83); /*CMP ESI, -1*/
addbyte(0xfe);
addbyte(-1);
addbyte(0x74); /*JE slowpath*/
jump3 = &codeblock[block_current].data[block_pos];
addbyte(0);
}
if (IS_32_ADDR(writelookup2))
{
addbyte(0x83); /*CMP writelookup2[RDI*8],-1*/
@@ -5328,7 +5399,9 @@ static void MEM_CHECK_WRITE(x86seg *seg)
addbyte(0);
// addbyte(0xc3); /*RET*/
*jump3 = (uintptr_t)&codeblock[block_current].data[block_pos] - (uintptr_t)jump3 - 1;
if (!((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) &&
!((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
*jump3 = (uintptr_t)&codeblock[block_current].data[block_pos] - (uintptr_t)jump3 - 1;
/*slowpath:*/
addbyte(0x67); /*LEA EDI, [EAX+ESI]*/
addbyte(0x8d);
@@ -5373,7 +5446,13 @@ static void MEM_CHECK_WRITE_W(x86seg *seg)
CHECK_SEG_WRITE(seg);
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ESI, ESI*/
addbyte(0xf6);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOV ESI, seg->base*/
addbyte(0x34);
@@ -5416,15 +5495,23 @@ static void MEM_CHECK_WRITE_W(x86seg *seg)
addbyte(0x79); /*JNS +*/
jump1 = &codeblock[block_current].data[block_pos];
addbyte(0);
addbyte(0x83); /*CMP ESI, -1*/
addbyte(0xfe);
addbyte(-1);
if (!((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) &&
!((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x83); /*CMP ESI, -1*/
addbyte(0xfe);
addbyte(-1);
}
addbyte(0x8d); /*LEA ESI, 1[EDI]*/
addbyte(0x77);
addbyte(0x01);
addbyte(0x74); /*JE slowpath*/
jump4 = &codeblock[block_current].data[block_pos];
addbyte(0);
if (!((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) &&
!((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x74); /*JE slowpath*/
jump4 = &codeblock[block_current].data[block_pos];
addbyte(0);
}
addbyte(0x89); /*MOV EBX, EDI*/
addbyte(0xfb);
addbyte(0xc1); /*SHR EDI, 12*/
@@ -5475,7 +5562,9 @@ static void MEM_CHECK_WRITE_W(x86seg *seg)
/*slowpath:*/
*jump2 = (uintptr_t)&codeblock[block_current].data[block_pos] - (uintptr_t)jump2 - 1;
*jump4 = (uintptr_t)&codeblock[block_current].data[block_pos] - (uintptr_t)jump4 - 1;
if (!((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) &&
!((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
*jump4 = (uintptr_t)&codeblock[block_current].data[block_pos] - (uintptr_t)jump4 - 1;
jump_pos = block_pos;
load_param_1_reg_32(REG_EBX);
load_param_2_32(&codeblock[block_current], 1);
@@ -5510,7 +5599,13 @@ static void MEM_CHECK_WRITE_L(x86seg *seg)
CHECK_SEG_WRITE(seg);
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ESI, ESI*/
addbyte(0xf6);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOV ESI, seg->base*/
addbyte(0x34);
@@ -5553,15 +5648,23 @@ static void MEM_CHECK_WRITE_L(x86seg *seg)
addbyte(0x79); /*JNS +*/
jump1 = &codeblock[block_current].data[block_pos];
addbyte(0);
addbyte(0x83); /*CMP ESI, -1*/
addbyte(0xfe);
addbyte(-1);
if (!((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) &&
!((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x83); /*CMP ESI, -1*/
addbyte(0xfe);
addbyte(-1);
}
addbyte(0x8d); /*LEA ESI, 3[EDI]*/
addbyte(0x77);
addbyte(0x03);
addbyte(0x74); /*JE slowpath*/
jump4 = &codeblock[block_current].data[block_pos];
addbyte(0);
if (!((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) &&
!((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x74); /*JE slowpath*/
jump4 = &codeblock[block_current].data[block_pos];
addbyte(0);
}
addbyte(0x89); /*MOV EBX, EDI*/
addbyte(0xfb);
addbyte(0xc1); /*SHR EDI, 12*/
@@ -5612,7 +5715,9 @@ static void MEM_CHECK_WRITE_L(x86seg *seg)
/*slowpath:*/
*jump2 = (uintptr_t)&codeblock[block_current].data[block_pos] - (uintptr_t)jump2 - 1;
*jump4 = (uintptr_t)&codeblock[block_current].data[block_pos] - (uintptr_t)jump4 - 1;
if (!((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) &&
!((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
*jump4 = (uintptr_t)&codeblock[block_current].data[block_pos] - (uintptr_t)jump4 - 1;
jump_pos = block_pos;
load_param_1_reg_32(REG_EBX);
load_param_2_32(&codeblock[block_current], 1);
@@ -5642,7 +5747,13 @@ static void MEM_CHECK_WRITE_L(x86seg *seg)
static int MEM_LOAD_ADDR_EA_B_NO_ABRT(x86seg *seg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -5709,7 +5820,13 @@ static int MEM_LOAD_ADDR_EA_B_NO_ABRT(x86seg *seg)
}
static int MEM_LOAD_ADDR_EA_W_NO_ABRT(x86seg *seg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -5785,7 +5902,13 @@ static int MEM_LOAD_ADDR_EA_W_NO_ABRT(x86seg *seg)
}
static int MEM_LOAD_ADDR_EA_L_NO_ABRT(x86seg *seg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR ECX, ECX*/
addbyte(0xc9);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL ECX, seg->base*/
addbyte(0x0c);
@@ -5883,7 +6006,13 @@ static void MEM_STORE_ADDR_EA_B_NO_ABRT(x86seg *seg, int host_reg)
addbyte(8);
host_reg = 8;
}
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR EBX, EBX*/
addbyte(0xdb);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL EBX, seg->base*/
addbyte(0x1c);
@@ -5955,7 +6084,13 @@ static void MEM_STORE_ADDR_EA_B_NO_ABRT(x86seg *seg, int host_reg)
}
static void MEM_STORE_ADDR_EA_W_NO_ABRT(x86seg *seg, int host_reg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR EBX, EBX*/
addbyte(0xdb);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL EBX, seg->base*/
addbyte(0x1c);
@@ -6038,7 +6173,13 @@ static void MEM_STORE_ADDR_EA_W_NO_ABRT(x86seg *seg, int host_reg)
}
static void MEM_STORE_ADDR_EA_L_NO_ABRT(x86seg *seg, int host_reg)
{
if (IS_32_ADDR(&seg->base))
if (((seg == &_ds) && (cpu_cur_status & CPU_STATUS_FLATDS)) ||
((seg == &_ss) && (cpu_cur_status & CPU_STATUS_FLATSS)))
{
addbyte(0x31); /*XOR EBX, EBX*/
addbyte(0xdb);
}
else if (IS_32_ADDR(&seg->base))
{
addbyte(0x8b); /*MOVL EBX, seg->base*/
addbyte(0x1c);