src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <mgba/internal/gb/memory.h>

#include <mgba/core/interface.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/io.h>
#include <mgba/internal/gb/mbc.h>
#include <mgba/internal/gb/serialize.h>
#include <mgba/internal/sm83/sm83.h>

#include <mgba-util/memory.h>

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");

static const uint8_t _yankBuffer[] = { 0xFF };

enum GBBus {
	GB_BUS_CPU,
	GB_BUS_MAIN,
	GB_BUS_VRAM,
	GB_BUS_RAM
};

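// Bus map used to emulate OAM DMA blocking: each entry covers one 8 KiB slice
// of the address space. While OAM DMA is copying, CPU accesses that land on
// the same bus as the DMA source are blocked (reads return 0xFF, writes are
// dropped); only the CPU-internal bus stays usable. On CGB, WRAM sits on its
// own bus, so it is not blocked while DMA reads from the cartridge.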
static const enum GBBus _oamBlockDMG[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_MAIN, // 0xC000
	GB_BUS_CPU, // 0xE000
};

static const enum GBBus _oamBlockCGB[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_RAM, // 0xC000
	GB_BUS_CPU // 0xE000
};

static const uint8_t _blockedRegion[1] = { 0xFF };

static void _pristineCow(struct GB* gb);

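// Fast path for CPU loads: GBSetActiveRegion caches a pointer to the ROM area
// containing the given address so subsequent fetches are a single masked array
// read. When an access falls outside the cached window, GBFastLoad8 remaps via
// setActiveRegion and retries; non-ROM regions always take the GBLoad8 slow path.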
static uint8_t GBFastLoad8(struct SM83Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

static void GBSetActiveRegion(struct SM83Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		if (gb->memory.romSize < GB_SIZE_CART_BANK0) {
			if (address >= gb->memory.romSize) {
				cpu->memory.activeRegion = _yankBuffer;
				cpu->memory.activeMask = 0;
			} else {
				cpu->memory.activeRegionEnd = gb->memory.romSize;
			}
		}
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		if (gb->memory.mbcType != GB_MBC6) {
			cpu->memory.activeRegion = memory->romBank;
			cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		} else {
			cpu->memory.activeMask = GB_SIZE_CART_HALFBANK - 1;
			if (address & 0x2000) {
				cpu->memory.activeRegion = memory->mbcState.mbc6.romBank1;
				cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			} else {
				cpu->memory.activeRegion = memory->romBank;
				cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1 + 0x2000;
			}
		}
		if (gb->memory.romSize < GB_SIZE_CART_BANK0 * 2) {
			if (address >= gb->memory.romSize) {
				cpu->memory.activeRegion = _yankBuffer;
				cpu->memory.activeMask = 0;
			} else {
				cpu->memory.activeRegionEnd = gb->memory.romSize;
			}
		}
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if ((dmaBus != GB_BUS_CPU && dmaBus == accessBus) || (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE)) {
			cpu->memory.activeRegion = _blockedRegion;
			cpu->memory.activeMask = 0;
		}
	}
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

void GBMemoryInit(struct GB* gb) {
	struct SM83Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.currentSegment = GBCurrentSegment;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbcRead = NULL;
	gb->memory.mbcWrite = NULL;

	gb->memory.rtc = NULL;
	gb->memory.rotation = NULL;
	gb->memory.rumble = NULL;
	gb->memory.cam = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

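// Reset remaps WRAM and, on CGB, fills it with an alternating 0xFF/0x00 fill
// (8-byte runs, inverted every 0x800 bytes) intended to approximate the
// power-on contents of CGB WRAM, then restores default banking, clears DMA
// and HDMA state, and applies MBC-specific reset values.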
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	if (gb->model >= GB_MODEL_CGB) {
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.dmaEvent.priority = 0x40;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
	gb->memory.hdmaEvent.priority = 0x41;

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));

	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
	GBMBCInit(gb);
	switch (gb->memory.mbcType) {
	case GB_MBC1:
		gb->memory.mbcState.mbc1.mode = 0;
		gb->memory.mbcState.mbc1.bankLo = 1;
		break;
	case GB_MBC6:
		GBMBCSwitchHalfBank(gb, 0, 2);
		GBMBCSwitchHalfBank(gb, 1, 3);
		gb->memory.mbcState.mbc6.sramAccess = false;
		// Map both 4 KiB SRAM halves: half 0 to bank 0, half 1 to bank 1
		GBMBCSwitchSramHalfBank(gb, 0, 0);
		GBMBCSwitchSramHalfBank(gb, 1, 1);
		break;
	case GB_MMM01:
		GBMBCSwitchBank0(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 2);
		GBMBCSwitchBank(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 1);
		break;
	default:
		break;
	}
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

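// SVBK bank switching: only the low 3 bits are honored, and selecting bank 0
// maps bank 1 instead, so the switchable window at 0xD000 never aliases the
// fixed bank at 0xC000.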
void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

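// Full (slow-path) bus decode for reads: the top nybble of the address selects
// the region. OAM DMA blocking is applied first; VRAM and OAM reads honor the
// current PPU mode, and out-of-range ROM reads and open-bus areas return 0xFF.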
uint8_t GBLoad8(struct SM83Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_IO) {
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		if (address >= memory->romSize) {
			return 0xFF;
		}
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (memory->mbcType == GB_MBC6) {
			return memory->mbcState.mbc6.romBank1[address & (GB_SIZE_CART_HALFBANK - 1)];
		}
		// Fall through
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
		if (address >= memory->romSize) {
			return 0xFF;
		}
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (gb->video.mode != 3) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		}
		return 0xFF;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->sramAccess && memory->sram) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

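// Bus decode for writes. Writes to the ROM area are routed to the MBC, which
// may remap banks, so the CPU's cached active region is refreshed afterwards.
// VRAM and OAM writes are dropped while the PPU has the memory locked, and
// any external-RAM write marks SRAM dirty for the save-sync logic.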
void GBStore8(struct SM83Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbcWrite(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (gb->video.mode != 3) {
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		}
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram && memory->directSramAccess) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

int GBCurrentSegment(struct SM83Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return 0;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->currentBank;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramCurrentBank;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		return memory->sramCurrentBank;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return 0;
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramCurrentBank;
	default:
		return 0;
	}
}

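// Debugger/tool view of the bus: like GBLoad8 but without OAM DMA blocking,
// and with a segment parameter that selects a specific ROM/VRAM/SRAM/WRAM
// bank to inspect; a negative segment uses the currently mapped bank.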
uint8_t GBView8(struct SM83Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0 && memory->sram) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			if (gb->video.mode < 2) {
				switch (gb->model) {
				case GB_MODEL_AGB:
					return (address & 0xF0) | ((address >> 4) & 0xF);
				case GB_MODEL_CGB:
					// TODO: R/W behavior
					return 0x00;
				default:
					return 0x00;
				}
			}
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

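// Start OAM DMA: 0xA0 bytes are copied to OAM by _GBMemoryDMAService, one byte
// every 4 cycles, beginning 8 cycles after the trigger. Sources in the echo
// region (0xE000 and up) are folded back onto WRAM.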
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base >= 0xE000) {
		base &= 0xDFFF;
	}
	mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

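// HDMA5 write (CGB only): the source and destination latched from HDMA1-HDMA4
// are masked to 16-byte alignment, with the destination forced into VRAM. Bit
// 7 clear starts a general-purpose DMA that copies the whole block at once
// while the CPU is blocked; bit 7 set starts HBlank DMA, which copies 0x10
// bytes per HBlank. The return value is what should be left visible in HDMA5.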
uint8_t GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return value | 0x80;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || (GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC]) && gb->video.mode == 0)) {
		if (gb->memory.isHdma) {
			gb->memory.hdmaRemaining = 0x10;
		} else {
			gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		}
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
	} else if (gb->memory.isHdma && !GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC])) {
		return 0x80 | ((value + 1) & 0x7F);
	}
	return value & 0x7F;
}

void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	int dmaRemaining = gb->memory.dmaRemaining;
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}

void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}

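// Patch a byte as the CPU would see it (used for cheats and patches). ROM
// patches go through _pristineCow first so the original memory-mapped image
// is never modified in place; the previous value is reported through |old|.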
void GBPatch8(struct SM83Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			// Write through the shared VRAM buffer, not the currently banked view
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram && memory->mbcType != GB_MBC2) {
			// TODO: Remove sramAccess check?
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
			gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

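// Savestate support: raw WRAM/HRAM contents, banking registers, DMA/HDMA
// progress and MBC-specific state are serialized; pending event times are
// stored relative to the current mTiming timestamp so they can be rescheduled
// on load.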
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
		state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
		state->memory.mbc1.bankLo = memory->mbcState.mbc1.bankLo;
		state->memory.mbc1.bankHi = memory->mbcState.mbc1.bankHi;
		break;
	case GB_MBC3_RTC:
		STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		state->memory.mbc7.state = memory->mbcState.mbc7.state;
		state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
		state->memory.mbc7.address = memory->mbcState.mbc7.address;
		state->memory.mbc7.access = memory->mbcState.mbc7.access;
		state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
		state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
		STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	case GB_MMM01:
		state->memory.mmm01.locked = memory->mbcState.mmm01.locked;
		state->memory.mmm01.bank0 = memory->mbcState.mmm01.currentBank0;
		break;
	default:
		break;
	}
}

void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	} else {
		memory->dmaEvent.when = when + mTimingCurrentTime(&gb->timing);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	} else {
		memory->hdmaEvent.when = when + mTimingCurrentTime(&gb->timing);
	}

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
		memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
		memory->mbcState.mbc1.bankLo = state->memory.mbc1.bankLo;
		memory->mbcState.mbc1.bankHi = state->memory.mbc1.bankHi;
		if (!(memory->mbcState.mbc1.bankLo || memory->mbcState.mbc1.bankHi)) {
			// Backwards compat
			memory->mbcState.mbc1.bankLo = memory->currentBank & ((1 << memory->mbcState.mbc1.multicartStride) - 1);
			memory->mbcState.mbc1.bankHi = memory->currentBank >> memory->mbcState.mbc1.multicartStride;
		}
		if (memory->mbcState.mbc1.mode) {
			GBMBCSwitchBank0(gb, memory->mbcState.mbc1.bankHi);
		}
		break;
	case GB_MBC3_RTC:
		LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		memory->mbcState.mbc7.state = state->memory.mbc7.state;
		memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
		memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
		memory->mbcState.mbc7.access = state->memory.mbc7.access;
		memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
		memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
		LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	case GB_MMM01:
		memory->mbcState.mmm01.locked = state->memory.mmm01.locked;
		memory->mbcState.mmm01.currentBank0 = state->memory.mmm01.bank0;
		if (memory->mbcState.mmm01.locked) {
			GBMBCSwitchBank0(gb, memory->mbcState.mmm01.currentBank0);
		} else {
			GBMBCSwitchBank0(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 2);
		}
		break;
	default:
		break;
	}
}

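// Copy-on-write before ROM patching: if the loaded ROM is still the pristine
// memory-mapped file, replace it with an anonymous copy padded with 0xFF up to
// GB_SIZE_CART_MAX, then re-resolve the current bank pointers.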
void _pristineCow(struct GB* gb) {
	if (!gb->isPristine) {
		return;
	}
	void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(newRom, gb->memory.rom, gb->memory.romSize);
	memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	if (gb->memory.rom == gb->memory.romBase) {
		gb->memory.romBase = newRom;
	}
	gb->memory.rom = newRom;
	GBMBCSwitchBank(gb, gb->memory.currentBank);
	gb->isPristine = false;
}