src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <mgba/internal/gb/memory.h>

#include <mgba/core/interface.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/io.h>
#include <mgba/internal/gb/mbc.h>
#include <mgba/internal/gb/serialize.h>
#include <mgba/internal/sm83/sm83.h>

#include <mgba-util/memory.h>

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");

static const uint8_t _yankBuffer[] = { 0xFF };

enum GBBus {
	GB_BUS_CPU,
	GB_BUS_MAIN,
	GB_BUS_VRAM,
	GB_BUS_RAM
};

static const enum GBBus _oamBlockDMG[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_MAIN, // 0xC000
	GB_BUS_CPU, // 0xE000
};

static const enum GBBus _oamBlockCGB[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_RAM, // 0xC000
	GB_BUS_CPU // 0xE000
};

static const uint8_t _blockedRegion[1] = { 0xFF };

static void _pristineCow(struct GB* gba);

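// Fast path for opcode fetches: reads straight out of the cached activeRegion
// pointer and only falls back to a region remap (and a slow load) when the
// address runs past the cached range.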
static uint8_t GBFastLoad8(struct SM83Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

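// Selects the backing buffer used for fast opcode fetches based on the top
// nibble of the address: ROM bank 0, the switchable ROM bank (or an MBC6
// half-bank), or the generic slow path for everything else.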
static void GBSetActiveRegion(struct SM83Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		if (gb->memory.romSize < GB_SIZE_CART_BANK0) {
			if (address >= gb->memory.romSize) {
				cpu->memory.activeRegion = _yankBuffer;
				cpu->memory.activeMask = 0;
			} else {
				cpu->memory.activeRegionEnd = gb->memory.romSize;
			}
		}
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		if (gb->memory.mbcType != GB_MBC6) {
			cpu->memory.activeRegion = memory->romBank;
			cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		} else {
			cpu->memory.activeMask = GB_SIZE_CART_HALFBANK - 1;
			if (address & 0x2000) {
				cpu->memory.activeRegion = memory->mbcState.mbc6.romBank1;
				cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			} else {
				cpu->memory.activeRegion = memory->romBank;
				cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1 + 0x2000;
			}
		}
		if (gb->memory.romSize < GB_SIZE_CART_BANK0 * 2) {
			if (address >= gb->memory.romSize) {
				cpu->memory.activeRegion = _yankBuffer;
				cpu->memory.activeMask = 0;
			} else {
				cpu->memory.activeRegionEnd = gb->memory.romSize;
			}
		}
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
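	// While OAM DMA is in flight, fetches that land on the same bus as the
	// DMA source (or on OAM itself) are forced to read 0xFF, so point the
	// active region at a one-byte stub buffer.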
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if ((dmaBus != GB_BUS_CPU && dmaBus == accessBus) || (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE)) {
			cpu->memory.activeRegion = _blockedRegion;
			cpu->memory.activeMask = 0;
		}
	}
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

void GBMemoryInit(struct GB* gb) {
	struct SM83Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.currentSegment = GBCurrentSegment;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbcRead = NULL;
	gb->memory.mbcWrite = NULL;

	gb->memory.rtc = NULL;
	gb->memory.rotation = NULL;
	gb->memory.rumble = NULL;
	gb->memory.cam = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

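// Restore the memory subsystem to its power-on state: reallocate WRAM, reset
// banking and DMA/HDMA bookkeeping, and reinitialize the MBC-specific state.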
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
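	// Fill WRAM with an alternating pattern, roughly approximating the
	// striped contents CGB units appear to power up with instead of zeroes.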
	if (gb->model >= GB_MODEL_CGB) {
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.dmaEvent.priority = 0x40;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
	gb->memory.hdmaEvent.priority = 0x41;

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));

	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
	GBMBCInit(gb);
	switch (gb->memory.mbcType) {
	case GB_MBC1:
		gb->memory.mbcState.mbc1.mode = 0;
		break;
	case GB_MBC6:
		GBMBCSwitchHalfBank(gb, 0, 2);
		GBMBCSwitchHalfBank(gb, 1, 3);
		gb->memory.mbcState.mbc6.sramAccess = false;
		GBMBCSwitchSramHalfBank(gb, 0, 0);
		GBMBCSwitchSramHalfBank(gb, 0, 1);
		break;
	case GB_MMM01:
		GBMBCSwitchBank0(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 2);
		GBMBCSwitchBank(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 1);
		break;
	default:
		break;
	}
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

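// Full bus read used for data loads (and as the slow fetch path). Decodes the
// address by its top nibble, honors OAM DMA bus conflicts, and applies the
// per-region access rules (PPU mode gating for VRAM/OAM, MBC hooks for SRAM).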
uint8_t GBLoad8(struct SM83Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_IO) {
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		if (address >= memory->romSize) {
			return 0xFF;
		}
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (memory->mbcType == GB_MBC6) {
			return memory->mbcState.mbc6.romBank1[address & (GB_SIZE_CART_HALFBANK - 1)];
		}
		// Fall through
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
		if (address >= memory->romSize) {
			return 0xFF;
		}
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (gb->video.mode != 3) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		}
		return 0xFF;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->sramAccess && memory->sram) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

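// Full bus write. ROM-area writes are routed to the MBC, which may rebank, so
// the cached fetch region is refreshed afterwards; other regions apply the
// same DMA and PPU access gating as reads.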
void GBStore8(struct SM83Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbcWrite(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (gb->video.mode != 3) {
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		}
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram && memory->mbcType != GB_MBC2) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

int GBCurrentSegment(struct SM83Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return 0;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->currentBank;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramCurrentBank;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		return memory->sramCurrentBank;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return 0;
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramCurrentBank;
	default:
		return 0;
	}
}

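// Debugger/viewer read path: decodes the address like GBLoad8 but skips the
// OAM DMA bus-conflict handling. A negative segment means "the currently
// mapped bank"; otherwise the requested bank is read straight from the
// backing buffer when it exists.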
uint8_t GBView8(struct SM83Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0 && memory->sram) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			if (gb->video.mode < 2) {
				switch (gb->model) {
				case GB_MODEL_AGB:
					return (address & 0xF0) | ((address >> 4) & 0xF);
				case GB_MODEL_CGB:
					// TODO: R/W behavior
					return 0x00;
				default:
					return 0x00;
				}
			}
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

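// Begin an OAM DMA transfer: 0xA0 bytes starting at `base` are copied into
// OAM one byte at a time by _GBMemoryDMAService, which is scheduled below.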
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

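// Handle a write to HDMA5 on CGB: bit 7 selects H-Blank DMA (one 0x10-byte
// chunk per H-Blank) versus general-purpose DMA (the whole transfer at once);
// the low 7 bits encode the length in 0x10-byte units, minus one.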
uint8_t GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return value | 0x80;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || (GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC]) && gb->video.mode == 0)) {
		if (gb->memory.isHdma) {
			gb->memory.hdmaRemaining = 0x10;
		} else {
			gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		}
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
	} else if (gb->memory.isHdma && !GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC])) {
		return 0x80 | ((value + 1) & 0x7F);
	}
	return value & 0x7F;
}

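// Timing callback for OAM DMA: copies one byte from the source into OAM per
// invocation and reschedules itself every 4 cycles until 0xA0 bytes are done.
// dmaRemaining is cleared around the read so the source fetch is not itself
// rejected by the DMA-in-progress checks in GBLoad8.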
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	int dmaRemaining = gb->memory.dmaRemaining;
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}

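// Timing callback for HDMA/GDMA: moves one byte per invocation while keeping
// the CPU blocked, then writes the updated source and destination back into
// the HDMA registers once the current chunk finishes.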
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}

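// Apply a patch byte (cheats, ROM patches) directly to the addressed region,
// bypassing MBC write behavior for ROM, and report the previous value through
// `old`. ROM patches force a copy-on-write of the pristine ROM mapping first.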
void GBPatch8(struct SM83Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram && memory->mbcType != GB_MBC2) {
			// TODO: Remove sramAccess check?
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
			gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

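// Savestate support: serialize and deserialize the memory subsystem,
// including WRAM/HRAM contents, banking registers, DMA/HDMA progress, and
// per-MBC state.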
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
		state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
		state->memory.mbc1.bankLo = memory->mbcState.mbc1.bankLo;
		state->memory.mbc1.bankHi = memory->mbcState.mbc1.bankHi;
		break;
	case GB_MBC3_RTC:
		STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		state->memory.mbc7.state = memory->mbcState.mbc7.state;
		state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
		state->memory.mbc7.address = memory->mbcState.mbc7.address;
		state->memory.mbc7.access = memory->mbcState.mbc7.access;
		state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
		state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
		STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	case GB_MMM01:
		state->memory.mmm01.locked = memory->mbcState.mmm01.locked;
		state->memory.mmm01.bank0 = memory->mbcState.mmm01.currentBank0;
		break;
	default:
		break;
	}
}

void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
		memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
		memory->mbcState.mbc1.bankLo = state->memory.mbc1.bankLo;
		memory->mbcState.mbc1.bankHi = state->memory.mbc1.bankHi;
		if (!(memory->mbcState.mbc1.bankLo || memory->mbcState.mbc1.bankHi)) {
			// Backwards compat
			memory->mbcState.mbc1.bankLo = memory->currentBank & ((1 << memory->mbcState.mbc1.multicartStride) - 1);
			memory->mbcState.mbc1.bankHi = memory->currentBank >> memory->mbcState.mbc1.multicartStride;
		}
		if (memory->mbcState.mbc1.mode) {
			GBMBCSwitchBank0(gb, memory->mbcState.mbc1.bankHi);
		}
		break;
	case GB_MBC3_RTC:
		LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		memory->mbcState.mbc7.state = state->memory.mbc7.state;
		memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
		memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
		memory->mbcState.mbc7.access = state->memory.mbc7.access;
		memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
		memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
		LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	case GB_MMM01:
		memory->mbcState.mmm01.locked = state->memory.mmm01.locked;
		memory->mbcState.mmm01.currentBank0 = state->memory.mmm01.bank0;
		if (memory->mbcState.mmm01.locked) {
			GBMBCSwitchBank0(gb, memory->mbcState.mmm01.currentBank0);
		} else {
			GBMBCSwitchBank0(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 2);
		}
		break;
	default:
		break;
	}
}

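// Copy-on-write helper for ROM patching: if the ROM is still the pristine
// memory-mapped image, clone it into an anonymous mapping (padded with 0xFF)
// so patches do not touch the original backing file.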
void _pristineCow(struct GB* gb) {
	if (!gb->isPristine) {
		return;
	}
	void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(newRom, gb->memory.rom, gb->memory.romSize);
	memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	if (gb->memory.rom == gb->memory.romBase) {
		gb->memory.romBase = newRom;
	}
	gb->memory.rom = newRom;
	GBMBCSwitchBank(gb, gb->memory.currentBank);
	gb->isPristine = false;
857}