src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <mgba/internal/gb/memory.h>

#include <mgba/core/interface.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/io.h>
#include <mgba/internal/gb/mbc.h>
#include <mgba/internal/gb/serialize.h>
#include <mgba/internal/lr35902/lr35902.h>

#include <mgba-util/memory.h>

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");

enum GBBus {
	GB_BUS_CPU,
	GB_BUS_MAIN,
	GB_BUS_VRAM,
	GB_BUS_RAM
};

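// Which bus each 8 KiB block of the address space sits on. While OAM DMA is
// active, CPU accesses to the same bus as the DMA source come back as 0xFF
// (see GBLoad8/GBStore8 below). Per these tables, on DMG the cartridge and
// WRAM regions share the main bus, while on CGB WRAM has its own bus.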
static const enum GBBus _oamBlockDMG[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_MAIN, // 0xC000
	GB_BUS_CPU, // 0xE000
};

static const enum GBBus _oamBlockCGB[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_RAM, // 0xC000
	GB_BUS_CPU // 0xE000
};

static void _pristineCow(struct GB* gb);

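// Fast path for instruction fetches: reads straight out of the cached
// activeRegion pointer and only falls back to setActiveRegion + cpuLoad8 when
// the fetch crosses out of the currently mapped region.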
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

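// Remaps the cached fetch region for the CPU. ROM bank 0 and the switchable
// bank get the fast loader; MBC6 splits the 0x4000-0x7FFF window into two
// 8 KiB half-banks, so the region and mask are narrowed accordingly. Anything
// outside ROM falls back to the full GBLoad8 path.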
static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		if (gb->memory.mbcType != GB_MBC6) {
			cpu->memory.activeRegion = memory->romBank;
			cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		} else {
			cpu->memory.activeMask = GB_SIZE_CART_HALFBANK - 1;
			if (address & 0x2000) {
				cpu->memory.activeRegion = memory->mbcState.mbc6.romBank1;
				cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			} else {
				cpu->memory.activeRegion = memory->romBank;
				cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1 + 0x2000;
			}
		}
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.currentSegment = GBCurrentSegment;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbcRead = NULL;
	gb->memory.mbcWrite = NULL;

	gb->memory.rtc = NULL;
	gb->memory.rotation = NULL;
	gb->memory.rumble = NULL;
	gb->memory.cam = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

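// Reinitializes memory state for a reset: remaps WRAM (filling it with an
// alternating pattern on CGB models), rebinds the default ROM and SRAM banks,
// clears DMA/HDMA state, and reapplies per-MBC reset defaults.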
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	if (gb->model >= GB_MODEL_CGB) {
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.dmaEvent.priority = 0x40;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
	gb->memory.hdmaEvent.priority = 0x41;

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));

	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
	GBMBCInit(gb);
	switch (gb->memory.mbcType) {
	case GB_MBC1:
		gb->memory.mbcState.mbc1.mode = 0;
		break;
	case GB_MBC6:
		GBMBCSwitchHalfBank(gb, 0, 2);
		GBMBCSwitchHalfBank(gb, 1, 3);
		gb->memory.mbcState.mbc6.sramAccess = false;
		GBMBCSwitchSramHalfBank(gb, 0, 0);
		GBMBCSwitchSramHalfBank(gb, 1, 1);
		break;
	case GB_MMM01:
		GBMBCSwitchBank0(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 2);
		GBMBCSwitchBank(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 1);
		break;
	default:
		break;
	}
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

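// Generic (slow-path) bus read. While OAM DMA is running, reads that land on
// the same bus as the DMA source, or anywhere in OAM, come back as 0xFF;
// otherwise the address is decoded by its top nybble into ROM, VRAM, external
// RAM, WRAM, OAM, I/O, HRAM, or IE.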
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (memory->mbcType == GB_MBC6) {
			return memory->mbcState.mbc6.romBank1[address & (GB_SIZE_CART_HALFBANK - 1)];
		}
		// Fall through
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (gb->video.mode != 3) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		}
		return 0xFF;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->sramAccess && memory->sram) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

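// Generic bus write. Applies the same OAM DMA bus-conflict rules as GBLoad8,
// routes ROM-range writes to the MBC handler (which may remap banks, so the
// cached fetch region is refreshed afterwards), blocks VRAM writes during
// mode 3, and marks SRAM dirty on external-RAM writes.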
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbcWrite(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (gb->video.mode != 3) {
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		}
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram && memory->mbcType != GB_MBC2) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return 0;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->currentBank;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramCurrentBank;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		return memory->sramCurrentBank;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return 0;
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramCurrentBank;
	default:
		return 0;
	}
}

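// Debugger-style read that does not disturb emulation state. A negative
// segment reads whatever bank is currently mapped; a non-negative segment
// indexes a specific ROM/VRAM/SRAM/WRAM bank directly, returning 0xFF when the
// requested bank is out of range.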
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0 && memory->sram) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

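// Starts OAM DMA for the 0xA0-byte transfer beginning at `base`. The first
// byte is copied by _GBMemoryDMAService 8 cycles from now, so the CPU's next
// event is pulled in if necessary. Sources above 0xF100 are ignored.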
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

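// Handles a write to HDMA5 (CGB VRAM DMA control). Bit 7 selects HBlank DMA
// (one 0x10-byte block per HBlank) versus general-purpose DMA (the whole
// transfer at once, with the CPU blocked). Returns the value that should be
// reflected in HDMA5 afterwards.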
uint8_t GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return value | 0x80;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || (GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC]) && gb->video.mode == 0)) {
		if (gb->memory.isHdma) {
			gb->memory.hdmaRemaining = 0x10;
		} else {
			gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		}
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
	} else if (gb->memory.isHdma && !GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC])) {
		return 0x80 | ((value + 1) & 0x7F);
	}
	return value & 0x7F;
}

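// Timing callback for OAM DMA: copies one byte from dmaSource into OAM per
// event and reschedules itself every 4 cycles until all 0xA0 bytes are done.
// dmaRemaining is cleared around the GBLoad8 call so the read itself is not
// treated as a DMA-blocked access.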
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	int dmaRemaining = gb->memory.dmaRemaining;
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}

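// Timing callback for HDMA/GDMA: moves one byte every 2 cycles while the CPU
// stays blocked. When the current block finishes, the source and destination
// registers are written back and HDMA5 is updated (decremented for HBlank DMA,
// set to 0xFF when a general-purpose DMA completes).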
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}

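// Patches a single byte in place without going through the normal bus (e.g.
// for cheats or ROM patches). ROM patches first trigger a copy-on-write of the
// pristine ROM mapping (_pristineCow) so the original image is untouched; the
// previous value is returned through `old` when the caller provides it.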
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
			gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
		state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
		break;
	case GB_MBC3_RTC:
		STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		state->memory.mbc7.state = memory->mbcState.mbc7.state;
		state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
		state->memory.mbc7.address = memory->mbcState.mbc7.address;
		state->memory.mbc7.access = memory->mbcState.mbc7.access;
		state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
		state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
		STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	case GB_MMM01:
		state->memory.mmm01.locked = memory->mbcState.mmm01.locked;
		state->memory.mmm01.bank0 = memory->mbcState.mmm01.currentBank0;
		break;
	default:
		break;
	}
}

void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
		memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
		if (memory->mbcState.mbc1.mode) {
			GBMBCSwitchBank0(gb, memory->currentBank >> memory->mbcState.mbc1.multicartStride);
		}
		break;
	case GB_MBC3_RTC:
		LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		memory->mbcState.mbc7.state = state->memory.mbc7.state;
		memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
		memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
		memory->mbcState.mbc7.access = state->memory.mbc7.access;
		memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
		memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
		LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	case GB_MMM01:
		memory->mbcState.mmm01.locked = state->memory.mmm01.locked;
		memory->mbcState.mmm01.currentBank0 = state->memory.mmm01.bank0;
		if (memory->mbcState.mmm01.locked) {
			GBMBCSwitchBank0(gb, memory->mbcState.mmm01.currentBank0);
		} else {
			GBMBCSwitchBank0(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 2);
		}
		break;
	default:
		break;
	}
}

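// Copy-on-write helper for ROM patching: the first time the ROM is modified,
// the pristine memory-mapped image is replaced with a private copy padded to
// GB_SIZE_CART_MAX with 0xFF, and the bank pointers are rebased onto it.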
void _pristineCow(struct GB* gb) {
	if (!gb->isPristine) {
		return;
	}
	void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(newRom, gb->memory.rom, gb->memory.romSize);
	memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	if (gb->memory.rom == gb->memory.romBase) {
		gb->memory.romBase = newRom;
	}
	gb->memory.rom = newRom;
	GBMBCSwitchBank(gb, gb->memory.currentBank);
	gb->isPristine = false;
}