src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <mgba/internal/gb/memory.h>

#include <mgba/core/interface.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/io.h>
#include <mgba/internal/gb/mbc.h>
#include <mgba/internal/gb/serialize.h>
#include <mgba/internal/lr35902/lr35902.h>

#include <mgba-util/memory.h>

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");

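// Coarse bus map used to emulate OAM DMA bus conflicts: each entry covers an
// 8KiB (0x2000-byte) slice of the address space. While OAM DMA is active, CPU
// accesses on the same bus as the DMA source are blocked and reads return 0xFF
// (see GBLoad8/GBStore8). On CGB, 0xC000-0xDFFF sits on its own RAM bus, so
// WRAM remains accessible while a DMA reads from ROM.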
enum GBBus {
	GB_BUS_CPU,
	GB_BUS_MAIN,
	GB_BUS_VRAM,
	GB_BUS_RAM
};

static const enum GBBus _oamBlockDMG[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_MAIN, // 0xC000
	GB_BUS_CPU, // 0xE000
};

static const enum GBBus _oamBlockCGB[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_RAM, // 0xC000
	GB_BUS_CPU // 0xE000
};

static void _pristineCow(struct GB* gba);

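// Fast path for opcode fetches: while the address stays inside the cached ROM
// region, GBFastLoad8 reads directly through activeRegion/activeMask instead
// of the full GBLoad8 dispatcher. GBSetActiveRegion refreshes the cache when
// execution crosses into another region.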
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBank;
		cpu->memory.activeRegionEnd = GB_BASE_VRAM;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.currentSegment = GBCurrentSegment;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbcRead = NULL;
	gb->memory.mbcWrite = NULL;

	gb->memory.rtc = NULL;
	gb->memory.rotation = NULL;
	gb->memory.rumble = NULL;
	gb->memory.cam = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

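// (Re)allocates WRAM and resets bank mappings, IME/IE, DMA/HDMA and MBC state.
// On CGB models the fresh WRAM is filled with an alternating 8-byte pattern
// whose polarity flips every 0x800 bytes, approximating the semi-predictable
// contents uninitialized CGB WRAM tends to power up with.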
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	if (gb->model >= GB_MODEL_CGB) {
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.dmaEvent.priority = 0x40;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
	gb->memory.hdmaEvent.priority = 0x41;

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	switch (gb->memory.mbcType) {
	case GB_MBC1:
		gb->memory.mbcState.mbc1.mode = 0;
		break;
	default:
		memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
	}

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

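// Selects the switchable WRAM bank mapped at 0xD000 (the CGB SVBK register's
// behaviour): only the low three bits are used and a value of 0 selects bank 1.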
void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

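// Full bus read. While OAM DMA is in progress, accesses that land on the same
// bus as the DMA source (or on OAM itself) read back 0xFF; otherwise the top
// nybble of the address selects ROM, VRAM, external RAM, WRAM, echo RAM, OAM,
// I/O, HRAM or the IE register.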
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (memory->mbcType == GB_MBC6) {
			return memory->mbcState.mbc6.romBank1[address & (GB_SIZE_CART_HALFBANK - 1)];
		}
		// Fall through
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->sramAccess && memory->sram) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

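// Full bus write. Writes into the ROM range are routed to the MBC (which may
// remap banks, hence the setActiveRegion refresh), VRAM and OAM writes are
// forwarded to the video renderer, and external RAM writes flag the SRAM as
// dirty.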
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbcWrite(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram && memory->mbcType != GB_MBC2) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return 0;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->currentBank;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramCurrentBank;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		return memory->sramCurrentBank;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return 0;
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramCurrentBank;
	default:
		return 0;
	}
}

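// Side-effect-free read used by the debugger: like GBLoad8 but without the DMA
// blocking, and with an explicit segment so any ROM/VRAM/SRAM/WRAM bank can be
// inspected regardless of what is currently mapped (segment < 0 means the
// current bank).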
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0 && memory->sram) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

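// Starts OAM DMA (a write to register 0xFF46): 0xA0 bytes are copied from base
// to OAM, with the first byte transferred 8 cycles after the trigger. Sources
// above 0xF100 are ignored.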
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

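// Handles writes to HDMA5 (0xFF55). Bit 7 selects HBlank DMA (one 0x10-byte
// block per HBlank) versus general-purpose DMA (the whole transfer at once,
// with the CPU blocked); the low 7 bits give the length in 0x10-byte blocks,
// minus one. The return value is presumably stored back into the register by
// the I/O handler so it reflects the remaining length and the active bit.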
uint8_t GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return value | 0x80;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || gb->video.mode == 0) {
		if (gb->memory.isHdma) {
			gb->memory.hdmaRemaining = 0x10;
		} else {
			gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		}
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
	} else if (gb->memory.isHdma && !GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC])) {
		return 0x80 | ((value + 1) & 0x7F);
	}
	return value & 0x7F;
}

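// Timing callback for OAM DMA: copies one byte per invocation and reschedules
// itself every 4 cycles until all 0xA0 bytes are done. dmaRemaining is cleared
// around the GBLoad8 call so the DMA's own read is not blocked by the bus
// conflict check above.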
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	int dmaRemaining = gb->memory.dmaRemaining;
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}

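// Timing callback for HDMA/GDMA: copies one byte every 2 cycles while the CPU
// is blocked, then writes the updated source/destination back into HDMA1-4.
// In HBlank mode HDMA5 is decremented after each completed block and the
// transfer stops once it underflows to 0xFF.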
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}

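// Debugger/cheat patching: writes a byte straight into backing memory,
// bypassing the MBC and access checks, optionally into a specific segment, and
// reports the value it replaced through *old. ROM patches trigger a
// copy-on-write of the pristine ROM first (_pristineCow).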
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
			gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

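// Savestate support: these two functions copy WRAM, HRAM, bank selections,
// DMA/HDMA progress, RTC registers and per-MBC state to and from the packed
// GBSerializedState layout, with multi-byte fields stored little-endian.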
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
		state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
		break;
	case GB_MBC3_RTC:
		STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		state->memory.mbc7.state = memory->mbcState.mbc7.state;
		state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
		state->memory.mbc7.address = memory->mbcState.mbc7.address;
		state->memory.mbc7.access = memory->mbcState.mbc7.access;
		state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
		state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
		STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	default:
		break;
	}
}

void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
		memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
		if (memory->mbcState.mbc1.mode) {
			GBMBCSwitchBank0(gb, memory->currentBank >> memory->mbcState.mbc1.multicartStride);
		}
		break;
	case GB_MBC3_RTC:
		LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		memory->mbcState.mbc7.state = state->memory.mbc7.state;
		memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
		memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
		memory->mbcState.mbc7.access = state->memory.mbc7.access;
		memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
		memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
		LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	default:
		break;
	}
}

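// Copy-on-write for pristine ROM (presumably still mapped straight from the
// ROM file): the first time the ROM is patched, copy it into an anonymous
// mapping padded with 0xFF up to GB_SIZE_CART_MAX so the original mapping is
// never modified, then fix up the base/bank pointers to point into the copy.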
void _pristineCow(struct GB* gb) {
	if (!gb->isPristine) {
		return;
	}
	void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(newRom, gb->memory.rom, gb->memory.romSize);
	memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	if (gb->memory.rom == gb->memory.romBase) {
		gb->memory.romBase = newRom;
	}
	gb->memory.rom = newRom;
	GBMBCSwitchBank(gb, gb->memory.currentBank);
	gb->isPristine = false;
}