src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <mgba/internal/gb/memory.h>

#include <mgba/core/interface.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/io.h>
#include <mgba/internal/gb/mbc.h>
#include <mgba/internal/gb/serialize.h>
#include <mgba/internal/lr35902/lr35902.h>

#include <mgba-util/memory.h>

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");

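// OAM DMA occupies one of the memory buses, so CPU accesses that land on the
// same bus read back as 0xFF while a transfer is in flight. These tables give,
// per 8 KiB region of the DMA source address, the address range that is
// treated as blocked; the DMG and CGB variants differ because the two models
// appear to route the external and WRAM buses differently.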
struct OAMBlock {
    uint16_t low;
    uint16_t high;
};

static const struct OAMBlock _oamBlockDMG[] = {
    { 0xA000, 0xFE00 },
    { 0xA000, 0xFE00 },
    { 0xA000, 0xFE00 },
    { 0xA000, 0xFE00 },
    { 0x8000, 0xA000 },
    { 0xA000, 0xFE00 },
    { 0xA000, 0xFE00 },
    { 0xA000, 0xFE00 },
};

static const struct OAMBlock _oamBlockCGB[] = {
    { 0xA000, 0xC000 },
    { 0xA000, 0xC000 },
    { 0xA000, 0xC000 },
    { 0xA000, 0xC000 },
    { 0x8000, 0xA000 },
    { 0xA000, 0xC000 },
    { 0xC000, 0xFE00 },
    { 0xA000, 0xC000 },
};

static void _pristineCow(struct GB* gb);

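// Fast path for CPU reads: fetches directly from the currently mapped ROM
// region until the address leaves it, then recomputes the active region and
// retries through the full load path.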
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
    if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
        cpu->memory.setActiveRegion(cpu, address);
        return cpu->memory.cpuLoad8(cpu, address);
    }
    return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

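// Selects the flat buffer used by GBFastLoad8 based on the top nibble of the
// address. Only the two cartridge ROM regions get a fast mapping; every other
// region falls back to the general GBLoad8 handler.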
static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
    struct GB* gb = (struct GB*) cpu->master;
    struct GBMemory* memory = &gb->memory;
    switch (address >> 12) {
    case GB_REGION_CART_BANK0:
    case GB_REGION_CART_BANK0 + 1:
    case GB_REGION_CART_BANK0 + 2:
    case GB_REGION_CART_BANK0 + 3:
        cpu->memory.cpuLoad8 = GBFastLoad8;
        cpu->memory.activeRegion = memory->romBase;
        cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
        cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
        break;
    case GB_REGION_CART_BANK1:
    case GB_REGION_CART_BANK1 + 1:
    case GB_REGION_CART_BANK1 + 2:
    case GB_REGION_CART_BANK1 + 3:
        cpu->memory.cpuLoad8 = GBFastLoad8;
        cpu->memory.activeRegion = memory->romBank;
        cpu->memory.activeRegionEnd = GB_BASE_VRAM;
        cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
        break;
    default:
        cpu->memory.cpuLoad8 = GBLoad8;
        break;
    }
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

void GBMemoryInit(struct GB* gb) {
    struct LR35902Core* cpu = gb->cpu;
    cpu->memory.cpuLoad8 = GBLoad8;
    cpu->memory.load8 = GBLoad8;
    cpu->memory.store8 = GBStore8;
    cpu->memory.currentSegment = GBCurrentSegment;
    cpu->memory.setActiveRegion = GBSetActiveRegion;

    gb->memory.wram = 0;
    gb->memory.wramBank = 0;
    gb->memory.rom = 0;
    gb->memory.romBank = 0;
    gb->memory.romSize = 0;
    gb->memory.sram = 0;
    gb->memory.mbcType = GB_MBC_AUTODETECT;
    gb->memory.mbcRead = NULL;
    gb->memory.mbcWrite = NULL;

    gb->memory.rtc = NULL;
    gb->memory.rotation = NULL;
    gb->memory.rumble = NULL;
    gb->memory.cam = NULL;

    GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
    mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
    if (gb->memory.rom) {
        mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
    }
}

void GBMemoryReset(struct GB* gb) {
    if (gb->memory.wram) {
        mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
    }
    gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
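    // On CGB, work RAM does not power up zeroed; fill it with an alternating
    // block pattern as an approximation of the hardware's initial contents.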
    if (gb->model >= GB_MODEL_CGB) {
        uint32_t* base = (uint32_t*) gb->memory.wram;
        size_t i;
        uint32_t pattern = 0;
        for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
            if ((i & 0x1FF) == 0) {
                pattern = ~pattern;
            }
            base[i + 0] = pattern;
            base[i + 1] = pattern;
            base[i + 2] = ~pattern;
            base[i + 3] = ~pattern;
        }
    }
    GBMemorySwitchWramBank(&gb->memory, 1);
    gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
    gb->memory.currentBank = 1;
    gb->memory.sramCurrentBank = 0;

    gb->memory.ime = false;
    gb->memory.ie = 0;

    gb->memory.dmaRemaining = 0;
    gb->memory.dmaSource = 0;
    gb->memory.dmaDest = 0;
    gb->memory.hdmaRemaining = 0;
    gb->memory.hdmaSource = 0;
    gb->memory.hdmaDest = 0;
    gb->memory.isHdma = false;

    gb->memory.dmaEvent.context = gb;
    gb->memory.dmaEvent.name = "GB DMA";
    gb->memory.dmaEvent.callback = _GBMemoryDMAService;
    gb->memory.dmaEvent.priority = 0x40;
    gb->memory.hdmaEvent.context = gb;
    gb->memory.hdmaEvent.name = "GB HDMA";
    gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
    gb->memory.hdmaEvent.priority = 0x41;

    memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
    switch (gb->memory.mbcType) {
    case GB_MBC1:
        gb->memory.mbcState.mbc1.mode = 0;
        break;
    default:
        memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
    }

    GBMBCInit(gb);
    gb->memory.sramBank = gb->memory.sram;

    if (!gb->memory.wram) {
        GBMemoryDeinit(gb);
    }
}

void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
    bank &= 7;
    if (!bank) {
        bank = 1;
    }
    memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
    memory->wramCurrentBank = bank;
}

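// General CPU read handler. While OAM DMA is active, reads from the bus the
// DMA source occupies (see the OAM block tables above) and from OAM itself
// return 0xFF instead of real data.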
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
    struct GB* gb = (struct GB*) cpu->master;
    struct GBMemory* memory = &gb->memory;
    if (gb->memory.dmaRemaining) {
        const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
        block = &block[memory->dmaSource >> 13];
        if (address >= block->low && address < block->high) {
            return 0xFF;
        }
        if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
            return 0xFF;
        }
    }
    switch (address >> 12) {
    case GB_REGION_CART_BANK0:
    case GB_REGION_CART_BANK0 + 1:
    case GB_REGION_CART_BANK0 + 2:
    case GB_REGION_CART_BANK0 + 3:
        return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
    case GB_REGION_CART_BANK1 + 2:
    case GB_REGION_CART_BANK1 + 3:
        if (memory->mbcType == GB_MBC6) {
            return memory->mbcState.mbc6.romBank1[address & (GB_SIZE_CART_HALFBANK - 1)];
        }
        // Fall through
    case GB_REGION_CART_BANK1:
    case GB_REGION_CART_BANK1 + 1:
        return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
    case GB_REGION_VRAM:
    case GB_REGION_VRAM + 1:
        return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
    case GB_REGION_EXTERNAL_RAM:
    case GB_REGION_EXTERNAL_RAM + 1:
        if (memory->rtcAccess) {
            return memory->rtcRegs[memory->activeRtcReg];
        } else if (memory->mbcRead) {
            return memory->mbcRead(memory, address);
        } else if (memory->sramAccess && memory->sram) {
            return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
        } else if (memory->mbcType == GB_HuC3) {
            return 0x01; // TODO: Is this supposed to be the current SRAM bank?
        }
        return 0xFF;
    case GB_REGION_WORKING_RAM_BANK0:
    case GB_REGION_WORKING_RAM_BANK0 + 2:
        return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
    case GB_REGION_WORKING_RAM_BANK1:
        return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
    default:
        if (address < GB_BASE_OAM) {
            return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
        }
        if (address < GB_BASE_UNUSABLE) {
            if (gb->video.mode < 2) {
                return gb->video.oam.raw[address & 0xFF];
            }
            return 0xFF;
        }
        if (address < GB_BASE_IO) {
            mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
            return 0xFF;
        }
        if (address < GB_BASE_HRAM) {
            return GBIORead(gb, address & (GB_SIZE_IO - 1));
        }
        if (address < GB_BASE_IE) {
            return memory->hram[address & GB_SIZE_HRAM];
        }
        return GBIORead(gb, REG_IE);
    }
}

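// General CPU write handler. Writes into the ROM address space are MBC
// register writes, so they are forwarded to the mapper and may remap the
// region the PC is executing from.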
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
    struct GB* gb = (struct GB*) cpu->master;
    struct GBMemory* memory = &gb->memory;
    if (gb->memory.dmaRemaining) {
        const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
        block = &block[memory->dmaSource >> 13];
        if (address >= block->low && address < block->high) {
            return;
        }
        if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
            return;
        }
    }
    switch (address >> 12) {
    case GB_REGION_CART_BANK0:
    case GB_REGION_CART_BANK0 + 1:
    case GB_REGION_CART_BANK0 + 2:
    case GB_REGION_CART_BANK0 + 3:
    case GB_REGION_CART_BANK1:
    case GB_REGION_CART_BANK1 + 1:
    case GB_REGION_CART_BANK1 + 2:
    case GB_REGION_CART_BANK1 + 3:
        memory->mbcWrite(gb, address, value);
        cpu->memory.setActiveRegion(cpu, cpu->pc);
        return;
    case GB_REGION_VRAM:
    case GB_REGION_VRAM + 1:
        gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
        gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
        return;
    case GB_REGION_EXTERNAL_RAM:
    case GB_REGION_EXTERNAL_RAM + 1:
        if (memory->rtcAccess) {
            memory->rtcRegs[memory->activeRtcReg] = value;
        } else if (memory->sramAccess && memory->sram) {
            memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
        } else {
            memory->mbcWrite(gb, address, value);
        }
        gb->sramDirty |= GB_SRAM_DIRT_NEW;
        return;
    case GB_REGION_WORKING_RAM_BANK0:
    case GB_REGION_WORKING_RAM_BANK0 + 2:
        memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
        return;
    case GB_REGION_WORKING_RAM_BANK1:
        memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
        return;
    default:
        if (address < GB_BASE_OAM) {
            memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
        } else if (address < GB_BASE_UNUSABLE) {
            if (gb->video.mode < 2) {
                gb->video.oam.raw[address & 0xFF] = value;
                gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
            }
        } else if (address < GB_BASE_IO) {
            mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
        } else if (address < GB_BASE_HRAM) {
            GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
        } else if (address < GB_BASE_IE) {
            memory->hram[address & GB_SIZE_HRAM] = value;
        } else {
            GBIOWrite(gb, REG_IE, value);
        }
    }
}

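// Reports which bank ("segment") is currently mapped at an address, so that
// tools such as the debugger can disambiguate banked addresses.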
int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
    struct GB* gb = (struct GB*) cpu->master;
    struct GBMemory* memory = &gb->memory;
    switch (address >> 12) {
    case GB_REGION_CART_BANK0:
    case GB_REGION_CART_BANK0 + 1:
    case GB_REGION_CART_BANK0 + 2:
    case GB_REGION_CART_BANK0 + 3:
        return 0;
    case GB_REGION_CART_BANK1:
    case GB_REGION_CART_BANK1 + 1:
    case GB_REGION_CART_BANK1 + 2:
    case GB_REGION_CART_BANK1 + 3:
        return memory->currentBank;
    case GB_REGION_VRAM:
    case GB_REGION_VRAM + 1:
        return gb->video.vramCurrentBank;
    case GB_REGION_EXTERNAL_RAM:
    case GB_REGION_EXTERNAL_RAM + 1:
        return memory->sramCurrentBank;
    case GB_REGION_WORKING_RAM_BANK0:
    case GB_REGION_WORKING_RAM_BANK0 + 2:
        return 0;
    case GB_REGION_WORKING_RAM_BANK1:
        return memory->wramCurrentBank;
    default:
        return 0;
    }
}

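// Side-effect-free read used for inspection. A negative segment means
// "whatever is currently mapped"; a non-negative segment selects an explicit
// bank, and reads return 0xFF if that bank is out of range.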
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
    struct GB* gb = (struct GB*) cpu->master;
    struct GBMemory* memory = &gb->memory;
    switch (address >> 12) {
    case GB_REGION_CART_BANK0:
    case GB_REGION_CART_BANK0 + 1:
    case GB_REGION_CART_BANK0 + 2:
    case GB_REGION_CART_BANK0 + 3:
        return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
    case GB_REGION_CART_BANK1:
    case GB_REGION_CART_BANK1 + 1:
    case GB_REGION_CART_BANK1 + 2:
    case GB_REGION_CART_BANK1 + 3:
        if (segment < 0) {
            return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
        } else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
            return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
        } else {
            return 0xFF;
        }
    case GB_REGION_VRAM:
    case GB_REGION_VRAM + 1:
        if (segment < 0) {
            return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
        } else if (segment < 2) {
            return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
        } else {
            return 0xFF;
        }
    case GB_REGION_EXTERNAL_RAM:
    case GB_REGION_EXTERNAL_RAM + 1:
        if (memory->rtcAccess) {
            return memory->rtcRegs[memory->activeRtcReg];
        } else if (memory->sramAccess) {
            if (segment < 0 && memory->sram) {
                return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
            } else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
                return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
            } else {
                return 0xFF;
            }
        } else if (memory->mbcRead) {
            return memory->mbcRead(memory, address);
        } else if (memory->mbcType == GB_HuC3) {
            return 0x01; // TODO: Is this supposed to be the current SRAM bank?
        }
        return 0xFF;
    case GB_REGION_WORKING_RAM_BANK0:
    case GB_REGION_WORKING_RAM_BANK0 + 2:
        return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
    case GB_REGION_WORKING_RAM_BANK1:
        if (segment < 0) {
            return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
        } else if (segment < 8) {
            return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
        } else {
            return 0xFF;
        }
    default:
        if (address < GB_BASE_OAM) {
            return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
        }
        if (address < GB_BASE_UNUSABLE) {
            if (gb->video.mode < 2) {
                return gb->video.oam.raw[address & 0xFF];
            }
            return 0xFF;
        }
        if (address < GB_BASE_IO) {
            mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
            return 0xFF;
        }
        if (address < GB_BASE_HRAM) {
            return GBIORead(gb, address & (GB_SIZE_IO - 1));
        }
        if (address < GB_BASE_IE) {
            return memory->hram[address & GB_SIZE_HRAM];
        }
        return GBIORead(gb, REG_IE);
    }
}

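// Begins an OAM DMA transfer from the written base address. The copy itself
// is performed by the dmaEvent scheduled below, after a short start-up delay.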
void GBMemoryDMA(struct GB* gb, uint16_t base) {
    if (base > 0xF100) {
        return;
    }
    mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
    mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
    if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
        gb->cpu->nextEvent = gb->cpu->cycles + 8;
    }
    gb->memory.dmaSource = base;
    gb->memory.dmaDest = 0;
    gb->memory.dmaRemaining = 0xA0;
}

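// Handles writes to HDMA5 on CGB. Bit 7 selects HBlank DMA (one 0x10-byte
// block at a time) versus general-purpose DMA (the whole transfer at once);
// in both cases the CPU is blocked while the hdmaEvent below runs. Writing
// with bit 7 clear while HBlank DMA is active clears the HDMA flag.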
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
    gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
    gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
    gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
    gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
    gb->memory.hdmaSource &= 0xFFF0;
    if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
        mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
        return;
    }
    gb->memory.hdmaDest &= 0x1FF0;
    gb->memory.hdmaDest |= 0x8000;
    bool wasHdma = gb->memory.isHdma;
    gb->memory.isHdma = value & 0x80;
    if ((!wasHdma && !gb->memory.isHdma) || gb->video.mode == 0) {
        if (gb->memory.isHdma) {
            gb->memory.hdmaRemaining = 0x10;
        } else {
            gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
        }
        gb->cpuBlocked = true;
        mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
    }
}

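// Timing callback for OAM DMA: copies one byte per invocation and reschedules
// itself every 4 cycles until all 0xA0 bytes are written. dmaRemaining is
// temporarily cleared around the source read so the read is not blocked by
// the DMA-in-progress check in GBLoad8.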
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
    struct GB* gb = context;
    int dmaRemaining = gb->memory.dmaRemaining;
    gb->memory.dmaRemaining = 0;
    uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
    // TODO: Can DMA write OAM during modes 2-3?
    gb->video.oam.raw[gb->memory.dmaDest] = b;
    gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
    ++gb->memory.dmaSource;
    ++gb->memory.dmaDest;
    gb->memory.dmaRemaining = dmaRemaining - 1;
    if (gb->memory.dmaRemaining) {
        mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
    }
}

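// Timing callback for CGB (H)DMA: copies one byte every 2 cycles with the CPU
// blocked. Once the current run is finished, the source and destination are
// written back to the HDMA registers and HDMA5 is updated (0xFF indicates
// completion).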
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
    struct GB* gb = context;
    gb->cpuBlocked = true;
    uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
    gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
    ++gb->memory.hdmaSource;
    ++gb->memory.hdmaDest;
    --gb->memory.hdmaRemaining;
    if (gb->memory.hdmaRemaining) {
        mTimingDeschedule(timing, &gb->memory.hdmaEvent);
        mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
    } else {
        gb->cpuBlocked = false;
        gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
        gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
        gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
        gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
        if (gb->memory.isHdma) {
            --gb->memory.io[REG_HDMA5];
            if (gb->memory.io[REG_HDMA5] == 0xFF) {
                gb->memory.isHdma = false;
            }
        } else {
            gb->memory.io[REG_HDMA5] = 0xFF;
        }
    }
}

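// Patches a single byte, e.g. for cheats or debugger pokes, optionally
// returning the previous value. Patching ROM first forces a copy-on-write of
// the pristine ROM image (see _pristineCow below).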
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
    struct GB* gb = (struct GB*) cpu->master;
    struct GBMemory* memory = &gb->memory;
    int8_t oldValue = -1;

    switch (address >> 12) {
    case GB_REGION_CART_BANK0:
    case GB_REGION_CART_BANK0 + 1:
    case GB_REGION_CART_BANK0 + 2:
    case GB_REGION_CART_BANK0 + 3:
        _pristineCow(gb);
        oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
        memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
        break;
    case GB_REGION_CART_BANK1:
    case GB_REGION_CART_BANK1 + 1:
    case GB_REGION_CART_BANK1 + 2:
    case GB_REGION_CART_BANK1 + 3:
        _pristineCow(gb);
        if (segment < 0) {
            oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
            memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
        } else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
            oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
            memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
        } else {
            return;
        }
        break;
    case GB_REGION_VRAM:
    case GB_REGION_VRAM + 1:
        if (segment < 0) {
            oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
            gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
            gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
        } else if (segment < 2) {
            oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
            gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
            gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
        } else {
            return;
        }
        break;
    case GB_REGION_EXTERNAL_RAM:
    case GB_REGION_EXTERNAL_RAM + 1:
        mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
        return;
    case GB_REGION_WORKING_RAM_BANK0:
    case GB_REGION_WORKING_RAM_BANK0 + 2:
        oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
        memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
        break;
    case GB_REGION_WORKING_RAM_BANK1:
        if (segment < 0) {
            oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
            memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
        } else if (segment < 8) {
            oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
            memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
        } else {
            return;
        }
        break;
    default:
        if (address < GB_BASE_OAM) {
            oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
            memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
        } else if (address < GB_BASE_UNUSABLE) {
            oldValue = gb->video.oam.raw[address & 0xFF];
            gb->video.oam.raw[address & 0xFF] = value;
            gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
        } else if (address < GB_BASE_HRAM) {
            mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
            return;
        } else if (address < GB_BASE_IE) {
            oldValue = memory->hram[address & GB_SIZE_HRAM];
            memory->hram[address & GB_SIZE_HRAM] = value;
        } else {
            mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
            return;
        }
    }
    if (old) {
        *old = oldValue;
    }
}

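// Savestate serialization. Multi-byte fields are stored explicitly
// little-endian so states remain portable across hosts.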
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
    const struct GBMemory* memory = &gb->memory;
    memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
    memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
    STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
    state->memory.wramCurrentBank = memory->wramCurrentBank;
    state->memory.sramCurrentBank = memory->sramCurrentBank;

    STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
    STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

    STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
    STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

    STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
    state->memory.dmaRemaining = memory->dmaRemaining;
    memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

    STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
    STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

    GBSerializedMemoryFlags flags = 0;
    flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
    flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
    flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
    flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
    flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
    flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
    STORE_16LE(flags, 0, &state->memory.flags);

    switch (memory->mbcType) {
    case GB_MBC1:
        state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
        state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
        break;
    case GB_MBC3_RTC:
        STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
        break;
    case GB_MBC7:
        state->memory.mbc7.state = memory->mbcState.mbc7.state;
        state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
        state->memory.mbc7.address = memory->mbcState.mbc7.address;
        state->memory.mbc7.access = memory->mbcState.mbc7.access;
        state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
        state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
        STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
        STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
        break;
    default:
        break;
    }
}

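// Savestate deserialization: restores raw memory, re-derives the mapped
// ROM/WRAM/SRAM bank pointers from the saved bank numbers, and reschedules
// any DMA that was in flight.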
void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
    struct GBMemory* memory = &gb->memory;
    memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
    memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
    LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
    memory->wramCurrentBank = state->memory.wramCurrentBank;
    memory->sramCurrentBank = state->memory.sramCurrentBank;

    GBMBCSwitchBank(gb, memory->currentBank);
    GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
    GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

    LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
    LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

    LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
    LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

    LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
    memory->dmaRemaining = state->memory.dmaRemaining;
    memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

    uint32_t when;
    LOAD_32LE(when, 0, &state->memory.dmaNext);
    if (memory->dmaRemaining) {
        mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
    }
    LOAD_32LE(when, 0, &state->memory.hdmaNext);
    if (memory->hdmaRemaining) {
        mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
    }

    GBSerializedMemoryFlags flags;
    LOAD_16LE(flags, 0, &state->memory.flags);
    memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
    memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
    memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
    memory->ime = GBSerializedMemoryFlagsGetIme(flags);
    memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
    memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

    switch (memory->mbcType) {
    case GB_MBC1:
        memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
        memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
        if (memory->mbcState.mbc1.mode) {
            GBMBCSwitchBank0(gb, memory->currentBank >> memory->mbcState.mbc1.multicartStride);
        }
        break;
    case GB_MBC3_RTC:
        LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
        break;
    case GB_MBC7:
        memory->mbcState.mbc7.state = state->memory.mbc7.state;
        memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
        memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
        memory->mbcState.mbc7.access = state->memory.mbc7.access;
        memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
        memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
        LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
        LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
        break;
    default:
        break;
    }
}

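// Copy-on-write for the ROM image: if the loaded ROM is still pristine
// (presumably the original, possibly memory-mapped, file contents), replace
// it with a private copy padded to GB_SIZE_CART_MAX before it gets patched.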
void _pristineCow(struct GB* gb) {
    if (!gb->isPristine) {
        return;
    }
    void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
    memcpy(newRom, gb->memory.rom, gb->memory.romSize);
    memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
    if (gb->memory.rom == gb->memory.romBase) {
        gb->memory.romBase = newRom;
    }
    gb->memory.rom = newRom;
    GBMBCSwitchBank(gb, gb->memory.currentBank);
    gb->isPristine = false;
}