src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "memory.h"

#include "core/interface.h"
#include "gb/gb.h"
#include "gb/io.h"
#include "gb/mbc.h"
#include "gb/serialize.h"

#include "util/memory.h"

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory");

static void _pristineCow(struct GB* gb);

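// Fast opcode-fetch path: as long as the address stays inside the cached
// active ROM region, bytes are read straight out of it; anything at or past
// the region end falls back through setActiveRegion to the full bus decoder.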
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBank;
		cpu->memory.activeRegionEnd = GB_BASE_VRAM;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}

static void _GBMemoryDMAService(struct GB* gb);
static void _GBMemoryHDMAService(struct GB* gb);

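// Installs the CPU bus callbacks and zeroes the memory pointers; the actual
// buffers are allocated later (WRAM in GBMemoryReset, ROM and SRAM presumably
// when a cartridge is loaded elsewhere).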
void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbc = 0;

	gb->memory.rtc = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

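// Reallocates WRAM, maps WRAM bank 1 and ROM bank 1, and clears all DMA,
// HDMA, RTC and MBC state back to power-on defaults.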
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaNext = INT_MAX;
	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaNext = INT_MAX;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.sramAccess = false;
	gb->memory.rtcAccess = false;
	gb->memory.activeRtcReg = 0;
	gb->memory.rtcLatched = false;
	memset(&gb->memory.rtcRegs, 0, sizeof(gb->memory.rtcRegs));

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

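// Selects the switchable CGB WRAM bank (0xD000-0xDFFF). Bank 0 is not a valid
// selection and maps to bank 1, matching hardware behavior.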
void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

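// Full read decoder for the 16-bit bus: ROM banks, VRAM, cartridge RAM/RTC,
// WRAM and its echo, OAM (blocked during modes 2-3), I/O, HRAM and IE.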
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

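// Write decoder. Writes into the ROM range are routed to the MBC handler,
// which may remap banks, so the cached active region is refreshed afterward.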
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbc(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// TODO: Block access in wrong modes
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else if (memory->mbcType == GB_MBC7) {
			GBMBC7Write(memory, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}
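
// Debugger/cheat view of memory: like GBLoad8 but side-effect free and able
// to peek an explicit ROM, VRAM, SRAM or WRAM bank via the segment argument
// (a negative segment uses whatever bank is currently mapped).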
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

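// Advances the DMA and HDMA timers by the elapsed CPU cycles, services any
// transfer whose deadline has passed, and returns the next deadline.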
int32_t GBMemoryProcessEvents(struct GB* gb, int32_t cycles) {
	int nextEvent = INT_MAX;
	if (gb->memory.dmaRemaining) {
		gb->memory.dmaNext -= cycles;
		if (gb->memory.dmaNext <= 0) {
			_GBMemoryDMAService(gb);
		}
		nextEvent = gb->memory.dmaNext;
	}
	if (gb->memory.hdmaRemaining) {
		gb->memory.hdmaNext -= cycles;
		if (gb->memory.hdmaNext <= 0) {
			_GBMemoryHDMAService(gb);
		}
		if (gb->memory.hdmaNext < nextEvent) {
			nextEvent = gb->memory.hdmaNext;
		}
	}
	return nextEvent;
}

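// Starts an OAM DMA transfer of 0xA0 bytes from base. While it runs, the CPU
// bus handlers are swapped for the GBDMA* variants, which block accesses that
// would conflict with the transfer.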
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	gb->cpu->memory.store8 = GBDMAStore8;
	gb->cpu->memory.load8 = GBDMALoad8;
	gb->cpu->memory.cpuLoad8 = GBDMALoad8;
	gb->memory.dmaNext = gb->cpu->cycles + 8;
	if (gb->memory.dmaNext < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->memory.dmaNext;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

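// Handles a write to CGB register HDMA5: latches the source and destination
// from HDMA1-HDMA4, then either starts a general-purpose DMA right away
// (bit 7 clear, no HDMA already active) or flags an HBlank-driven HDMA
// (bit 7 set).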
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if (!wasHdma && !gb->memory.isHdma) {
		gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		gb->memory.hdmaNext = gb->cpu->cycles;
		gb->cpu->nextEvent = gb->cpu->cycles;
	}
}

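// Copies one byte of the pending OAM DMA every 4 cycles; when the transfer
// finishes, the normal load/store handlers are restored.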
390void _GBMemoryDMAService(struct GB* gb) {
391 uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
392 // TODO: Can DMA write OAM during modes 2-3?
393 gb->video.oam.raw[gb->memory.dmaDest] = b;
394 ++gb->memory.dmaSource;
395 ++gb->memory.dmaDest;
396 --gb->memory.dmaRemaining;
397 if (gb->memory.dmaRemaining) {
398 gb->memory.dmaNext += 4;
399 } else {
400 gb->memory.dmaNext = INT_MAX;
401 gb->cpu->memory.store8 = GBStore8;
402 gb->cpu->memory.load8 = GBLoad8;
403 }
404}
405
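// Copies one byte of the pending HDMA/GDMA block, stalling the CPU 2 cycles
// per byte; once the block is done it writes the updated source/destination
// back to HDMA1-HDMA4 and updates HDMA5.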
406void _GBMemoryHDMAService(struct GB* gb) {
407 uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
408 gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
409 ++gb->memory.hdmaSource;
410 ++gb->memory.hdmaDest;
411 --gb->memory.hdmaRemaining;
412 gb->cpu->cycles += 2;
413 if (gb->memory.hdmaRemaining) {
414 gb->memory.hdmaNext += 2;
415 } else {
416 gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
417 gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
418 gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
419 gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
420 if (gb->memory.isHdma) {
421 --gb->memory.io[REG_HDMA5];
422 if (gb->memory.io[REG_HDMA5] == 0xFF) {
423 gb->memory.isHdma = false;
424 }
425 } else {
426 gb->memory.io[REG_HDMA5] |= 0x80;
427 }
428 }
429}
430
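// Address ranges that become inaccessible to the CPU while OAM DMA reads from
// the corresponding 8KiB region (indexed by dmaSource >> 13); DMG and CGB
// appear to share their buses differently, hence the two tables.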
struct OAMBlock {
	uint16_t low;
	uint16_t high;
};

static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};

static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 },
	{ 0xA000, 0xC000 },
};

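// Bus handlers used while OAM DMA is active: accesses to the blocked range
// (and to OAM itself) read back 0xFF or are dropped; everything else goes
// through the normal handlers.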
uint8_t GBDMALoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return 0xFF;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return 0xFF;
	}
	return GBLoad8(cpu, address);
}

void GBDMAStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return;
	}
	GBStore8(cpu, address, value);
}

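// Applies a debugger/cheat patch byte, optionally returning the old value.
// ROM patches trigger a copy-on-write of the pristine ROM image first; the
// segment argument selects an explicit bank, as in GBView8.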
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->rom[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->rom[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

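// Savestate support: multi-byte fields are stored little-endian, and
// deserialization re-applies the saved ROM, WRAM and SRAM bank selections so
// the derived bank pointers stay consistent.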
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_32LE(memory->dmaNext, 0, &state->memory.dmaNext);
	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_32LE(memory->hdmaNext, 0, &state->memory.hdmaNext);
	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);
}

void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(memory, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_32LE(memory->dmaNext, 0, &state->memory.dmaNext);
	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_32LE(memory->hdmaNext, 0, &state->memory.hdmaNext);
	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);
}

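// Copy-on-write helper for ROM patching: if the mapped ROM still aliases the
// pristine image, it is copied into a fresh anonymous mapping (padded with
// 0xFF) before any patch byte is written, and the current bank is remapped.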
void _pristineCow(struct GB* gb) {
	if (gb->memory.rom != gb->pristineRom) {
		return;
	}
	gb->memory.rom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(gb->memory.rom, gb->pristineRom, gb->memory.romSize);
	memset(((uint8_t*) gb->memory.rom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	GBMBCSwitchBank(&gb->memory, gb->memory.currentBank);
}