src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "memory.h"

#include "core/interface.h"
#include "gb/gb.h"
#include "gb/io.h"
#include "gb/mbc.h"
#include "gb/serialize.h"

#include "util/memory.h"

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory");

static void _pristineCow(struct GB* gb);

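// Fast path for the cpuLoad8 fetch hook: read straight out of the currently
// mapped ROM region. On a miss, re-select the active region and retry through
// the (possibly updated) cpuLoad8.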
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

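// Select the fast-fetch window for the given address: ROM bank 0 and the
// switchable ROM bank get direct pointers; every other region falls back to
// the general GBLoad8 path.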
static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBank;
		cpu->memory.activeRegionEnd = GB_BASE_VRAM;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}

static void _GBMemoryDMAService(struct GB* gb);
static void _GBMemoryHDMAService(struct GB* gb);

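// Install the memory callbacks on the CPU core and clear all memory state.
// The actual buffers (WRAM, ROM, SRAM) are allocated elsewhere; see
// GBMemoryReset and the MBC code.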
void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_NONE;
	gb->memory.mbc = 0;

	gb->memory.rtc = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

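// Reset memory to its power-on state: reallocate WRAM, remap the bank
// pointers, clear DMA/HDMA and RTC state, and reinitialize the MBC.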
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaNext = INT_MAX;
	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaNext = INT_MAX;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.sramAccess = false;
	gb->memory.rtcAccess = false;
	gb->memory.activeRtcReg = 0;
	gb->memory.rtcLatched = false;
	memset(&gb->memory.rtcRegs, 0, sizeof(gb->memory.rtcRegs));

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

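// Map WRAM bank 1-7 into the switchable window at 0xD000; a requested bank of
// 0 selects bank 1, matching the behavior of the SVBK register.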
void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

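// General-purpose bus read. Dispatches on the top nybble of the address and
// covers every region: ROM, VRAM, cartridge RAM/RTC, WRAM and its echo, OAM,
// I/O, HRAM, and IE.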
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

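// General-purpose bus write. Writes to the ROM range are routed to the MBC
// handler (which may remap banks, so the fast-fetch region is refreshed);
// other regions are written directly, with I/O and IE going through GBIOWrite.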
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbc(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// TODO: Block access in wrong modes
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else if (memory->mbcType == GB_MBC7) {
			GBMBC7Write(memory, address, value);
		}
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

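// Debugger-facing read: like GBLoad8, but a non-negative segment selects an
// explicit ROM/VRAM/SRAM/WRAM bank instead of the currently mapped one.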
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else {
			if ((size_t) segment * GB_SIZE_CART_BANK0 > memory->romSize) {
				return 0xFF;
			}
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			}
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

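// Advance the DMA and HDMA timers by the elapsed cycle count, servicing each
// transfer whose deadline has passed, and return the number of cycles until
// the next pending event (INT_MAX if none).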
int32_t GBMemoryProcessEvents(struct GB* gb, int32_t cycles) {
	int nextEvent = INT_MAX;
	if (gb->memory.dmaRemaining) {
		gb->memory.dmaNext -= cycles;
		if (gb->memory.dmaNext <= 0) {
			_GBMemoryDMAService(gb);
		}
		nextEvent = gb->memory.dmaNext;
	}
	if (gb->memory.hdmaRemaining) {
		gb->memory.hdmaNext -= cycles;
		if (gb->memory.hdmaNext <= 0) {
			_GBMemoryHDMAService(gb);
		}
		if (gb->memory.hdmaNext < nextEvent) {
			nextEvent = gb->memory.hdmaNext;
		}
	}
	return nextEvent;
}

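// Begin an OAM DMA transfer of 0xA0 bytes from base. The first byte is copied
// after a short startup delay, and the CPU's load/store hooks are swapped for
// the restricted GBDMALoad8/GBDMAStore8 variants while the transfer runs.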
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	gb->cpu->memory.store8 = GBDMAStore8;
	gb->cpu->memory.load8 = GBDMALoad8;
	gb->cpu->memory.cpuLoad8 = GBDMALoad8;
	gb->memory.dmaNext = gb->cpu->cycles + 8;
	if (gb->memory.dmaNext < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->memory.dmaNext;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

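// Handle a write to HDMA5: latch the source and destination from HDMA1-HDMA4,
// reject sources inside VRAM, then either mark HBlank-driven HDMA as active
// (bit 7 set) or, if no HDMA was already in flight, kick off an immediate
// general-purpose DMA of the requested length.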
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if (!wasHdma && !gb->memory.isHdma) {
		gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		gb->memory.hdmaNext = gb->cpu->cycles;
		gb->cpu->nextEvent = gb->cpu->cycles;
	}
}

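// Copy one byte per service from the DMA source into OAM, rescheduling itself
// every 4 cycles until all 0xA0 bytes are done, then restore the normal
// load/store hooks.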
void _GBMemoryDMAService(struct GB* gb) {
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	--gb->memory.dmaRemaining;
	if (gb->memory.dmaRemaining) {
		gb->memory.dmaNext += 4;
	} else {
		gb->memory.dmaNext = INT_MAX;
		gb->cpu->memory.store8 = GBStore8;
		gb->cpu->memory.load8 = GBLoad8;
	}
}

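// Copy one byte of the current HDMA/GDMA block, charging 2 cycles per byte.
// When the block completes, write the updated source and destination back to
// the HDMA registers and update HDMA5 (remaining length or the done flag).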
void _GBMemoryHDMAService(struct GB* gb) {
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	gb->cpu->cycles += 2;
	if (gb->memory.hdmaRemaining) {
		gb->memory.hdmaNext += 2;
	} else {
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] |= 0x80;
		}
	}
}

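// While OAM DMA is active, the CPU is locked out of the bus the DMA engine is
// reading from. These tables give, per 8KiB region of the DMA source, the
// address range that is blocked; they are indexed with dmaSource >> 13.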
struct OAMBlock {
	uint16_t low;
	uint16_t high;
};

static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};

static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 },
	{ 0xA000, 0xC000 },
};

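// Bus read used while OAM DMA is running: reads inside the blocked range (or
// from OAM itself) return 0xFF; everything else falls through to GBLoad8.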
uint8_t GBDMALoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return 0xFF;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return 0xFF;
	}
	return GBLoad8(cpu, address);
}

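// Bus write used while OAM DMA is running: writes inside the blocked range (or
// to OAM) are dropped; everything else falls through to GBStore8.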
void GBDMAStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return;
	}
	GBStore8(cpu, address, value);
}

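// Patch a byte in place (bypassing the MBC) for external tools such as the
// debugger. ROM patches trigger a copy-on-write of the pristine ROM first; the
// previous value is returned through *old when requested.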
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->rom[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->rom[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

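// Save the memory subsystem into a savestate: WRAM, HRAM, bank selections,
// DMA/HDMA progress, RTC registers, and the packed flag bits, all stored
// little-endian.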
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_32LE(memory->dmaNext, 0, &state->memory.dmaNext);
	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_32LE(memory->hdmaNext, 0, &state->memory.hdmaNext);
	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);
}

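// Restore the memory subsystem from a savestate and re-derive the ROM, WRAM,
// and SRAM bank pointers from the loaded bank numbers.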
void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(memory, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_32LE(memory->dmaNext, 0, &state->memory.dmaNext);
	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_32LE(memory->hdmaNext, 0, &state->memory.hdmaNext);
	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);
}

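// Copy-on-write helper for ROM patching: if the ROM pointer still refers to
// the pristine mapping, clone it into a private buffer (padded with 0xFF up to
// GB_SIZE_CART_MAX) and rebank so later patches do not touch the original.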
void _pristineCow(struct GB* gb) {
	if (gb->memory.rom != gb->pristineRom) {
		return;
	}
	gb->memory.rom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(gb->memory.rom, gb->pristineRom, gb->memory.romSize);
	memset(((uint8_t*) gb->memory.rom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	GBMBCSwitchBank(&gb->memory, gb->memory.currentBank);
}