src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "memory.h"

#include "core/interface.h"
#include "gb/gb.h"
#include "gb/io.h"
#include "gb/mbc.h"
#include "gb/serialize.h"

#include "util/memory.h"

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory");

static void _pristineCow(struct GB* gb);

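// Fast path for opcode fetches: reads straight out of the currently mapped ROM
// region and only falls back through setActiveRegion/cpuLoad8 when the address
// leaves that region.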
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

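// Selects which block of memory the fast fetch path above points at, based on
// the region the PC currently falls in; anything outside ROM goes through the
// full GBLoad8 decode instead.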
static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBank;
		cpu->memory.activeRegionEnd = GB_BASE_VRAM;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

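// Installs the CPU memory callbacks and clears the memory bookkeeping; the
// buffers themselves are set up later (WRAM is allocated in GBMemoryReset).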
void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbc = 0;

	gb->memory.rtc = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

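// Re-creates WRAM and resets banking, DMA and RTC state. On CGB models the
// fresh WRAM is filled with an alternating bit pattern rather than zeroes,
// presumably to approximate the contents real hardware powers up with.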
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	if (gb->model >= GB_MODEL_CGB) {
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;

	gb->memory.sramAccess = false;
	gb->memory.rtcAccess = false;
	gb->memory.activeRtcReg = 0;
	gb->memory.rtcLatched = false;
	memset(&gb->memory.rtcRegs, 0, sizeof(gb->memory.rtcRegs));

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

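// Maps the switchable WRAM bank (0xD000-0xDFFF). As on hardware, selecting
// bank 0 maps bank 1 instead.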
void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

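// Full bus decode for CPU reads. Each 4 KiB page of the address space is
// dispatched on the top nibble; the highest page is split further into echo
// RAM, OAM, the unusable gap, I/O, HRAM and IE.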
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbc(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else if (memory->mbcType == GB_MBC7) {
			GBMBC7Write(memory, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

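// Debugger view of the bus: behaves like GBLoad8, but a non-negative "segment"
// selects a specific ROM/VRAM/SRAM/WRAM bank instead of the currently mapped
// one, without disturbing the mapping.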
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

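// Kicks off an OAM DMA transfer of 0xA0 bytes starting at "base"; the CPU's
// memory handlers are swapped for the restricted GBDMALoad8/GBDMAStore8
// variants for the duration of the transfer.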
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	gb->cpu->memory.store8 = GBDMAStore8;
	gb->cpu->memory.load8 = GBDMALoad8;
	gb->cpu->memory.cpuLoad8 = GBDMALoad8;
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

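// Handles a write to HDMA5: latches source and destination from HDMA1-4, then
// either starts the transfer right away or arms per-H-Blank DMA, depending on
// bit 7 of the written value.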
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || gb->video.mode == 0) {
		gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
		gb->cpu->nextEvent = gb->cpu->cycles;
	}
}

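// Timing callback for OAM DMA: copies one byte into OAM and reschedules itself
// every 4 cycles until all 0xA0 bytes have been transferred, then restores the
// normal load/store handlers.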
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	--gb->memory.dmaRemaining;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	} else {
		gb->cpu->memory.store8 = GBStore8;
		gb->cpu->memory.load8 = GBLoad8;
	}
}

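// Timing callback for HDMA/GDMA: copies one byte every 2 cycles. Once the
// block finishes it writes the updated source/destination back to HDMA1-4 and,
// in H-Blank mode, decrements HDMA5 (0xFF marks completion).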
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	gb->cpu->cycles += 2;
	if (gb->memory.hdmaRemaining) {
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}

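// These tables give, per 8 KiB slice of the DMA source address (indexed by
// dmaSource >> 13), the address range the CPU is blocked from accessing while
// OAM DMA is in flight; GBDMALoad8/GBDMAStore8 below consult them.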
struct OAMBlock {
	uint16_t low;
	uint16_t high;
};

static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};

static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 },
	{ 0xA000, 0xC000 },
};

uint8_t GBDMALoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return 0xFF;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return 0xFF;
	}
	return GBLoad8(cpu, address);
}

void GBDMAStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return;
	}
	GBStore8(cpu, address, value);
}

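// Applies a single-byte patch (presumably on behalf of the debugger or cheat
// support), returning the previous value through "old". ROM regions are copied
// on write first so the pristine ROM image stays untouched.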
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			// Write through the full VRAM array, matching the read above; going through
			// the banked pointer would apply the current bank offset a second time.
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

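// Save-state support: WRAM, HRAM, bank selections, DMA/HDMA progress and the
// RTC registers are stored with multi-byte fields in little-endian order, and
// the access/IME/HDMA flags packed into a single GBSerializedMemoryFlags word.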
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);
}

void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(memory, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);
}

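// Copy-on-write for the loaded ROM: the first time a patch touches ROM, the
// pristine image is copied into a private mapping (padded with 0xFF up to
// GB_SIZE_CART_MAX) so patches never modify the original buffer.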
void _pristineCow(struct GB* gb) {
	if (gb->memory.rom != gb->pristineRom) {
		return;
	}
	gb->memory.rom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(gb->memory.rom, gb->pristineRom, gb->memory.romSize);
	memset(((uint8_t*) gb->memory.rom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	if (gb->pristineRom == gb->memory.romBase) {
		gb->memory.romBase = gb->memory.rom;
	}
	GBMBCSwitchBank(&gb->memory, gb->memory.currentBank);
}
643}