src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "memory.h"

#include "core/interface.h"
#include "gb/gb.h"
#include "gb/io.h"
#include "gb/mbc.h"
#include "gb/serialize.h"

#include "util/memory.h"

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory");

static void _pristineCow(struct GB* gb);

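// Fast-path instruction fetch: while the PC stays inside the currently
// mapped ROM region, reads come straight out of activeRegion without the
// full bus decode in GBLoad8. Crossing activeRegionEnd re-resolves the
// region via setActiveRegion.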
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address > cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBank;
		cpu->memory.activeRegionEnd = GB_BASE_VRAM;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbc = 0;

	gb->memory.rtc = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
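	// On CGB, fill fresh WRAM with a striped pattern instead of zeroes:
	// within each group of four words the last two are the complement of
	// the first two, and the base pattern inverts every 0x200 words.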
	if (gb->model >= GB_MODEL_CGB) {
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;

	gb->memory.sramAccess = false;
	gb->memory.rtcAccess = false;
	gb->memory.activeRtcReg = 0;
	gb->memory.rtcLatched = false;
	memset(&gb->memory.rtcRegs, 0, sizeof(gb->memory.rtcRegs));

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

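// Full bus decode for CPU reads: the top nybble of the address selects the
// region (ROM, VRAM, external RAM/RTC, WRAM, echo RAM, OAM, I/O, HRAM, IE).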
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

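// Full bus decode for CPU writes. Writes into the ROM regions are routed to
// the cartridge's MBC handler, which may swap banks, so the CPU's active
// fetch region is re-resolved afterwards.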
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbc(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else if (memory->mbcType == GB_MBC7) {
			GBMBC7Write(memory, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}
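
// Debugger-facing read: like GBLoad8, but a non-negative segment argument
// peeks into a specific ROM/VRAM/SRAM/WRAM bank instead of the one that is
// currently mapped.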
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

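// Begin an OAM DMA transfer: 0xA0 bytes are copied from the source base to
// OAM, one byte every 4 cycles after an 8-cycle startup delay. While the
// transfer runs, the CPU's load/store handlers are swapped for the blocking
// GBDMALoad8/GBDMAStore8 variants below.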
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	gb->cpu->memory.store8 = GBDMAStore8;
	gb->cpu->memory.load8 = GBDMALoad8;
	gb->cpu->memory.cpuLoad8 = GBDMALoad8;
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

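// Handle a write to HDMA5 on CGB: latch the source and destination from
// HDMA1-HDMA4, then either kick off a general-purpose DMA immediately or
// arm HBlank DMA depending on bit 7 and the current video mode.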
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || gb->video.mode == 0) {
		gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
		gb->cpu->nextEvent = gb->cpu->cycles;
	}
}

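// Timing callback for OAM DMA: copies one byte per invocation and
// reschedules itself every 4 cycles until 0xA0 bytes have been written,
// then restores the normal load/store handlers.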
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	--gb->memory.dmaRemaining;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	} else {
		gb->cpu->memory.store8 = GBStore8;
		gb->cpu->memory.load8 = GBLoad8;
	}
}

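// Timing callback for CGB HDMA/GDMA: copies one byte every 2 cycles while
// the CPU is blocked, then writes the updated source and destination back
// into the HDMA registers and updates HDMA5 when the block completes.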
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}

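// During OAM DMA the bus being used by the transfer is unavailable to the
// CPU. These tables give, per 8 KiB region of the DMA source, the address
// range that is blocked (DMG and CGB have different bus layouts).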
struct OAMBlock {
	uint16_t low;
	uint16_t high;
};

static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};

static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 },
	{ 0xA000, 0xC000 },
};

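// Load/store handlers installed while OAM DMA is active: accesses to the
// blocked bus region, or to OAM itself, read 0xFF or are dropped; anything
// else falls through to the normal handlers.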
uint8_t GBDMALoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return 0xFF;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return 0xFF;
	}
	return GBLoad8(cpu, address);
}

void GBDMAStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return;
	}
	GBStore8(cpu, address, value);
}

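// Debugger/cheat patching: overwrite a byte in a given segment (or in the
// currently mapped bank when segment is negative) and optionally return the
// previous value through 'old'. ROM patches trigger a copy-on-write of the
// pristine ROM first.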
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

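// Savestate support: copy the memory subsystem's state into (or out of) a
// GBSerializedState, packing the access flags into a 16-bit LE field.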
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);
}

void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(memory, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);
}

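// Copy-on-write for ROM patching: if the mapped ROM is still the pristine
// image, replace it with a private writable copy padded with 0xFF up to
// GB_SIZE_CART_MAX, then remap the current bank pointers.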
void _pristineCow(struct GB* gb) {
	if (gb->memory.rom != gb->pristineRom) {
		return;
	}
	gb->memory.rom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(gb->memory.rom, gb->pristineRom, gb->memory.romSize);
	memset(((uint8_t*) gb->memory.rom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	if (gb->pristineRom == gb->memory.romBase) {
		gb->memory.romBase = gb->memory.rom;
	}
	GBMBCSwitchBank(&gb->memory, gb->memory.currentBank);
}
646}