/* src/gb/memory.c */
1/* Copyright (c) 2013-2016 Jeffrey Pfau
2 *
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6#include "memory.h"
7
8#include "core/interface.h"
9#include "gb/gb.h"
10#include "gb/io.h"
11#include "gb/mbc.h"
12#include "gb/serialize.h"
13
14#include "util/memory.h"
15
16mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory");
17
18static void _pristineCow(struct GB* gba);
19
20static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
21 if (UNLIKELY(address > cpu->memory.activeRegionEnd)) {
22 cpu->memory.setActiveRegion(cpu, address);
23 return cpu->memory.cpuLoad8(cpu, address);
24 }
25 return cpu->memory.activeRegion[address & cpu->memory.activeMask];
26}
27
28static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
29 struct GB* gb = (struct GB*) cpu->master;
30 struct GBMemory* memory = &gb->memory;
31 switch (address >> 12) {
32 case GB_REGION_CART_BANK0:
33 case GB_REGION_CART_BANK0 + 1:
34 case GB_REGION_CART_BANK0 + 2:
35 case GB_REGION_CART_BANK0 + 3:
36 cpu->memory.cpuLoad8 = GBFastLoad8;
37 cpu->memory.activeRegion = memory->romBase;
38 cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
39 cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
40 break;
41 case GB_REGION_CART_BANK1:
42 case GB_REGION_CART_BANK1 + 1:
43 case GB_REGION_CART_BANK1 + 2:
44 case GB_REGION_CART_BANK1 + 3:
45 cpu->memory.cpuLoad8 = GBFastLoad8;
46 cpu->memory.activeRegion = memory->romBank;
47 cpu->memory.activeRegionEnd = GB_BASE_VRAM;
48 cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
49 break;
50 default:
51 cpu->memory.cpuLoad8 = GBLoad8;
52 break;
53 }
54}
55
56static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
57static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
58
59void GBMemoryInit(struct GB* gb) {
60 struct LR35902Core* cpu = gb->cpu;
61 cpu->memory.cpuLoad8 = GBLoad8;
62 cpu->memory.load8 = GBLoad8;
63 cpu->memory.store8 = GBStore8;
64 cpu->memory.setActiveRegion = GBSetActiveRegion;
65
66 gb->memory.wram = 0;
67 gb->memory.wramBank = 0;
68 gb->memory.rom = 0;
69 gb->memory.romBank = 0;
70 gb->memory.romSize = 0;
71 gb->memory.sram = 0;
72 gb->memory.mbcType = GB_MBC_AUTODETECT;
73 gb->memory.mbc = 0;
74
75 gb->memory.rtc = NULL;
76
77 GBIOInit(gb);
78}
79
80void GBMemoryDeinit(struct GB* gb) {
81 mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
82 if (gb->memory.rom) {
83 mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
84 }
85}
86
// Resets the memory subsystem to power-on state: remaps WRAM, reproduces the
// CGB WRAM fill pattern, resets bank selection, clears DMA/HDMA/RTC state,
// and reinitializes the MBC.
void GBMemoryReset(struct GB* gb) {
	// Recreate WRAM from scratch so no stale contents survive the reset.
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	if (gb->model >= GB_MODEL_CGB) {
		// CGB models start with WRAM holding an alternating fill pattern:
		// within each group of four words the last two are inverted, and the
		// base polarity flips at each 0x200-word boundary.
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	// WRAM bank 1 and ROM bank 1 are mapped at reset.
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	// Clear any in-flight OAM DMA / HDMA transfer state.
	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	// Register the timing callbacks that drive byte-by-byte DMA transfers.
	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.dmaEvent.priority = 0x40;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
	gb->memory.hdmaEvent.priority = 0x41;

	gb->memory.sramAccess = false;
	gb->memory.rtcAccess = false;
	gb->memory.activeRtcReg = 0;
	gb->memory.rtcLatched = false;
	memset(&gb->memory.rtcRegs, 0, sizeof(gb->memory.rtcRegs));

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	// If the WRAM mapping failed, tear everything back down.
	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}
148
149void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
150 bank &= 7;
151 if (!bank) {
152 bank = 1;
153 }
154 memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
155 memory->wramCurrentBank = bank;
156}
157
// Canonical 8-bit bus read: decodes the full address space, including banked
// regions, OAM mode gating, and the unusable region.
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		// 0x0000-0x3FFF: fixed ROM bank.
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// 0x4000-0x7FFF: switchable ROM bank.
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// 0x8000-0x9FFF: currently mapped VRAM bank.
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		// 0xA000-0xBFFF: cart RAM, latched RTC register, or MBC hardware.
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		// SRAM disabled: open bus.
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2: // 0xE000-0xEFFF echoes bank 0.
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		// 0xF000-0xFFFF: echo RAM tail, OAM, unusable region, I/O, HRAM, IE.
		if (address < GB_BASE_OAM) {
			// Echo of the switchable WRAM bank.
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// OAM only reads back while the PPU is in modes 0-1.
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
215
// Canonical 8-bit bus write. Writes into ROM space are MBC control commands
// (ROM itself is read-only); everything else goes to the addressed hardware.
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// 0x0000-0x7FFF: route to the MBC; this may remap banks, so refresh
		// the cached fetch region afterwards.
		memory->mbc(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// Tell the renderer which VRAM byte is being modified, then store.
		gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		// 0xA000-0xBFFF: RTC register, cart RAM, or MBC hardware.
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else if (memory->mbcType == GB_MBC7) {
			GBMBC7Write(memory, address, value);
		}
		// Mark battery-backed RAM dirty so it gets flushed out.
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2: // 0xE000-0xEFFF echoes bank 0.
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		// 0xF000-0xFFFF: echo RAM tail, OAM, unusable region, I/O, HRAM, IE.
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			// OAM only accepts writes while the PPU is in modes 0-1.
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}
// Debugger-style read: like GBLoad8, but `segment` can select a specific
// bank for banked regions (negative means "the currently mapped bank").
// Reads outside the selected bank's valid range return 0xFF.
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		// Fixed ROM bank; segment selection does not apply here.
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			// Read a specific ROM bank, bounds-checked against ROM size.
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			// VRAM has at most 2 banks.
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment *GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				// Read a specific SRAM bank, bounds-checked against SRAM size.
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment *GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2: // 0xE000-0xEFFF echoes bank 0.
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			// WRAM has 8 banks.
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment *GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		// 0xF000-0xFFFF: same decoding as GBLoad8.
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// OAM only reads back while the PPU is in modes 0-1.
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
353
// Begins an OAM DMA transfer of 0xA0 bytes from `base` into OAM.
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	// Sources above 0xF100 are rejected; the write is ignored.
	if (base > 0xF100) {
		return;
	}
	// Swap in the DMA-aware accessors: while the transfer runs, CPU accesses
	// to the bus in use are blocked (see GBDMALoad8/GBDMAStore8).
	gb->cpu->memory.store8 = GBDMAStore8;
	gb->cpu->memory.load8 = GBDMALoad8;
	gb->cpu->memory.cpuLoad8 = GBDMALoad8;
	// The first byte is copied after an 8-cycle setup delay.
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0; // Byte offset into OAM.
	gb->memory.dmaRemaining = 0xA0;
}
369
// Handles a write to REG_HDMA5: latches source/destination from HDMA1-HDMA4
// and starts either a general-purpose DMA or a per-hblank HDMA transfer.
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	// Assemble the 16-bit source and destination from the HDMA1-HDMA4 pairs.
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	// The source is 0x10-aligned and must not lie in VRAM.
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return;
	}
	// The destination is forced into VRAM (0x8000-0x9FF0), 0x10-aligned.
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	// GDMA (bit 7 clear, and HDMA was not already running) or a write landing
	// in hblank starts copying immediately. Length is encoded as
	// ((value & 0x7F) + 1) blocks of 0x10 bytes.
	if ((!wasHdma && !gb->memory.isHdma) || gb->video.mode == 0) {
		gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		// The CPU stalls for the duration of the transfer.
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
		gb->cpu->nextEvent = gb->cpu->cycles;
	}
}
391
392void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
393 struct GB* gb = context;
394 uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
395 // TODO: Can DMA write OAM during modes 2-3?
396 gb->video.oam.raw[gb->memory.dmaDest] = b;
397 ++gb->memory.dmaSource;
398 ++gb->memory.dmaDest;
399 --gb->memory.dmaRemaining;
400 if (gb->memory.dmaRemaining) {
401 mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
402 } else {
403 gb->cpu->memory.store8 = GBStore8;
404 gb->cpu->memory.load8 = GBLoad8;
405 }
406}
407
// Timing callback for HDMA/GDMA: copies one byte per invocation, keeping the
// CPU blocked, and updates the HDMA registers when the block completes.
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		// One byte every 2 cycles while the transfer is active.
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		// Block complete: unblock the CPU and write the advanced
		// source/destination back into the HDMA registers.
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			// HDMA mode: HDMA5 counts down remaining blocks; 0xFF means done.
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			// GDMA mode: report completion.
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}
435
// While OAM DMA runs, the bus occupied by the DMA source is unusable by the
// CPU. Each table entry gives the [low, high) address window that becomes
// inaccessible, indexed by (dmaSource >> 13), i.e. one entry per 8KiB
// region of the address space (see GBDMALoad8/GBDMAStore8).
struct OAMBlock {
	uint16_t low;  // First blocked address (inclusive).
	uint16_t high; // First address past the blocked window (exclusive).
};

// DMG: ROM, external RAM and WRAM share one bus; VRAM is a separate bus.
static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};

// CGB: WRAM sits on its own bus, so DMA from ROM/external RAM leaves WRAM
// accessible, and vice versa.
static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 },
	{ 0xA000, 0xC000 },
};
462
463uint8_t GBDMALoad8(struct LR35902Core* cpu, uint16_t address) {
464 struct GB* gb = (struct GB*) cpu->master;
465 struct GBMemory* memory = &gb->memory;
466 const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
467 block = &block[memory->dmaSource >> 13];
468 if (address >= block->low && address < block->high) {
469 return 0xFF;
470 }
471 if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
472 return 0xFF;
473 }
474 return GBLoad8(cpu, address);
475}
476
477void GBDMAStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
478 struct GB* gb = (struct GB*) cpu->master;
479 struct GBMemory* memory = &gb->memory;
480 const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
481 block = &block[memory->dmaSource >> 13];
482 if (address >= block->low && address < block->high) {
483 return;
484 }
485 if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
486 return;
487 }
488 GBStore8(cpu, address, value);
489}
490
491void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
492 struct GB* gb = (struct GB*) cpu->master;
493 struct GBMemory* memory = &gb->memory;
494 int8_t oldValue = -1;
495
496 switch (address >> 12) {
497 case GB_REGION_CART_BANK0:
498 case GB_REGION_CART_BANK0 + 1:
499 case GB_REGION_CART_BANK0 + 2:
500 case GB_REGION_CART_BANK0 + 3:
501 _pristineCow(gb);
502 oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
503 memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
504 break;
505 case GB_REGION_CART_BANK1:
506 case GB_REGION_CART_BANK1 + 1:
507 case GB_REGION_CART_BANK1 + 2:
508 case GB_REGION_CART_BANK1 + 3:
509 _pristineCow(gb);
510 if (segment < 0) {
511 oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
512 memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
513 } else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
514 oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
515 memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
516 } else {
517 return;
518 }
519 break;
520 case GB_REGION_VRAM:
521 case GB_REGION_VRAM + 1:
522 if (segment < 0) {
523 oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
524 gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
525 gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
526 } else if (segment < 2) {
527 oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
528 gb->video.vramBank[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
529 gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
530 } else {
531 return;
532 }
533 break;
534 case GB_REGION_EXTERNAL_RAM:
535 case GB_REGION_EXTERNAL_RAM + 1:
536 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
537 return;
538 case GB_REGION_WORKING_RAM_BANK0:
539 case GB_REGION_WORKING_RAM_BANK0 + 2:
540 oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
541 memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
542 break;
543 case GB_REGION_WORKING_RAM_BANK1:
544 if (segment < 0) {
545 oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
546 memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
547 } else if (segment < 8) {
548 oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
549 memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
550 } else {
551 return;
552 }
553 break;
554 default:
555 if (address < GB_BASE_OAM) {
556 oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
557 memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
558 } else if (address < GB_BASE_UNUSABLE) {
559 oldValue = gb->video.oam.raw[address & 0xFF];
560 gb->video.oam.raw[address & 0xFF] = value;
561 } else if (address < GB_BASE_HRAM) {
562 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
563 return;
564 } else if (address < GB_BASE_IE) {
565 oldValue = memory->hram[address & GB_SIZE_HRAM];
566 memory->hram[address & GB_SIZE_HRAM] = value;
567 } else {
568 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
569 return;
570 }
571 }
572 if (old) {
573 *old = oldValue;
574 }
575}
576
// Writes the memory subsystem's state into a savestate. Multi-byte fields
// are stored little-endian so states are portable across hosts.
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	// Event times are stored relative to "now" so the deserializer can
	// reschedule them against a fresh timing context.
	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	// Pack the boolean/bitfield state into a single flags word.
	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);
}
607
// Restores the memory subsystem's state from a savestate: raw RAM contents,
// bank mappings, DMA/HDMA progress, pending events, and packed flags.
void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	// Re-derive the bank pointers from the restored bank numbers.
	GBMBCSwitchBank(memory, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	// Reschedule the DMA/HDMA events (stored relative to serialization time)
	// only if a transfer was actually in flight.
	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	mTimingDeschedule(&gb->timing, &memory->dmaEvent);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	mTimingDeschedule(&gb->timing, &memory->hdmaEvent);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	// Unpack the boolean/bitfield state.
	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);
}
651
652void _pristineCow(struct GB* gb) {
653 if (gb->memory.rom != gb->pristineRom) {
654 return;
655 }
656 gb->memory.rom = anonymousMemoryMap(GB_SIZE_CART_MAX);
657 memcpy(gb->memory.rom, gb->pristineRom, gb->memory.romSize);
658 memset(((uint8_t*) gb->memory.rom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
659 if (gb->pristineRom == gb->memory.romBase) {
660 gb->memory.romBase = gb->memory.rom;
661 }
662 GBMBCSwitchBank(&gb->memory, gb->memory.currentBank);
663}