src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <mgba/internal/gb/memory.h>

#include <mgba/core/interface.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/io.h>
#include <mgba/internal/gb/mbc.h>
#include <mgba/internal/gb/serialize.h>
#include <mgba/internal/lr35902/lr35902.h>

#include <mgba-util/memory.h>

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");

struct OAMBlock {
	uint16_t low;
	uint16_t high;
};

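// While OAM DMA is active, CPU accesses that land on the same bus as the DMA
// source are blocked. Each table below is indexed by the 8 KiB region of the
// DMA source (dmaSource >> 13) and gives the [low, high) range that conflicts
// with the transfer: reads there return 0xFF and writes are dropped (see
// GBLoad8/GBStore8). The DMG and CGB tables differ in how the buses are split.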
static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};

static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 },
	{ 0xA000, 0xC000 },
};

static void _pristineCow(struct GB* gb);

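// Fast path for fetches from the currently mapped ROM region. GBSetActiveRegion
// caches a pointer to ROM bank 0 or the switchable bank so most loads become a
// masked array read; anything outside the cached window falls back to the full
// GBLoad8 dispatcher.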
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBank;
		cpu->memory.activeRegionEnd = GB_BASE_VRAM;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.currentSegment = GBCurrentSegment;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbcRead = NULL;
	gb->memory.mbcWrite = NULL;

	gb->memory.rtc = NULL;
	gb->memory.rotation = NULL;
	gb->memory.rumble = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
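	// On CGB, seed WRAM with an alternating pattern so uninitialized reads look
	// closer to real hardware; this is only an approximation of the power-on
	// contents, not an exact model.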
	if (gb->model >= GB_MODEL_CGB) {
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.dmaEvent.priority = 0x40;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
	gb->memory.hdmaEvent.priority = 0x41;

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	switch (gb->memory.mbcType) {
	case GB_MBC1:
		gb->memory.mbcState.mbc1.mode = 0;
		break;
	default:
		memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
	}

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

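// Selects the switchable WRAM bank mapped at 0xD000. Bank 0 is remapped to
// bank 1, matching the behavior of the CGB SVBK register.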
void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

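// Full bus read, dispatched on the top four address bits. While OAM DMA is in
// flight, reads that conflict with the transfer (see the tables above) and all
// reads from OAM return 0xFF instead.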
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		block = &block[memory->dmaSource >> 13];
		if (address >= block->low && address < block->high) {
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->sramAccess) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

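// Full bus write. The same OAM DMA bus-conflict rules as GBLoad8 apply: writes
// into the conflicting range or into OAM are silently dropped while a transfer
// is running. Writes into the ROM area are routed to the MBC handler.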
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		block = &block[memory->dmaSource >> 13];
		if (address >= block->low && address < block->high) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbcWrite(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

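// Reports which bank ("segment") is currently mapped at the given address, for
// the debugger and memory viewers. Regions without banking always report 0.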
int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return 0;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->currentBank;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramCurrentBank;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		return memory->sramCurrentBank;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return 0;
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramCurrentBank;
	default:
		return 0;
	}
}

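// Debugger read path: like GBLoad8, but without the OAM DMA blocking, and with
// an optional explicit bank selected by segment (negative means "whatever bank
// is currently mapped").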
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

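// Begins an OAM DMA transfer of 0xA0 bytes from base to OAM. Sources above
// 0xF100 are ignored; in this implementation the first byte is copied 8 cycles
// after the trigger and the rest by _GBMemoryDMAService.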
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

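// Handles writes to the CGB HDMA5 register, which starts a VRAM DMA. Bit 7
// selects HBlank-paced DMA (HDMA) versus an immediate general-purpose transfer;
// the low 7 bits encode the length in 0x10-byte blocks, minus one. The source
// is aligned to 0x10 and must not be in VRAM; the destination is forced into
// VRAM. While bytes are being copied the CPU is blocked (see _GBMemoryHDMAService).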
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || gb->video.mode == 0) {
		gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
		gb->cpu->nextEvent = gb->cpu->cycles;
	}
}

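// Timing callback for OAM DMA: copies one byte from the current source to OAM
// and reschedules itself every 4 cycles until all 0xA0 bytes are done.
// dmaRemaining is cleared around the GBLoad8 call so the read itself is not
// subject to the DMA bus-conflict rules above.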
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	int dmaRemaining = gb->memory.dmaRemaining;
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}

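// Timing callback for HDMA/GDMA: copies one byte every 2 cycles with the CPU
// blocked. When the current run finishes, the updated source and destination
// are written back to the HDMA registers; for HBlank-paced transfers HDMA5 is
// decremented until it underflows to 0xFF, which ends the transfer.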
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}

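// Applies a patch byte directly to the backing memory, bypassing the MBC; this
// is used for software patching (e.g. by the debugger). ROM patches trigger a
// copy-on-write of the pristine ROM (see _pristineCow), and the previous value
// is returned through old so a patch can be reverted.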
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			// Write through gb->video.vram (not vramBank) so the explicit segment
			// indexes the full VRAM array instead of double-offsetting the bank pointer.
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
			gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

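// Copies the memory subsystem into a savestate. Multi-byte fields go through
// the STORE_*LE macros so states stay little-endian regardless of host, and the
// pending DMA/HDMA events are stored as deltas from the current timing position.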
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
		state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
		break;
	case GB_MBC3_RTC:
		STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		state->memory.mbc7.state = memory->mbcState.mbc7.state;
		state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
		state->memory.mbc7.address = memory->mbcState.mbc7.address;
		state->memory.mbc7.access = memory->mbcState.mbc7.access;
		state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
		state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
		STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	default:
		break;
	}
}

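// Restores the memory subsystem from a savestate: the mirror of
// GBMemorySerialize. Bank pointers are rebuilt from the saved bank indices and
// any pending DMA/HDMA events are rescheduled from their saved deltas.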
void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
		memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
		if (memory->mbcState.mbc1.mode) {
			GBMBCSwitchBank0(gb, memory->currentBank >> memory->mbcState.mbc1.multicartStride);
		}
		break;
	case GB_MBC3_RTC:
		// TODO?
		//LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		memory->mbcState.mbc7.state = state->memory.mbc7.state;
		memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
		memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
		memory->mbcState.mbc7.access = state->memory.mbc7.access;
		memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
		memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
		LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	default:
		break;
	}
}

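// Copy-on-write for ROM patching: while the ROM is still pristine (unmodified,
// possibly mapped straight from the file), the first patch copies it into an
// anonymous writable mapping padded with 0xFF, then remaps the current bank so
// later patches write to the copy.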
void _pristineCow(struct GB* gb) {
	if (!gb->isPristine) {
		return;
	}
	void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(newRom, gb->memory.rom, gb->memory.romSize);
	memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	if (gb->memory.rom == gb->memory.romBase) {
		gb->memory.romBase = newRom;
	}
	gb->memory.rom = newRom;
	GBMBCSwitchBank(gb, gb->memory.currentBank);
	gb->isPristine = false;
}