/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <mgba/internal/gb/memory.h>

#include <mgba/core/interface.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/io.h>
#include <mgba/internal/gb/mbc.h>
#include <mgba/internal/gb/serialize.h>
#include <mgba/internal/lr35902/lr35902.h>

#include <mgba-util/memory.h>

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");
18
// Half-open address range [low, high) that reads as 0xFF (and swallows
// writes) from the CPU's point of view while OAM DMA is in progress.
// See GBLoad8/GBStore8, which index the tables below by dmaSource >> 13.
struct OAMBlock {
	uint16_t low;
	uint16_t high;
};
23
// Blocked address ranges during OAM DMA on DMG, indexed by the DMA
// source address divided into 8 KiB slices (dmaSource >> 13). When the
// DMA engine reads from VRAM (slice 4, 0x8000-0x9FFF) the VRAM bus is
// blocked; for every other source, 0xA000-0xFDFF is blocked instead —
// presumably modeling which bus the DMA unit occupies (TODO: confirm
// against hardware bus-conflict documentation).
static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};
34
// Blocked address ranges during OAM DMA on CGB, indexed like
// _oamBlockDMG. The CGB splits the buses more finely: VRAM sources
// (slice 4) block 0x8000-0x9FFF, WRAM sources (slice 6, 0xC000-0xDFFF)
// block 0xC000-0xFDFF, and everything else blocks only cartridge SRAM
// (0xA000-0xBFFF).
static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 },
	{ 0xA000, 0xC000 },
};
45
46static void _pristineCow(struct GB* gba);
47
48static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
49 if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
50 cpu->memory.setActiveRegion(cpu, address);
51 return cpu->memory.cpuLoad8(cpu, address);
52 }
53 return cpu->memory.activeRegion[address & cpu->memory.activeMask];
54}
55
56static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
57 struct GB* gb = (struct GB*) cpu->master;
58 struct GBMemory* memory = &gb->memory;
59 switch (address >> 12) {
60 case GB_REGION_CART_BANK0:
61 case GB_REGION_CART_BANK0 + 1:
62 case GB_REGION_CART_BANK0 + 2:
63 case GB_REGION_CART_BANK0 + 3:
64 cpu->memory.cpuLoad8 = GBFastLoad8;
65 cpu->memory.activeRegion = memory->romBase;
66 cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
67 cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
68 break;
69 case GB_REGION_CART_BANK1:
70 case GB_REGION_CART_BANK1 + 1:
71 case GB_REGION_CART_BANK1 + 2:
72 case GB_REGION_CART_BANK1 + 3:
73 cpu->memory.cpuLoad8 = GBFastLoad8;
74 cpu->memory.activeRegion = memory->romBank;
75 cpu->memory.activeRegionEnd = GB_BASE_VRAM;
76 cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
77 break;
78 default:
79 cpu->memory.cpuLoad8 = GBLoad8;
80 break;
81 }
82}
83
static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
86
87void GBMemoryInit(struct GB* gb) {
88 struct LR35902Core* cpu = gb->cpu;
89 cpu->memory.cpuLoad8 = GBLoad8;
90 cpu->memory.load8 = GBLoad8;
91 cpu->memory.store8 = GBStore8;
92 cpu->memory.currentSegment = GBCurrentSegment;
93 cpu->memory.setActiveRegion = GBSetActiveRegion;
94
95 gb->memory.wram = 0;
96 gb->memory.wramBank = 0;
97 gb->memory.rom = 0;
98 gb->memory.romBank = 0;
99 gb->memory.romSize = 0;
100 gb->memory.sram = 0;
101 gb->memory.mbcType = GB_MBC_AUTODETECT;
102 gb->memory.mbcRead = NULL;
103 gb->memory.mbcWrite = NULL;
104
105 gb->memory.rtc = NULL;
106 gb->memory.rotation = NULL;
107 gb->memory.rumble = NULL;
108
109 GBIOInit(gb);
110}
111
112void GBMemoryDeinit(struct GB* gb) {
113 mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
114 if (gb->memory.rom) {
115 mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
116 }
117}
118
// Reset all memory state to power-on values: reallocate WRAM, reset
// banking, clear DMA/HDMA state, and (re)register the timing events.
void GBMemoryReset(struct GB* gb) {
	// WRAM is thrown away and remapped fresh on every reset.
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	if (gb->model >= GB_MODEL_CGB) {
		// Fill WRAM with an alternating striped pattern — presumably
		// modeling the characteristic CGB power-on WRAM contents
		// (TODO: confirm against hardware dumps). The pattern inverts
		// every 0x200 words and alternates within each 4-word group.
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	// Switchable WRAM window starts on bank 1 (bank 0 is not selectable).
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	// No DMA or HDMA in flight after reset.
	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;


	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.dmaEvent.priority = 0x40;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
	gb->memory.hdmaEvent.priority = 0x41;

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	switch (gb->memory.mbcType) {
	case GB_MBC1:
		// MBC1 keeps its other state; only the banking mode resets.
		gb->memory.mbcState.mbc1.mode = 0;
		break;
	default:
		memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
	}

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	// WRAM allocation failed: tear the subsystem back down.
	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}
180
181void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
182 bank &= 7;
183 if (!bank) {
184 bank = 1;
185 }
186 memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
187 memory->wramCurrentBank = bank;
188}
189
// Full bus read, honoring banking, OAM DMA bus blocking, and PPU
// access restrictions. Installed both as the generic load8 and as the
// slow-path cpuLoad8.
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		// OAM DMA is running: the bus slice the DMA source occupies is
		// unavailable to the CPU, and so is OAM itself; blocked reads
		// return 0xFF.
		const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		block = &block[memory->dmaSource >> 13];
		if (address >= block->low && address < block->high) {
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		// Fixed ROM window (romBase may differ from rom[0] for some MBCs).
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// Switchable ROM bank.
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		// Priority: latched RTC register, then an MBC-specific reader,
		// then enabled SRAM; otherwise open bus (0xFF).
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->sramAccess && memory->sram) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		// WRAM bank 0 (0xC000) and its echo (0xE000).
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		// 0xF000-0xFFFF: echo RAM tail, OAM, unusable, I/O, HRAM, IE.
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// OAM is CPU-readable only in PPU modes 0 and 1.
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
257
// Full bus write, mirroring GBLoad8's region decode. Writes into
// blocked regions during OAM DMA are silently dropped.
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		// Same bus-blocking rules as reads; see GBLoad8.
		const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		block = &block[memory->dmaSource >> 13];
		if (address >= block->low && address < block->high) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// ROM-range writes go to the MBC; banking may have changed, so
		// re-derive the fast-fetch window for the current PC.
		memory->mbcWrite(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// Notify the renderer before mutating VRAM.
		gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else if (memory->mbcType == GB_MBC7) {
			GBMBC7Write(memory, address, value);
		}
		// Mark SRAM dirty regardless, so the save file gets flushed.
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		// WRAM bank 0 (0xC000) and its echo (0xE000).
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		// 0xF000-0xFFFF: echo RAM tail, OAM, unusable, I/O, HRAM, IE.
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			// OAM is writable only in PPU modes 0 and 1.
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}
325
326int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
327 struct GB* gb = (struct GB*) cpu->master;
328 struct GBMemory* memory = &gb->memory;
329 switch (address >> 12) {
330 case GB_REGION_CART_BANK0:
331 case GB_REGION_CART_BANK0 + 1:
332 case GB_REGION_CART_BANK0 + 2:
333 case GB_REGION_CART_BANK0 + 3:
334 return 0;
335 case GB_REGION_CART_BANK1:
336 case GB_REGION_CART_BANK1 + 1:
337 case GB_REGION_CART_BANK1 + 2:
338 case GB_REGION_CART_BANK1 + 3:
339 return memory->currentBank;
340 case GB_REGION_VRAM:
341 case GB_REGION_VRAM + 1:
342 return gb->video.vramCurrentBank;
343 case GB_REGION_EXTERNAL_RAM:
344 case GB_REGION_EXTERNAL_RAM + 1:
345 return memory->sramCurrentBank;
346 case GB_REGION_WORKING_RAM_BANK0:
347 case GB_REGION_WORKING_RAM_BANK0 + 2:
348 return 0;
349 case GB_REGION_WORKING_RAM_BANK1:
350 return memory->wramCurrentBank;
351 default:
352 return 0;
353 }
354}
355
// Debugger-oriented read: like GBLoad8 but with an explicit bank
// selector. segment < 0 means "the currently mapped bank"; otherwise
// the requested bank is read directly, returning 0xFF when the bank is
// out of range. No DMA blocking is applied. Note that the I/O fallthrough
// still goes through GBIORead, which may not be fully side-effect-free.
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment *GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		// Same priority order as GBLoad8: RTC, SRAM, MBC reader, open bus.
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0 && memory->sram) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment *GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment *GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		// 0xF000-0xFFFF: echo RAM tail, OAM, unusable, I/O, HRAM, IE.
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// OAM readable only in PPU modes 0 and 1, as on the real bus.
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
437
438void GBMemoryDMA(struct GB* gb, uint16_t base) {
439 if (base > 0xF100) {
440 return;
441 }
442 mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
443 mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
444 if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
445 gb->cpu->nextEvent = gb->cpu->cycles + 8;
446 }
447 gb->memory.dmaSource = base;
448 gb->memory.dmaDest = 0;
449 gb->memory.dmaRemaining = 0xA0;
450}
451
// Handle a write to HDMA5 (FF55): latch the transfer parameters from
// HDMA1-4 and either start a general-purpose DMA, arm H-blank DMA, or
// report status. Returns the value to store back into the register.
uint8_t GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	// Assemble source/destination from the four latch registers.
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	// Low 4 bits of the source are ignored; VRAM sources are invalid.
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return value | 0x80;
	}
	// Destination is always within VRAM: keep 13 significant bits.
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	// Start copying immediately for general-purpose DMA, or — if we're
	// already in H-blank (mode 0) — for the first HDMA block too.
	if ((!wasHdma && !gb->memory.isHdma) || gb->video.mode == 0) {
		if (gb->memory.isHdma) {
			// HDMA moves one 0x10-byte block per H-blank.
			gb->memory.hdmaRemaining = 0x10;
		} else {
			// GDMA moves the whole (length+1)*0x10 bytes at once.
			gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		}
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
	} else if (gb->memory.isHdma && !GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC])) {
		// LCD off: HDMA can't trigger on H-blank; report it as stalled.
		return 0x80 | ((value + 1) & 0x7F);
	}
	return value & 0x7F;
}
479
// Timing callback: move one byte of the in-flight OAM DMA and
// reschedule until the transfer (0xA0 bytes) completes.
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	// Temporarily clear dmaRemaining so GBLoad8 doesn't apply its
	// OAM-DMA bus blocking to the DMA unit's own read.
	int dmaRemaining = gb->memory.dmaRemaining;
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		// One byte every 4 cycles.
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}
495
// Timing callback: move one byte of the in-flight HDMA/GDMA block,
// keeping the CPU stalled until the current block is done, then write
// the advanced source/destination back into the I/O latches.
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		// One byte every 2 cycles while the block is in progress.
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		// Block complete: release the CPU and expose the advanced
		// pointers through HDMA1-4.
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			// HDMA5 counts down blocks; 0xFF signals completion.
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			// GDMA finishes in one shot.
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}
523
524void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
525 struct GB* gb = (struct GB*) cpu->master;
526 struct GBMemory* memory = &gb->memory;
527 int8_t oldValue = -1;
528
529 switch (address >> 12) {
530 case GB_REGION_CART_BANK0:
531 case GB_REGION_CART_BANK0 + 1:
532 case GB_REGION_CART_BANK0 + 2:
533 case GB_REGION_CART_BANK0 + 3:
534 _pristineCow(gb);
535 oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
536 memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
537 break;
538 case GB_REGION_CART_BANK1:
539 case GB_REGION_CART_BANK1 + 1:
540 case GB_REGION_CART_BANK1 + 2:
541 case GB_REGION_CART_BANK1 + 3:
542 _pristineCow(gb);
543 if (segment < 0) {
544 oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
545 memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
546 } else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
547 oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
548 memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
549 } else {
550 return;
551 }
552 break;
553 case GB_REGION_VRAM:
554 case GB_REGION_VRAM + 1:
555 if (segment < 0) {
556 oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
557 gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
558 gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
559 } else if (segment < 2) {
560 oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
561 gb->video.vramBank[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
562 gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
563 } else {
564 return;
565 }
566 break;
567 case GB_REGION_EXTERNAL_RAM:
568 case GB_REGION_EXTERNAL_RAM + 1:
569 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
570 return;
571 case GB_REGION_WORKING_RAM_BANK0:
572 case GB_REGION_WORKING_RAM_BANK0 + 2:
573 oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
574 memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
575 break;
576 case GB_REGION_WORKING_RAM_BANK1:
577 if (segment < 0) {
578 oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
579 memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
580 } else if (segment < 8) {
581 oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
582 memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
583 } else {
584 return;
585 }
586 break;
587 default:
588 if (address < GB_BASE_OAM) {
589 oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
590 memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
591 } else if (address < GB_BASE_UNUSABLE) {
592 oldValue = gb->video.oam.raw[address & 0xFF];
593 gb->video.oam.raw[address & 0xFF] = value;
594 gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
595 } else if (address < GB_BASE_HRAM) {
596 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
597 return;
598 } else if (address < GB_BASE_IE) {
599 oldValue = memory->hram[address & GB_SIZE_HRAM];
600 memory->hram[address & GB_SIZE_HRAM] = value;
601 } else {
602 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
603 return;
604 }
605 }
606 if (old) {
607 *old = oldValue;
608 }
609}
610
// Write the memory subsystem's state into a savestate buffer. All
// multi-byte fields are stored little-endian; event times are stored
// relative to the current timing cursor.
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	// Store pending DMA/HDMA event times relative to "now" so they can
	// be rescheduled on load.
	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	// Pack the boolean/bitfield state into a single flags word.
	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);

	// MBC-specific state, only for mappers that carry extra state.
	switch (memory->mbcType) {
	case GB_MBC1:
		state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
		state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
		break;
	case GB_MBC3_RTC:
		STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		state->memory.mbc7.state = memory->mbcState.mbc7.state;
		state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
		state->memory.mbc7.address = memory->mbcState.mbc7.address;
		state->memory.mbc7.access = memory->mbcState.mbc7.access;
		state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
		state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
		STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	default:
		break;
	}
}
663
// Restore the memory subsystem's state from a savestate buffer,
// re-deriving banked pointers and rescheduling pending DMA/HDMA events.
// NOTE(review): bank indices come straight from the state with no range
// validation here — presumably the GBMBCSwitch* helpers clamp them;
// confirm before trusting untrusted savestates.
void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	// Rebuild the bank pointers from the restored indices.
	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	// Reschedule transfers only if one was actually in flight; the
	// stored times are relative to the timing cursor at save time.
	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	// Unpack the flags word written by GBMemorySerialize.
	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

	// MBC-specific state, mirroring the serialize side.
	switch (memory->mbcType) {
	case GB_MBC1:
		memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
		memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
		if (memory->mbcState.mbc1.mode) {
			// Mode 1 remaps bank 0 too (multicart behavior).
			GBMBCSwitchBank0(gb, memory->currentBank >> memory->mbcState.mbc1.multicartStride);
		}
		break;
	case GB_MBC3_RTC:
		LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		memory->mbcState.mbc7.state = state->memory.mbc7.state;
		memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
		// EEPROM addresses are 7 bits; clamp untrusted input.
		memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
		memory->mbcState.mbc7.access = state->memory.mbc7.access;
		memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
		memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
		LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	default:
		break;
	}
}
730
731void _pristineCow(struct GB* gb) {
732 if (!gb->isPristine) {
733 return;
734 }
735 void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
736 memcpy(newRom, gb->memory.rom, gb->memory.romSize);
737 memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
738 if (gb->memory.rom == gb->memory.romBase) {
739 gb->memory.romBase = newRom;
740 }
741 gb->memory.rom = newRom;
742 GBMBCSwitchBank(gb, gb->memory.currentBank);
743 gb->isPristine = false;
744}