all repos — mgba @ 3ceadd4ccdb70574bc09d1a1270ab5d9341de776

mGBA Game Boy Advance Emulator

src/gb/memory.c (view raw)

  1/* Copyright (c) 2013-2016 Jeffrey Pfau
  2 *
  3 * This Source Code Form is subject to the terms of the Mozilla Public
  4 * License, v. 2.0. If a copy of the MPL was not distributed with this
  5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
  6#include "memory.h"
  7
  8#include "core/interface.h"
  9#include "gb/gb.h"
 10#include "gb/io.h"
 11#include "gb/mbc.h"
 12#include "gb/serialize.h"
 13
 14#include "util/memory.h"
 15
 16mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory");
 17
 18static void _pristineCow(struct GB* gba);
 19
 20static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
 21	if (UNLIKELY(address > cpu->memory.activeRegionEnd)) {
 22		cpu->memory.setActiveRegion(cpu, address);
 23		return cpu->memory.cpuLoad8(cpu, address);
 24	}
 25	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
 26}
 27
 28static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
 29	struct GB* gb = (struct GB*) cpu->master;
 30	struct GBMemory* memory = &gb->memory;
 31	switch (address >> 12) {
 32	case GB_REGION_CART_BANK0:
 33	case GB_REGION_CART_BANK0 + 1:
 34	case GB_REGION_CART_BANK0 + 2:
 35	case GB_REGION_CART_BANK0 + 3:
 36		cpu->memory.cpuLoad8 = GBFastLoad8;
 37		cpu->memory.activeRegion = memory->romBase;
 38		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
 39		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
 40		break;
 41	case GB_REGION_CART_BANK1:
 42	case GB_REGION_CART_BANK1 + 1:
 43	case GB_REGION_CART_BANK1 + 2:
 44	case GB_REGION_CART_BANK1 + 3:
 45		cpu->memory.cpuLoad8 = GBFastLoad8;
 46		cpu->memory.activeRegion = memory->romBank;
 47		cpu->memory.activeRegionEnd = GB_BASE_VRAM;
 48		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
 49		break;
 50	default:
 51		cpu->memory.cpuLoad8 = GBLoad8;
 52		break;
 53	}
 54}
 55
 56static void _GBMemoryDMAService(struct GB* gb);
 57static void _GBMemoryHDMAService(struct GB* gb);
 58
 59void GBMemoryInit(struct GB* gb) {
 60	struct LR35902Core* cpu = gb->cpu;
 61	cpu->memory.cpuLoad8 = GBLoad8;
 62	cpu->memory.load8 = GBLoad8;
 63	cpu->memory.store8 = GBStore8;
 64	cpu->memory.setActiveRegion = GBSetActiveRegion;
 65
 66	gb->memory.wram = 0;
 67	gb->memory.wramBank = 0;
 68	gb->memory.rom = 0;
 69	gb->memory.romBank = 0;
 70	gb->memory.romSize = 0;
 71	gb->memory.sram = 0;
 72	gb->memory.mbcType = GB_MBC_AUTODETECT;
 73	gb->memory.mbc = 0;
 74
 75	gb->memory.rtc = NULL;
 76
 77	GBIOInit(gb);
 78}
 79
 80void GBMemoryDeinit(struct GB* gb) {
 81	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
 82	if (gb->memory.rom) {
 83		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
 84	}
 85}
 86
// Reallocate and reinitialize all memory state for a (re)boot.
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	if (gb->model >= GB_MODEL_CGB) {
		// Fill WRAM with the CGB boot pattern: pairs of words followed by
		// pairs of their complement, with the base value inverting every
		// 0x200 words (the (i & 0x1FF) check below).
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	// NOTE(review): assumes gb->memory.rom is non-NULL here — verify that
	// callers only reset after a ROM has been loaded.
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	// No DMA or HDMA pending.
	gb->memory.dmaNext = INT_MAX;
	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaNext = INT_MAX;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.sramAccess = false;
	gb->memory.rtcAccess = false;
	gb->memory.activeRtcReg = 0;
	gb->memory.rtcLatched = false;
	memset(&gb->memory.rtcRegs, 0, sizeof(gb->memory.rtcRegs));

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	// NOTE(review): this failure check runs only after the WRAM allocation
	// has already been written to above — a failed anonymousMemoryMap would
	// be dereferenced before reaching here; verify intended.
	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}
140
141void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
142	bank &= 7;
143	if (!bank) {
144		bank = 1;
145	}
146	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
147	memory->wramCurrentBank = bank;
148}
149
// Full (slow-path) 8-bit bus read covering the entire GB address space.
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		// Fixed ROM bank (romBase, so any base overlay is honored).
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// Currently mapped switchable ROM bank.
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// Currently mapped VRAM bank.
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		// Cartridge RAM space: RTC register, SRAM, or MBC-specific access,
		// in that priority order.
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		// Nothing claims the access: reads back 0xFF.
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		// Fixed WRAM bank, plus its echo region (+2).
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		// Switchable WRAM bank.
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			// Echo of the switchable WRAM bank.
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// OAM is only CPU-readable during video modes 0-1.
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		// 0xFFFF: the interrupt-enable register.
		return GBIORead(gb, REG_IE);
	}
}
207
// Full (slow-path) 8-bit bus write covering the entire GB address space.
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// Writes into ROM space drive the MBC's control registers. The bank
		// layout may have changed, so re-resolve the fast-fetch region for
		// the current PC.
		memory->mbc(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// TODO: Block access in wrong modes
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		// Cartridge RAM space: RTC register, SRAM, or MBC-specific write.
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else if (memory->mbcType == GB_MBC7) {
			GBMBC7Write(memory, address, value);
		}
		// Flag SRAM as modified so it can be flushed to disk.
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		// Fixed WRAM bank, plus its echo region (+2).
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		// Switchable WRAM bank.
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			// Echo of the switchable WRAM bank.
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			// OAM is only CPU-writable during video modes 0-1.
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			// 0xFFFF: the interrupt-enable register.
			GBIOWrite(gb, REG_IE, value);
		}
	}
}
// Read a byte, optionally from an explicitly selected bank. |segment|
// selects a specific ROM/VRAM/SRAM/WRAM bank where applicable; a negative
// segment targets whichever bank is currently mapped. Out-of-range segments
// read back 0xFF.
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		// Fixed ROM bank; no segment selection here.
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment *GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		// Same priority order as GBLoad8: RTC, then SRAM, then MBC.
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment *GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		// Fixed WRAM bank, plus its echo region (+2).
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment *GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			// Echo of the switchable WRAM bank.
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// OAM is only readable during video modes 0-1.
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			// NOTE(review): GBIORead may have side effects for some
			// registers; verify this is acceptable for a "view" accessor.
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
345
346int32_t GBMemoryProcessEvents(struct GB* gb, int32_t cycles) {
347	int nextEvent = INT_MAX;
348	if (gb->memory.dmaRemaining) {
349		gb->memory.dmaNext -= cycles;
350		if (gb->memory.dmaNext <= 0) {
351			_GBMemoryDMAService(gb);
352		}
353		nextEvent = gb->memory.dmaNext;
354	}
355	if (gb->memory.hdmaRemaining) {
356		gb->memory.hdmaNext -= cycles;
357		if (gb->memory.hdmaNext <= 0) {
358			_GBMemoryHDMAService(gb);
359		}
360		if (gb->memory.hdmaNext < nextEvent) {
361			nextEvent = gb->memory.hdmaNext;
362		}
363	}
364	return nextEvent;
365}
366
// Begin an OAM DMA transfer from |base| (the value written to the DMA
// register, shifted left 8). Swaps in the DMA-aware load/store handlers so
// blocked bus accesses behave correctly for the duration.
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	// NOTE(review): sources above 0xF100 are silently ignored — confirm
	// this bound against hardware behavior.
	if (base > 0xF100) {
		return;
	}
	gb->cpu->memory.store8 = GBDMAStore8;
	gb->cpu->memory.load8 = GBDMALoad8;
	gb->cpu->memory.cpuLoad8 = GBDMALoad8;
	// First byte copies 8 cycles from now; pull the CPU's next event
	// forward if needed so the service routine runs on time.
	gb->memory.dmaNext = gb->cpu->cycles + 8;
	if (gb->memory.dmaNext < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->memory.dmaNext;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0; // 160 bytes: all of OAM.
}
382
// Handle a write to HDMA5: latch source/destination from HDMA1-HDMA4 and
// either kick off a general-purpose DMA (bit 7 clear, no HDMA running),
// arm hblank-driven HDMA (bit 7 set), or stop a running HDMA.
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	// Source is 16-byte aligned.
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		// VRAM is not a valid HDMA source.
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return;
	}
	// Destination is forced into VRAM (0x8000-0x9FF0), 16-byte aligned.
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if (!wasHdma && !gb->memory.isHdma) {
		// General-purpose DMA: transfer ((value & 0x7F) + 1) * 16 bytes
		// starting immediately. Starting or stopping hblank HDMA is
		// expressed purely through the isHdma flag above.
		gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		gb->memory.hdmaNext = gb->cpu->cycles;
		gb->cpu->nextEvent = gb->cpu->cycles;
	}
}
403
// Copy one byte of the active OAM DMA and schedule the next, restoring the
// normal bus handlers once all 0xA0 bytes are done.
void _GBMemoryDMAService(struct GB* gb) {
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	--gb->memory.dmaRemaining;
	if (gb->memory.dmaRemaining) {
		// One byte every 4 cycles.
		gb->memory.dmaNext += 4;
	} else {
		gb->memory.dmaNext = INT_MAX;
		// NOTE(review): cpuLoad8 is left pointing at GBDMALoad8 until the
		// next setActiveRegion call — verify this is intentional.
		gb->cpu->memory.store8 = GBStore8;
		gb->cpu->memory.load8 = GBLoad8;
	}
}
419
// Copy one byte of the active GDMA/HDMA transfer, charging the CPU 2 cycles
// per byte, and update the HDMA registers when the block completes.
void _GBMemoryHDMAService(struct GB* gb) {
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	gb->cpu->cycles += 2;
	if (gb->memory.hdmaRemaining) {
		gb->memory.hdmaNext += 2;
	} else {
		// Block finished: write the advanced source/destination back to the
		// visible HDMA registers.
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			// HDMA5 counts down remaining 16-byte blocks; underflowing to
			// 0xFF marks the whole transfer complete.
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			// GDMA complete: set bit 7 to report inactivity.
			gb->memory.io[REG_HDMA5] |= 0x80;
		}
	}
}
444
// Bus range the CPU is locked out of while OAM DMA runs (see GBDMALoad8 /
// GBDMAStore8: reads in the range return 0xFF, writes are dropped).
struct OAMBlock {
	uint16_t low;  // First blocked address (inclusive).
	uint16_t high; // First address past the blocked range (exclusive).
};

// Blocked ranges indexed by (dmaSource >> 13), i.e. which 8KiB slice of the
// address space the DMA source lies in. DMG model.
static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 }, // Source in VRAM blocks VRAM instead.
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};

// Same table for CGB, which blocks narrower ranges.
static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 }, // Source in VRAM blocks VRAM instead.
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 }, // Source in WRAM blocks WRAM/echo.
	{ 0xA000, 0xC000 },
};
471
472uint8_t GBDMALoad8(struct LR35902Core* cpu, uint16_t address) {
473	struct GB* gb = (struct GB*) cpu->master;
474	struct GBMemory* memory = &gb->memory;
475	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
476	block = &block[memory->dmaSource >> 13];
477	if (address >= block->low && address < block->high) {
478		return 0xFF;
479	}
480	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
481		return 0xFF;
482	}
483	return GBLoad8(cpu, address);
484}
485
486void GBDMAStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
487	struct GB* gb = (struct GB*) cpu->master;
488	struct GBMemory* memory = &gb->memory;
489	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
490	block = &block[memory->dmaSource >> 13];
491	if (address >= block->low && address < block->high) {
492		return;
493	}
494	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
495		return;
496	}
497	GBStore8(cpu, address, value);
498}
499
500void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
501	struct GB* gb = (struct GB*) cpu->master;
502	struct GBMemory* memory = &gb->memory;
503	int8_t oldValue = -1;
504
505	switch (address >> 12) {
506	case GB_REGION_CART_BANK0:
507	case GB_REGION_CART_BANK0 + 1:
508	case GB_REGION_CART_BANK0 + 2:
509	case GB_REGION_CART_BANK0 + 3:
510		_pristineCow(gb);
511		oldValue = memory->rom[address & (GB_SIZE_CART_BANK0 - 1)];
512		memory->rom[address & (GB_SIZE_CART_BANK0 - 1)] =  value;
513		break;
514	case GB_REGION_CART_BANK1:
515	case GB_REGION_CART_BANK1 + 1:
516	case GB_REGION_CART_BANK1 + 2:
517	case GB_REGION_CART_BANK1 + 3:
518		_pristineCow(gb);
519		if (segment < 0) {
520			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
521			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
522		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
523			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
524			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
525		} else {
526			return;
527		}
528		break;
529	case GB_REGION_VRAM:
530	case GB_REGION_VRAM + 1:
531		if (segment < 0) {
532			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
533			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
534		} else if (segment < 2) {
535			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
536			gb->video.vramBank[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
537		} else {
538			return;
539		}
540		break;
541	case GB_REGION_EXTERNAL_RAM:
542	case GB_REGION_EXTERNAL_RAM + 1:
543		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
544		return;
545	case GB_REGION_WORKING_RAM_BANK0:
546	case GB_REGION_WORKING_RAM_BANK0 + 2:
547		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
548		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
549		break;
550	case GB_REGION_WORKING_RAM_BANK1:
551		if (segment < 0) {
552			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
553			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
554		} else if (segment < 8) {
555			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
556			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
557		} else {
558			return;
559		}
560		break;
561	default:
562		if (address < GB_BASE_OAM) {
563			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
564			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
565		} else if (address < GB_BASE_UNUSABLE) {
566			oldValue = gb->video.oam.raw[address & 0xFF];
567			gb->video.oam.raw[address & 0xFF] = value;
568		} else if (address < GB_BASE_HRAM) {
569			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
570			return;
571		} else if (address < GB_BASE_IE) {
572			oldValue = memory->hram[address & GB_SIZE_HRAM];
573			memory->hram[address & GB_SIZE_HRAM] = value;
574		} else {
575			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
576			return;
577		}
578	}
579	if (old) {
580		*old = oldValue;
581	}
582}
583
// Write the memory subsystem's state into |state| using the fixed
// little-endian savestate layout.
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	// Pending OAM DMA state.
	STORE_32LE(memory->dmaNext, 0, &state->memory.dmaNext);
	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	// Pending HDMA state.
	STORE_32LE(memory->hdmaNext, 0, &state->memory.hdmaNext);
	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	// Pack the boolean/small-field state into a single flags word.
	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);
}
613
// Restore the memory subsystem's state from |state| (inverse of
// GBMemorySerialize).
void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	// Re-derive the bank pointers from the restored bank numbers.
	GBMBCSwitchBank(memory, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	// Pending OAM DMA state.
	LOAD_32LE(memory->dmaNext, 0, &state->memory.dmaNext);
	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	// Pending HDMA state.
	LOAD_32LE(memory->hdmaNext, 0, &state->memory.hdmaNext);
	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	// Unpack the flags word back into individual fields.
	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);
}
647
648void _pristineCow(struct GB* gb) {
649	if (gb->memory.rom != gb->pristineRom) {
650		return;
651	}
652	gb->memory.rom = anonymousMemoryMap(GB_SIZE_CART_MAX);
653	memcpy(gb->memory.rom, gb->pristineRom, gb->memory.romSize);
654	memset(((uint8_t*) gb->memory.rom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
655	GBMBCSwitchBank(&gb->memory, gb->memory.currentBank);
656}