mGBA Game Boy Advance Emulator
mgba @ fcc8b5c8052ae6bd4b4e7931ceb60461cee69605

src/gb/memory.c

/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <mgba/internal/gb/memory.h>

#include <mgba/core/interface.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/io.h>
#include <mgba/internal/gb/mbc.h>
#include <mgba/internal/gb/serialize.h>
#include <mgba/internal/lr35902/lr35902.h>

#include <mgba-util/memory.h>

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");

static void _pristineCow(struct GB* gb);

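/* Fast path for CPU loads: while an address stays inside the currently mapped
 * ROM region, cpuLoad8 points at GBFastLoad8 and reads come straight out of
 * activeRegion through activeMask, skipping the full GBLoad8 switch below.
 * Crossing activeRegionEnd re-selects the region and retries the load. */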
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBank;
		cpu->memory.activeRegionEnd = GB_BASE_VRAM;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.currentSegment = GBCurrentSegment;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbc = 0;

	gb->memory.rtc = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

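/* Reset re-maps WRAM from scratch; on CGB models it seeds the fresh mapping
 * with an alternating 0x00/0xFF word pattern (flipped every 0x200 words),
 * then restores bank 1 as the switchable ROM/WRAM bank, clears DMA/HDMA and
 * RTC state, and re-registers the DMA/HDMA timing events before GBMBCInit. */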
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	if (gb->model >= GB_MODEL_CGB) {
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.dmaEvent.priority = 0x40;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
	gb->memory.hdmaEvent.priority = 0x41;

	gb->memory.sramAccess = false;
	gb->memory.rtcAccess = false;
	gb->memory.activeRtcReg = 0;
	gb->memory.rtcLatched = false;
	memset(&gb->memory.rtcRegs, 0, sizeof(gb->memory.rtcRegs));

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

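/* General-purpose load: the top four address bits pick the region. ROM reads
 * go through the cached romBase/romBank pointers; 0xA000-0xBFFF is routed to
 * the RTC registers, SRAM, or the MBC depending on what the mapper has
 * enabled; 0xE000 echoes WRAM bank 0; and the default case (0xF000 and up)
 * handles the rest of echo RAM, OAM, the unusable gap, I/O, HRAM, and IE. */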
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbc(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else if (memory->mbcType == GB_MBC7) {
			GBMBC7Write(memory, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return 0;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->currentBank;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramCurrentBank;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		return memory->sramCurrentBank;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return 0;
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramCurrentBank;
	default:
		return 0;
	}
}

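/* Bank-aware view of memory, used for inspection rather than emulation: a
 * negative segment means "whatever is currently banked in", while a
 * non-negative segment addresses an explicit ROM, VRAM, SRAM, or WRAM bank
 * directly and returns 0xFF for banks that do not exist. */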
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

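/* OAM DMA, triggered from the DMA I/O register write: the transfer copies
 * 0xA0 bytes from base into OAM, one byte every 4 cycles via dmaEvent. While
 * it runs, the CPU's load/store handlers are swapped for the GBDMALoad8 and
 * GBDMAStore8 variants so accesses that collide with the DMA bus are blocked. */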
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	gb->cpu->memory.store8 = GBDMAStore8;
	gb->cpu->memory.load8 = GBDMALoad8;
	gb->cpu->memory.cpuLoad8 = GBDMALoad8;
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

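/* CGB HDMA/GDMA control (HDMA5): the source and destination are assembled
 * from HDMA1-HDMA4, with the source masked to 0xFFF0 and the destination
 * masked into VRAM. The low seven bits of the written value give the length
 * as ((value & 0x7F) + 1) * 0x10 bytes; bit 7 selects per-HBlank transfer
 * instead of an immediate one. */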
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || gb->video.mode == 0) {
		gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
		gb->cpu->nextEvent = gb->cpu->cycles;
	}
}

void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	--gb->memory.dmaRemaining;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	} else {
		gb->cpu->memory.store8 = GBStore8;
		gb->cpu->memory.load8 = GBLoad8;
	}
}

void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}

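/* While OAM DMA is active, only part of the address space stays accessible to
 * the CPU; which part depends on which bus the DMA source occupies. These
 * tables give the blocked [low, high) range for each 8KiB slice of the source
 * address (indexed by dmaSource >> 13), with separate maps for DMG and CGB. */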
struct OAMBlock {
	uint16_t low;
	uint16_t high;
};

static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};

static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 },
	{ 0xA000, 0xC000 },
};

uint8_t GBDMALoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return 0xFF;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return 0xFF;
	}
	return GBLoad8(cpu, address);
}

void GBDMAStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return;
	}
	GBStore8(cpu, address, value);
}

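/* Patch writes poke a byte directly into the selected bank, bypassing the MBC
 * and I/O side effects of GBStore8, and hand the previous byte back through
 * *old. ROM patches trigger _pristineCow first so the still-pristine mapped
 * ROM image is copied before it is modified. */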
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
			gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

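/* Savestate support: WRAM, HRAM, the bank indices, DMA/HDMA progress, and the
 * RTC registers are copied into the state in a fixed little-endian layout,
 * with the remaining booleans packed into GBSerializedMemoryFlags.
 * Deserialization re-applies the bank switches so the derived pointers are
 * rebuilt before the flags are restored. */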
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);
}

void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);
}

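/* Copy-on-write for the ROM: as long as the ROM is still the pristine mapped
 * file, patching first copies it into an anonymous mapping padded out to
 * GB_SIZE_CART_MAX with 0xFF, repoints rom (and romBase, if it aliased the
 * old mapping) at the copy, and redoes the current bank switch so romBank
 * points into the new buffer. */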
void _pristineCow(struct GB* gb) {
	if (!gb->isPristine) {
		return;
	}
	void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(newRom, gb->memory.rom, gb->memory.romSize);
	memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	if (gb->memory.rom == gb->memory.romBase) {
		gb->memory.romBase = newRom;
	}
	gb->memory.rom = newRom;
	GBMBCSwitchBank(gb, gb->memory.currentBank);
}