mgba @ 362c572009ac74f34eda03b4b4b7b5f93faa1daf

mGBA Game Boy Advance Emulator

src/gb/memory.c

/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <mgba/internal/gb/memory.h>

#include <mgba/core/interface.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/io.h>
#include <mgba/internal/gb/mbc.h>
#include <mgba/internal/gb/serialize.h>
#include <mgba/internal/lr35902/lr35902.h>

#include <mgba-util/memory.h>

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");

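// An OAMBlock describes the bus region that conflicts with an in-progress OAM
// DMA; the tables below are indexed by the DMA source region (dmaSource >> 13).
// While DMA is active, CPU reads inside the conflicting range return 0xFF and
// writes are ignored (see GBLoad8/GBStore8).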
struct OAMBlock {
	uint16_t low;
	uint16_t high;
};

static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};

static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 },
	{ 0xA000, 0xC000 },
};

static void _pristineCow(struct GB* gba);

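// Fast path for CPU loads: read directly through the cached pointer to the
// active ROM region, falling back to setActiveRegion on a region change.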
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

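// Point the CPU's active region at the ROM bank containing the given address
// so subsequent fetches can use GBFastLoad8; other regions fall back to GBLoad8.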
static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBank;
		cpu->memory.activeRegionEnd = GB_BASE_VRAM;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.currentSegment = GBCurrentSegment;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbc = 0;

	gb->memory.rtc = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

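// Reset memory state: remap WRAM (filling it with the CGB power-on pattern on
// CGB models), reset banking, DMA/HDMA and RTC state, and reinitialize the MBC.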
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	if (gb->model >= GB_MODEL_CGB) {
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.dmaEvent.priority = 0x40;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
	gb->memory.hdmaEvent.priority = 0x41;

	gb->memory.sramAccess = false;
	gb->memory.rtcAccess = false;
	gb->memory.activeRtcReg = 0;
	gb->memory.rtcLatched = false;
	memset(&gb->memory.rtcRegs, 0, sizeof(gb->memory.rtcRegs));

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	switch (gb->memory.mbcType) {
	case GB_MBC1:
		gb->memory.mbcState.mbc1.mode = 0;
		break;
	default:
		memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
	}

	GBMBCInit(gb);
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

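// Switch the CGB WRAM bank mapped at 0xD000; selecting bank 0 maps bank 1, as
// on hardware.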
void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

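// General CPU read handler. While OAM DMA is in progress, reads from the bus
// region occupied by the transfer (and from OAM itself) return 0xFF.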
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		block = &block[memory->dmaSource >> 13];
		if (address >= block->low && address < block->high) {
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

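// General CPU write handler. Writes to the ROM area are routed to the MBC;
// writes blocked by an in-progress OAM DMA are silently dropped.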
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		block = &block[memory->dmaSource >> 13];
		if (address >= block->low && address < block->high) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbc(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else if (memory->mbcType == GB_MBC7) {
			GBMBC7Write(memory, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

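// Report which bank is currently mapped at the given address, for the debugger.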
int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return 0;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->currentBank;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramCurrentBank;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		return memory->sramCurrentBank;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return 0;
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramCurrentBank;
	default:
		return 0;
	}
}

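// Debugger read: like GBLoad8, but a non-negative segment selects a specific
// ROM/VRAM/SRAM/WRAM bank, and OAM DMA bus blocking is not applied.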
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcType == GB_MBC7) {
			return GBMBC7Read(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

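// Start an OAM DMA transfer of 0xA0 bytes from base; the first byte is copied
// by _GBMemoryDMAService after an 8-cycle startup delay.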
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

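// Handle a CGB write to HDMA5: latch the source and destination from
// HDMA1-HDMA4, then either start a general-purpose DMA right away or
// arm/disarm H-blank DMA depending on bit 7.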
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || gb->video.mode == 0) {
		gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
		gb->cpu->nextEvent = gb->cpu->cycles;
	}
}

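// Timing callback for OAM DMA: copies one byte from dmaSource to OAM, then
// reschedules itself every 4 cycles until dmaRemaining reaches zero.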
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	int dmaRemaining = gb->memory.dmaRemaining;
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}

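// Timing callback for (H)DMA: copies one byte every 2 cycles with the CPU
// blocked, then writes the updated source/destination back to HDMA1-HDMA4 and
// updates HDMA5 when the transfer completes.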
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}

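// Patch a byte of memory (used by cheats and the debugger), optionally
// returning the previous value via old. ROM patches first trigger a
// copy-on-write of the pristine ROM mapping through _pristineCow.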
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
			gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

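// Copy memory state into a save state (and back out again in
// GBMemoryDeserialize), storing multi-byte fields as little-endian.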
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);
}

void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);
}

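// Copy-on-write helper: before the first ROM patch, replace the pristine
// (possibly file-backed) ROM mapping with a writable anonymous copy padded
// with 0xFF, and remap the current bank into it.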
void _pristineCow(struct GB* gb) {
	if (!gb->isPristine) {
		return;
	}
	void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(newRom, gb->memory.rom, gb->memory.romSize);
	memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	if (gb->memory.rom == gb->memory.romBase) {
		gb->memory.romBase = newRom;
	}
	gb->memory.rom = newRom;
	GBMBCSwitchBank(gb, gb->memory.currentBank);
	gb->isPristine = false;
}