all repos — mgba @ 575adcaf4cb76a7e9537e6b4d41f017a924dbdf6

mGBA Game Boy Advance Emulator

src/gb/memory.c (view raw)

  1/* Copyright (c) 2013-2016 Jeffrey Pfau
  2 *
  3 * This Source Code Form is subject to the terms of the Mozilla Public
  4 * License, v. 2.0. If a copy of the MPL was not distributed with this
  5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
  6#include <mgba/internal/gb/memory.h>
  7
  8#include <mgba/core/interface.h>
  9#include <mgba/internal/gb/gb.h>
 10#include <mgba/internal/gb/io.h>
 11#include <mgba/internal/gb/mbc.h>
 12#include <mgba/internal/gb/serialize.h>
 13#include <mgba/internal/lr35902/lr35902.h>
 14
 15#include <mgba-util/memory.h>
 16
mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");

// Buses contended during OAM DMA. While a DMA is active, a CPU access that
// lands on the same bus as the DMA source is blocked (reads 0xFF, writes are
// dropped) — see GBLoad8/GBStore8. GB_BUS_CPU marks regions the DMA engine
// never contends with.
enum GBBus {
	GB_BUS_CPU,
	GB_BUS_MAIN,
	GB_BUS_VRAM,
	GB_BUS_RAM
};
 25
// Bus assignment for each 8 KiB slice of the address space on DMG.
// Indexed by (address >> 13); on DMG, WRAM shares the main bus with the
// cartridge, so a WRAM-sourced DMA blocks cartridge accesses too.
static const enum GBBus _oamBlockDMG[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_MAIN, // 0xC000
	GB_BUS_CPU, // 0xE000
};
 36
// Bus assignment for each 8 KiB slice of the address space on CGB.
// Unlike DMG, WRAM (0xC000) sits on its own bus, so a WRAM-sourced DMA does
// not block cartridge accesses.
static const enum GBBus _oamBlockCGB[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_RAM, // 0xC000
	GB_BUS_CPU // 0xE000
};
 47
 48static void _pristineCow(struct GB* gba);
 49
// Fast-path instruction fetch: indexes straight into the currently mapped
// region without full bus decoding. On a miss past activeRegionEnd, the
// region is re-resolved and the read retried through whatever handler
// setActiveRegion installed (which may be GBLoad8).
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}
 57
// Install the direct-pointer region used by GBFastLoad8 for fetches near
// `address`. Only the two ROM windows are direct-mapped; everything else
// falls back to the generic GBLoad8 dispatcher.
static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		// 0x0000-0x3FFF: fixed bank (romBase, which some MBCs can remap).
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		if (gb->memory.mbcType != GB_MBC6) {
			// 0x4000-0x7FFF: one switchable bank.
			cpu->memory.activeRegion = memory->romBank;
			cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		} else {
			// MBC6 splits 0x4000-0x7FFF into two independently banked
			// half-banks; map whichever half `address` falls into and end the
			// region at the half boundary so crossing it re-resolves.
			cpu->memory.activeMask = GB_SIZE_CART_HALFBANK - 1;
			if (address & 0x2000) {
				cpu->memory.activeRegion = memory->mbcState.mbc6.romBank1;
				cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			} else {
				cpu->memory.activeRegion = memory->romBank;
				cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1 + 0x2000;
			}
		}
		break;
	default:
		// Not direct-mappable; take the slow path for every fetch.
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}
 96
 97static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
 98static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
 99
100void GBMemoryInit(struct GB* gb) {
101	struct LR35902Core* cpu = gb->cpu;
102	cpu->memory.cpuLoad8 = GBLoad8;
103	cpu->memory.load8 = GBLoad8;
104	cpu->memory.store8 = GBStore8;
105	cpu->memory.currentSegment = GBCurrentSegment;
106	cpu->memory.setActiveRegion = GBSetActiveRegion;
107
108	gb->memory.wram = 0;
109	gb->memory.wramBank = 0;
110	gb->memory.rom = 0;
111	gb->memory.romBank = 0;
112	gb->memory.romSize = 0;
113	gb->memory.sram = 0;
114	gb->memory.mbcType = GB_MBC_AUTODETECT;
115	gb->memory.mbcRead = NULL;
116	gb->memory.mbcWrite = NULL;
117
118	gb->memory.rtc = NULL;
119	gb->memory.rotation = NULL;
120	gb->memory.rumble = NULL;
121	gb->memory.cam = NULL;
122
123	GBIOInit(gb);
124}
125
126void GBMemoryDeinit(struct GB* gb) {
127	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
128	if (gb->memory.rom) {
129		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
130	}
131}
132
133void GBMemoryReset(struct GB* gb) {
134	if (gb->memory.wram) {
135		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
136	}
137	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
138	if (gb->model >= GB_MODEL_CGB) {
139		uint32_t* base = (uint32_t*) gb->memory.wram;
140		size_t i;
141		uint32_t pattern = 0;
142		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
143			if ((i & 0x1FF) == 0) {
144				pattern = ~pattern;
145			}
146			base[i + 0] = pattern;
147			base[i + 1] = pattern;
148			base[i + 2] = ~pattern;
149			base[i + 3] = ~pattern;
150		}
151	}
152	GBMemorySwitchWramBank(&gb->memory, 1);
153	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
154	gb->memory.currentBank = 1;
155	gb->memory.sramCurrentBank = 0;
156
157	gb->memory.ime = false;
158	gb->memory.ie = 0;
159
160	gb->memory.dmaRemaining = 0;
161	gb->memory.dmaSource = 0;
162	gb->memory.dmaDest = 0;
163	gb->memory.hdmaRemaining = 0;
164	gb->memory.hdmaSource = 0;
165	gb->memory.hdmaDest = 0;
166	gb->memory.isHdma = false;
167
168
169	gb->memory.dmaEvent.context = gb;
170	gb->memory.dmaEvent.name = "GB DMA";
171	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
172	gb->memory.dmaEvent.priority = 0x40;
173	gb->memory.hdmaEvent.context = gb;
174	gb->memory.hdmaEvent.name = "GB HDMA";
175	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
176	gb->memory.hdmaEvent.priority = 0x41;
177
178	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
179
180	GBMBCInit(gb);
181	switch (gb->memory.mbcType) {
182	case GB_MBC1:
183		gb->memory.mbcState.mbc1.mode = 0;
184		break;
185	case GB_MBC6:
186		GBMBCSwitchHalfBank(gb, 0, 2);
187		GBMBCSwitchHalfBank(gb, 1, 3);
188		gb->memory.mbcState.mbc6.sramAccess = false;
189		GBMBCSwitchSramHalfBank(gb, 0, 0);
190		GBMBCSwitchSramHalfBank(gb, 0, 1);
191		break;
192	case GB_MMM01:
193		GBMBCSwitchBank0(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 2);
194		GBMBCSwitchBank(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 1);
195	default:
196		memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
197	}
198	gb->memory.sramBank = gb->memory.sram;
199
200	if (!gb->memory.wram) {
201		GBMemoryDeinit(gb);
202	}
203}
204
205void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
206	bank &= 7;
207	if (!bank) {
208		bank = 1;
209	}
210	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
211	memory->wramCurrentBank = bank;
212}
213
// CPU-visible 8-bit read with full address decoding and OAM-DMA blocking.
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		// While OAM DMA is in flight, reads on the same bus as the DMA
		// source return 0xFF, as do reads of OAM itself.
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		// 0x0000-0x3FFF: fixed ROM bank (romBase may be remapped by some MBCs).
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// 0x6000-0x7FFF: MBC6 maps an independent half-bank here.
		if (memory->mbcType == GB_MBC6) {
			return memory->mbcState.mbc6.romBank1[address & (GB_SIZE_CART_HALFBANK - 1)];
		}
		// Fall through
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
		// 0x4000-0x7FFF: switchable ROM bank.
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// VRAM is CPU-inaccessible during mode 3 (pixel transfer).
		if (gb->video.mode != 3) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		}
		return 0xFF;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			// MBC3 RTC register mapped into the SRAM window.
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->mbcRead) {
			// MBC-specific read hook (e.g. MBC7 EEPROM, Pocket Cam).
			return memory->mbcRead(memory, address);
		} else if (memory->sramAccess && memory->sram) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2: // 0xE000: echo of WRAM bank 0
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		// 0xF000-0xFFFF: echo RAM, OAM, unusable, I/O, HRAM, IE.
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// OAM is CPU-readable only during modes 0-1 (H-Blank/V-Blank).
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
289
// CPU-visible 8-bit write with full address decoding and OAM-DMA blocking.
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		// While OAM DMA is in flight, writes on the same bus as the DMA
		// source are dropped, as are writes to OAM itself.
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// ROM writes go to the MBC's register interface; that may have
		// switched banks, so re-resolve the fast-fetch region.
		memory->mbcWrite(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// VRAM is CPU-inaccessible during mode 3; otherwise notify the
		// renderer of the dirtied address before storing.
		if (gb->video.mode != 3) {
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		}
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			// MBC3 RTC register mapped into the SRAM window.
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram && memory->mbcType != GB_MBC2) {
			// MBC2's half-byte RAM goes through mbcWrite instead.
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		// Mark battery-backed state as recently modified for save flushing.
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2: // 0xE000: echo of WRAM bank 0
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		// 0xF000-0xFFFF: echo RAM, OAM, unusable, I/O, HRAM, IE.
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			// OAM is CPU-writable only during modes 0-1.
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}
360
361int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
362	struct GB* gb = (struct GB*) cpu->master;
363	struct GBMemory* memory = &gb->memory;
364	switch (address >> 12) {
365	case GB_REGION_CART_BANK0:
366	case GB_REGION_CART_BANK0 + 1:
367	case GB_REGION_CART_BANK0 + 2:
368	case GB_REGION_CART_BANK0 + 3:
369		return 0;
370	case GB_REGION_CART_BANK1:
371	case GB_REGION_CART_BANK1 + 1:
372	case GB_REGION_CART_BANK1 + 2:
373	case GB_REGION_CART_BANK1 + 3:
374		return memory->currentBank;
375	case GB_REGION_VRAM:
376	case GB_REGION_VRAM + 1:
377		return gb->video.vramCurrentBank;
378	case GB_REGION_EXTERNAL_RAM:
379	case GB_REGION_EXTERNAL_RAM + 1:
380		return memory->sramCurrentBank;
381	case GB_REGION_WORKING_RAM_BANK0:
382	case GB_REGION_WORKING_RAM_BANK0 + 2:
383		return 0;
384	case GB_REGION_WORKING_RAM_BANK1:
385		return memory->wramCurrentBank;
386	default:
387		return 0;
388	}
389}
390
// Debugger-facing read. `segment` selects an explicit bank for banked
// regions; a negative segment reads whatever bank is currently mapped.
// Unlike GBLoad8, this ignores OAM-DMA bus blocking and VRAM mode-3
// lockout, but still returns 0xFF for out-of-range segments.
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			// Index the flat ROM buffer by explicit bank number.
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment *GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0 && memory->sram) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment *GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2: // 0xE000: echo of WRAM bank 0
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment *GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		// 0xF000-0xFFFF: echo RAM, OAM, unusable, I/O, HRAM, IE.
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// NOTE(review): unlike most of this function, OAM still honors the
			// mode <2 access rule here — confirm this asymmetry is intended.
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
472
// Start an OAM DMA transfer of 0xA0 bytes (the size of OAM) from `base`
// (REG_DMA's value << 8). The first byte copies after an 8-cycle delay.
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		// Sources above 0xF100 are rejected outright.
		return;
	}
	mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	// Pull the CPU's next event forward so the DMA start isn't missed.
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}
486
// Write handler for REG_HDMA5 (CGB): starts a general-purpose DMA (GDMA) or
// arms/disarms H-Blank DMA (HDMA). Returns the value to store back into the
// register.
uint8_t GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0; // Low nybble of the source is ignored
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		// VRAM cannot be an HDMA source.
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return value | 0x80;
	}
	// Destination is forced into VRAM (0x8000-0x9FF0).
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || (GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC]) && gb->video.mode == 0)) {
		if (gb->memory.isHdma) {
			// HDMA copies one 0x10-byte block per H-Blank.
			gb->memory.hdmaRemaining = 0x10;
		} else {
			// GDMA copies all (bits 0-6 + 1) blocks immediately.
			gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		}
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
	} else if (gb->memory.isHdma && !GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC])) {
		// HDMA armed while the LCD is off: report length with one extra block.
		// NOTE(review): hardware behavior here is subtle — verify before changing.
		return 0x80 | ((value + 1) & 0x7F);
	}
	return value & 0x7F;
}
514
// Timing callback: copy one byte of the active OAM DMA, then reschedule
// until the transfer is done.
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	int dmaRemaining = gb->memory.dmaRemaining;
	// Temporarily clear dmaRemaining so GBLoad8 doesn't apply its DMA
	// bus-blocking rules to the DMA engine's own read.
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		// One byte every 4 cycles, compensating for callback latency.
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}
530
// Timing callback: copy one byte of the active HDMA/GDMA. The CPU stays
// blocked until the scheduled run of bytes completes, after which the HDMA
// registers are updated to reflect progress.
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		// One byte every 2 cycles (double-speed-relative timing).
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		// Block finished: unblock the CPU and write back progress registers.
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			// HDMA5 counts down remaining blocks; 0xFF means transfer complete.
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}
558
559void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
560	struct GB* gb = (struct GB*) cpu->master;
561	struct GBMemory* memory = &gb->memory;
562	int8_t oldValue = -1;
563
564	switch (address >> 12) {
565	case GB_REGION_CART_BANK0:
566	case GB_REGION_CART_BANK0 + 1:
567	case GB_REGION_CART_BANK0 + 2:
568	case GB_REGION_CART_BANK0 + 3:
569		_pristineCow(gb);
570		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
571		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] =  value;
572		break;
573	case GB_REGION_CART_BANK1:
574	case GB_REGION_CART_BANK1 + 1:
575	case GB_REGION_CART_BANK1 + 2:
576	case GB_REGION_CART_BANK1 + 3:
577		_pristineCow(gb);
578		if (segment < 0) {
579			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
580			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
581		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
582			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
583			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
584		} else {
585			return;
586		}
587		break;
588	case GB_REGION_VRAM:
589	case GB_REGION_VRAM + 1:
590		if (segment < 0) {
591			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
592			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
593			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
594		} else if (segment < 2) {
595			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
596			gb->video.vramBank[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
597			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
598		} else {
599			return;
600		}
601		break;
602	case GB_REGION_EXTERNAL_RAM:
603	case GB_REGION_EXTERNAL_RAM + 1:
604		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
605		return;
606	case GB_REGION_WORKING_RAM_BANK0:
607	case GB_REGION_WORKING_RAM_BANK0 + 2:
608		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
609		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
610		break;
611	case GB_REGION_WORKING_RAM_BANK1:
612		if (segment < 0) {
613			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
614			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
615		} else if (segment < 8) {
616			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
617			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
618		} else {
619			return;
620		}
621		break;
622	default:
623		if (address < GB_BASE_OAM) {
624			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
625			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
626		} else if (address < GB_BASE_UNUSABLE) {
627			oldValue = gb->video.oam.raw[address & 0xFF];
628			gb->video.oam.raw[address & 0xFF] = value;
629			gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
630		} else if (address < GB_BASE_HRAM) {
631			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
632			return;
633		} else if (address < GB_BASE_IE) {
634			oldValue = memory->hram[address & GB_SIZE_HRAM];
635			memory->hram[address & GB_SIZE_HRAM] = value;
636		} else {
637			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
638			return;
639		}
640	}
641	if (old) {
642		*old = oldValue;
643	}
644}
645
// Write the memory subsystem's state into a savestate. Multi-byte fields go
// through the STORE_*LE macros so states are portable across endianness.
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	// Store pending DMA/HDMA events relative to the current time so they can
	// be rescheduled on load.
	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	// Pack boolean/bitfield state into a single flags word.
	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);

	// MBC-specific state.
	switch (memory->mbcType) {
	case GB_MBC1:
		state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
		state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
		break;
	case GB_MBC3_RTC:
		STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		state->memory.mbc7.state = memory->mbcState.mbc7.state;
		state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
		state->memory.mbc7.address = memory->mbcState.mbc7.address;
		state->memory.mbc7.access = memory->mbcState.mbc7.access;
		state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
		state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
		STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	case GB_MMM01:
		state->memory.mmm01.locked = memory->mbcState.mmm01.locked;
		state->memory.mmm01.bank0 = memory->mbcState.mmm01.currentBank0;
		break;
	default:
		break;
	}
}
702
// Restore the memory subsystem's state from a savestate: raw RAM contents,
// bank selections (re-applied through the bank-switch helpers so derived
// pointers are rebuilt), DMA state, flags, and MBC-specific state.
void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	// Rebuild the bank pointers from the restored bank numbers.
	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	// Re-schedule in-flight DMA/HDMA events at their saved relative times.
	// NOTE(review): no mTimingDeschedule precedes these — presumably the
	// timing queue was reset before deserialization; verify in the caller.
	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

	// MBC-specific state.
	switch (memory->mbcType) {
	case GB_MBC1:
		memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
		memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
		if (memory->mbcState.mbc1.mode) {
			// Mode 1 also remaps bank 0 on multicarts.
			GBMBCSwitchBank0(gb, memory->currentBank >> memory->mbcState.mbc1.multicartStride);
		}
		break;
	case GB_MBC3_RTC:
		LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		memory->mbcState.mbc7.state = state->memory.mbc7.state;
		memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
		// Mask the EEPROM address to its valid range.
		memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
		memory->mbcState.mbc7.access = state->memory.mbc7.access;
		memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
		memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
		LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	case GB_MMM01:
		memory->mbcState.mmm01.locked = state->memory.mmm01.locked;
		memory->mbcState.mmm01.currentBank0 = state->memory.mmm01.bank0;
		if (memory->mbcState.mmm01.locked) {
			GBMBCSwitchBank0(gb, memory->mbcState.mmm01.currentBank0);
		} else {
			// Unlocked MMM01 exposes the menu banks at the end of ROM.
			GBMBCSwitchBank0(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 2);
		}
		break;
	default:
		break;
	}
}
778
779void _pristineCow(struct GB* gb) {
780	if (!gb->isPristine) {
781		return;
782	}
783	void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
784	memcpy(newRom, gb->memory.rom, gb->memory.romSize);
785	memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
786	if (gb->memory.rom == gb->memory.romBase) {
787		gb->memory.romBase = newRom;
788	}
789	gb->memory.rom = newRom;
790	GBMBCSwitchBank(gb, gb->memory.currentBank);
791	gb->isPristine = false;
792}