mgba @ 7c8d253123733d667e271668f00bacceb390175d

mGBA Game Boy Advance Emulator

src/gb/memory.c

/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include <mgba/internal/gb/memory.h>

#include <mgba/core/interface.h>
#include <mgba/internal/gb/gb.h>
#include <mgba/internal/gb/io.h>
#include <mgba/internal/gb/mbc.h>
#include <mgba/internal/gb/serialize.h>
#include <mgba/internal/lr35902/lr35902.h>

#include <mgba-util/memory.h>

mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");

enum GBBus {
	GB_BUS_CPU,
	GB_BUS_MAIN,
	GB_BUS_VRAM,
	GB_BUS_RAM
};

static const enum GBBus _oamBlockDMG[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_MAIN, // 0xC000
	GB_BUS_CPU, // 0xE000
};

static const enum GBBus _oamBlockCGB[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_RAM, // 0xC000
	GB_BUS_CPU // 0xE000
};

static const uint8_t _blockedRegion[1] = { 0xFF };

static void _pristineCow(struct GB* gb);

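// Fast path for CPU reads: fetches straight out of the cached active region,
// falling back to a full bus access (and re-selecting the region) only when the
// address runs past the end of the currently mapped block.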
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

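// Selects the block of memory the CPU fetches from. ROM regions get the
// fast-load path pointed at the resident bank (with special handling for MBC6
// half-banks); while an OAM DMA is in flight, fetches on the bus the DMA is
// using are redirected to a one-byte 0xFF region to model the bus conflict.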
static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		if (gb->memory.mbcType != GB_MBC6) {
			cpu->memory.activeRegion = memory->romBank;
			cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		} else {
			cpu->memory.activeMask = GB_SIZE_CART_HALFBANK - 1;
			if (address & 0x2000) {
				cpu->memory.activeRegion = memory->mbcState.mbc6.romBank1;
				cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			} else {
				cpu->memory.activeRegion = memory->romBank;
				cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1 + 0x2000;
			}
		}
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if ((dmaBus != GB_BUS_CPU && dmaBus == accessBus) || (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE)) {
			cpu->memory.activeRegion = _blockedRegion;
			cpu->memory.activeMask = 0;
		}
	}
}

static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);

void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.currentSegment = GBCurrentSegment;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_AUTODETECT;
	gb->memory.mbcRead = NULL;
	gb->memory.mbcWrite = NULL;

	gb->memory.rtc = NULL;
	gb->memory.rotation = NULL;
	gb->memory.rumble = NULL;
	gb->memory.cam = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

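// Reallocates and reinitializes memory state for a (re)boot. On CGB models the
// working RAM is seeded with an alternating fill pattern rather than left
// zeroed, and MBC-specific bank mappings are restored to their reset values.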
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	if (gb->model >= GB_MODEL_CGB) {
		uint32_t* base = (uint32_t*) gb->memory.wram;
		size_t i;
		uint32_t pattern = 0;
		for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
			if ((i & 0x1FF) == 0) {
				pattern = ~pattern;
			}
			base[i + 0] = pattern;
			base[i + 1] = pattern;
			base[i + 2] = ~pattern;
			base[i + 3] = ~pattern;
		}
	}
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	gb->memory.sramCurrentBank = 0;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.dmaEvent.context = gb;
	gb->memory.dmaEvent.name = "GB DMA";
	gb->memory.dmaEvent.callback = _GBMemoryDMAService;
	gb->memory.dmaEvent.priority = 0x40;
	gb->memory.hdmaEvent.context = gb;
	gb->memory.hdmaEvent.name = "GB HDMA";
	gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
	gb->memory.hdmaEvent.priority = 0x41;

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));

	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
	GBMBCInit(gb);
	switch (gb->memory.mbcType) {
	case GB_MBC1:
		gb->memory.mbcState.mbc1.mode = 0;
		break;
	case GB_MBC6:
		GBMBCSwitchHalfBank(gb, 0, 2);
		GBMBCSwitchHalfBank(gb, 1, 3);
		gb->memory.mbcState.mbc6.sramAccess = false;
		GBMBCSwitchSramHalfBank(gb, 0, 0);
		GBMBCSwitchSramHalfBank(gb, 1, 1);
		break;
	case GB_MMM01:
		GBMBCSwitchBank0(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 2);
		GBMBCSwitchBank(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 1);
		break;
	default:
		break;
	}
	gb->memory.sramBank = gb->memory.sram;

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

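// Maps one of the eight 4 KiB WRAM banks into the 0xD000 region. Bank 0 is not
// addressable there, so a request for bank 0 selects bank 1 instead.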
void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

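// General-purpose CPU read handler. While an OAM DMA is active, reads that land
// on the bus the DMA occupies (or on OAM itself) return 0xFF; otherwise the
// address is decoded by 4 KiB region and dispatched to ROM, VRAM, cartridge
// RAM/MBC, WRAM, OAM, I/O, or HRAM.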
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_IO) {
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (memory->mbcType == GB_MBC6) {
			return memory->mbcState.mbc6.romBank1[address & (GB_SIZE_CART_HALFBANK - 1)];
		}
		// Fall through
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (gb->video.mode != 3) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		}
		return 0xFF;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->sramAccess && memory->sram) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

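// General-purpose CPU write handler, mirroring GBLoad8: writes onto a bus busy
// with OAM DMA are dropped, writes to the ROM area are routed to the MBC, and
// the remaining regions update VRAM, cartridge RAM, WRAM, OAM, I/O, or HRAM.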
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbcWrite(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (gb->video.mode != 3) {
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		}
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram && memory->mbcType != GB_MBC2) {
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW;
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

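// Reports which bank ("segment") is currently mapped at an address, for the
// debugger and memory views. Fixed regions report segment 0.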
int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return 0;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->currentBank;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramCurrentBank;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		return memory->sramCurrentBank;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return 0;
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramCurrentBank;
	default:
		return 0;
	}
}

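// Side-effect-free read used by the debugger: like GBLoad8, but it can address
// an explicit bank via the segment argument (segment < 0 means "whatever is
// currently mapped") and it never applies the OAM DMA bus-conflict rules.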
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0 && memory->sram) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment * GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			if (gb->video.mode < 2) {
				switch (gb->model) {
				case GB_MODEL_AGB:
					return (address & 0xF0) | ((address >> 4) & 0xF);
				case GB_MODEL_CGB:
					// TODO: R/W behavior
					return 0x00;
				default:
					return 0x00;
				}
			}
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

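// Starts an OAM DMA transfer from the page selected by a write to the DMA
// register: 0xA0 bytes are queued and the first byte is copied by the event
// scheduled 8 cycles from now. Source bases above 0xF100 are ignored.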
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

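// Handles a write to HDMA5 on CGB: latches the source/destination registers,
// then either kicks off a general-purpose DMA (or the first 16-byte HBlank DMA
// block) immediately, or arms HBlank DMA to run during the next mode-0 period.
// Returns the value that should be read back from HDMA5.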
uint8_t GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return value | 0x80;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || (GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC]) && gb->video.mode == 0)) {
		if (gb->memory.isHdma) {
			gb->memory.hdmaRemaining = 0x10;
		} else {
			gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		}
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
	} else if (gb->memory.isHdma && !GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC])) {
		return 0x80 | ((value + 1) & 0x7F);
	}
	return value & 0x7F;
}

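// Timing callback for OAM DMA: copies one byte from the DMA source into OAM
// every 4 cycles until all 0xA0 bytes have been transferred. dmaRemaining is
// cleared around the source read so GBLoad8's bus-blocking logic does not
// apply to the DMA unit itself.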
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	int dmaRemaining = gb->memory.dmaRemaining;
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}

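// Timing callback for CGB HDMA/GDMA: copies one byte per invocation while the
// CPU is blocked. When the current block finishes it writes the updated
// source/destination back to the HDMA registers and decrements HDMA5, ending
// HBlank DMA once it underflows to 0xFF.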
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}

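// Applies a cheat/patch byte directly to the backing memory for an address
// (optionally in a specific bank segment), returning the previous value via
// old. ROM patches force a copy-on-write of the pristine ROM mapping first.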
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		if (segment < 0) {
			oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
			memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
			memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
		} else {
			return;
		}
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
		} else if (segment < 2) {
			oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
			gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
		} else {
			return;
		}
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (segment < 8) {
			oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
			memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
		} else {
			return;
		}
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
			gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

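// Writes the memory subsystem's state into a savestate: WRAM, HRAM, bank
// selections, DMA/HDMA progress, RTC registers, packed flags, and any
// MBC-specific state.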
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
		state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
		break;
	case GB_MBC3_RTC:
		STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		state->memory.mbc7.state = memory->mbcState.mbc7.state;
		state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
		state->memory.mbc7.address = memory->mbcState.mbc7.address;
		state->memory.mbc7.access = memory->mbcState.mbc7.access;
		state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
		state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
		STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	case GB_MMM01:
		state->memory.mmm01.locked = memory->mbcState.mmm01.locked;
		state->memory.mmm01.bank0 = memory->mbcState.mmm01.currentBank0;
		break;
	default:
		break;
	}
}

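// Restores the memory subsystem from a savestate, remapping ROM/WRAM/SRAM banks
// and rescheduling any DMA/HDMA events that were pending when the state was
// saved.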
void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

	switch (memory->mbcType) {
	case GB_MBC1:
		memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
		memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
		if (memory->mbcState.mbc1.mode) {
			GBMBCSwitchBank0(gb, memory->currentBank >> memory->mbcState.mbc1.multicartStride);
		}
		break;
	case GB_MBC3_RTC:
		LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		memory->mbcState.mbc7.state = state->memory.mbc7.state;
		memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
		memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
		memory->mbcState.mbc7.access = state->memory.mbc7.access;
		memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
		memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
		LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	case GB_MMM01:
		memory->mbcState.mmm01.locked = state->memory.mmm01.locked;
		memory->mbcState.mmm01.currentBank0 = state->memory.mmm01.bank0;
		if (memory->mbcState.mmm01.locked) {
			GBMBCSwitchBank0(gb, memory->mbcState.mmm01.currentBank0);
		} else {
			GBMBCSwitchBank0(gb, gb->memory.romSize / GB_SIZE_CART_BANK0 - 2);
		}
		break;
	default:
		break;
	}
}

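// Copy-on-write helper for ROM patching: if the loaded ROM is still the
// pristine, memory-mapped image, replace it with a private anonymous copy
// (padded with 0xFF up to GB_SIZE_CART_MAX) so patches cannot touch the
// original mapping.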
void _pristineCow(struct GB* gb) {
	if (!gb->isPristine) {
		return;
	}
	void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(newRom, gb->memory.rom, gb->memory.romSize);
	memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	if (gb->memory.rom == gb->memory.romBase) {
		gb->memory.romBase = newRom;
	}
	gb->memory.rom = newRom;
	GBMBCSwitchBank(gb, gb->memory.currentBank);
	gb->isPristine = false;
}