/* src/gb/memory.c */
1/* Copyright (c) 2013-2016 Jeffrey Pfau
2 *
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6#include <mgba/internal/gb/memory.h>
7
8#include <mgba/core/interface.h>
9#include <mgba/internal/gb/gb.h>
10#include <mgba/internal/gb/io.h>
11#include <mgba/internal/gb/mbc.h>
12#include <mgba/internal/gb/serialize.h>
13#include <mgba/internal/lr35902/lr35902.h>
14
15#include <mgba-util/memory.h>
16
17mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");
18
// Buses a memory access can land on while an OAM DMA is in flight. The bus
// carrying the DMA is unavailable to the CPU (reads return 0xFF, writes are
// dropped); see the dmaRemaining checks in GBLoad8/GBStore8.
enum GBBus {
	GB_BUS_CPU, // CPU-internal (HRAM/IE); never blocked by DMA
	GB_BUS_MAIN, // External bus: cartridge ROM/SRAM (and WRAM on DMG)
	GB_BUS_VRAM, // Video RAM bus
	GB_BUS_RAM // CGB only: WRAM sits on its own bus
};
25
// Bus occupied by each 8KiB region (index = address >> 13) on DMG-era models.
// Note WRAM (0xC000) shares the external bus with the cartridge on DMG.
static const enum GBBus _oamBlockDMG[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_MAIN, // 0xC000
	GB_BUS_CPU, // 0xE000
};
36
// Bus occupied by each 8KiB region (index = address >> 13) on CGB models.
// Unlike DMG, CGB WRAM (0xC000) is on a dedicated bus (GB_BUS_RAM), so a DMA
// sourced from cartridge space does not block WRAM accesses.
static const enum GBBus _oamBlockCGB[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_RAM, // 0xC000
	GB_BUS_CPU // 0xE000
};
47
// Forward declaration; parameter renamed from "gba" to "gb" to match the
// definition below (this is a GB core, not a GBA core).
static void _pristineCow(struct GB* gb);
49
// Fast-path instruction fetch: serves reads straight out of the cached
// active region pointer, falling back to a remap when the address has left
// the current region.
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		// Left the cached window: recompute the active region, then retry
		// through whichever cpuLoad8 the remap installed (may be GBLoad8).
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}
57
// Installs the fast-fetch window for the region containing `address`. ROM
// regions get a direct pointer and GBFastLoad8; all other regions fall back
// to the generic GBLoad8 path.
static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		// 0x0000-0x3FFF: "fixed" bank (romBase may itself be remapped)
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// 0x4000-0x7FFF: switchable bank
		cpu->memory.cpuLoad8 = GBFastLoad8;
		if (gb->memory.mbcType != GB_MBC6) {
			cpu->memory.activeRegion = memory->romBank;
			cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		} else {
			// MBC6 splits 0x4000-0x7FFF into two independently banked 8KiB
			// halves; restrict the window to whichever half we're in.
			cpu->memory.activeMask = GB_SIZE_CART_HALFBANK - 1;
			if (address & 0x2000) {
				cpu->memory.activeRegion = memory->mbcState.mbc6.romBank1;
				cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			} else {
				cpu->memory.activeRegion = memory->romBank;
				cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1 + 0x2000;
			}
		}
		break;
	default:
		// Non-ROM regions aren't direct-mapped; take the slow path.
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}
96
97static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
98static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
99
100void GBMemoryInit(struct GB* gb) {
101 struct LR35902Core* cpu = gb->cpu;
102 cpu->memory.cpuLoad8 = GBLoad8;
103 cpu->memory.load8 = GBLoad8;
104 cpu->memory.store8 = GBStore8;
105 cpu->memory.currentSegment = GBCurrentSegment;
106 cpu->memory.setActiveRegion = GBSetActiveRegion;
107
108 gb->memory.wram = 0;
109 gb->memory.wramBank = 0;
110 gb->memory.rom = 0;
111 gb->memory.romBank = 0;
112 gb->memory.romSize = 0;
113 gb->memory.sram = 0;
114 gb->memory.mbcType = GB_MBC_AUTODETECT;
115 gb->memory.mbcRead = NULL;
116 gb->memory.mbcWrite = NULL;
117
118 gb->memory.rtc = NULL;
119 gb->memory.rotation = NULL;
120 gb->memory.rumble = NULL;
121 gb->memory.cam = NULL;
122
123 GBIOInit(gb);
124}
125
// Releases the memory maps owned by this subsystem. WRAM is freed
// unconditionally (GBMemoryReset allocates it); ROM may never have been
// loaded, so it is freed only when present.
void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}
132
133void GBMemoryReset(struct GB* gb) {
134 if (gb->memory.wram) {
135 mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
136 }
137 gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
138 if (gb->model >= GB_MODEL_CGB) {
139 uint32_t* base = (uint32_t*) gb->memory.wram;
140 size_t i;
141 uint32_t pattern = 0;
142 for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
143 if ((i & 0x1FF) == 0) {
144 pattern = ~pattern;
145 }
146 base[i + 0] = pattern;
147 base[i + 1] = pattern;
148 base[i + 2] = ~pattern;
149 base[i + 3] = ~pattern;
150 }
151 }
152 GBMemorySwitchWramBank(&gb->memory, 1);
153 gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
154 gb->memory.currentBank = 1;
155 gb->memory.sramCurrentBank = 0;
156
157 gb->memory.ime = false;
158 gb->memory.ie = 0;
159
160 gb->memory.dmaRemaining = 0;
161 gb->memory.dmaSource = 0;
162 gb->memory.dmaDest = 0;
163 gb->memory.hdmaRemaining = 0;
164 gb->memory.hdmaSource = 0;
165 gb->memory.hdmaDest = 0;
166 gb->memory.isHdma = false;
167
168
169 gb->memory.dmaEvent.context = gb;
170 gb->memory.dmaEvent.name = "GB DMA";
171 gb->memory.dmaEvent.callback = _GBMemoryDMAService;
172 gb->memory.dmaEvent.priority = 0x40;
173 gb->memory.hdmaEvent.context = gb;
174 gb->memory.hdmaEvent.name = "GB HDMA";
175 gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
176 gb->memory.hdmaEvent.priority = 0x41;
177
178 memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
179 switch (gb->memory.mbcType) {
180 case GB_MBC1:
181 gb->memory.mbcState.mbc1.mode = 0;
182 break;
183 case GB_MBC6:
184 GBMBCSwitchHalfBank(gb, 0, 2);
185 GBMBCSwitchHalfBank(gb, 1, 3);
186 gb->memory.mbcState.mbc6.sramAccess = false;
187 GBMBCSwitchSramHalfBank(gb, 0, 0);
188 GBMBCSwitchSramHalfBank(gb, 0, 1);
189 break;
190 default:
191 memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
192 }
193
194 GBMBCInit(gb);
195 gb->memory.sramBank = gb->memory.sram;
196
197 if (!gb->memory.wram) {
198 GBMemoryDeinit(gb);
199 }
200}
201
202void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
203 bank &= 7;
204 if (!bank) {
205 bank = 1;
206 }
207 memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
208 memory->wramCurrentBank = bank;
209}
210
// Generic CPU read. Applies OAM-DMA bus blocking first, then dispatches on
// the high nybble of the address to the appropriate backing store.
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		// While OAM DMA runs, accesses on the same bus as the DMA source
		// (and OAM itself) read back as open bus (0xFF).
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		// 0x0000-0x3FFF: fixed ROM bank
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// 0x6000-0x7FFF: MBC6 maps its own second half-bank here
		if (memory->mbcType == GB_MBC6) {
			return memory->mbcState.mbc6.romBank1[address & (GB_SIZE_CART_HALFBANK - 1)];
		}
		// Fall through
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
		// 0x4000-0x7FFF: switchable ROM bank
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		// 0xA000-0xBFFF: RTC register, MBC-specific reader, or plain SRAM
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->sramAccess && memory->sram) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF; // SRAM disabled/absent: open bus
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		// 0xC000 (and its 0xE000 echo) always map WRAM bank 0
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		// 0xF000-0xFFFF: echo RAM tail, OAM, unusable, I/O, HRAM, IE
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// OAM is only CPU-readable outside modes 2/3
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
283
// Generic CPU write. Applies OAM-DMA bus blocking first, then dispatches on
// the high nybble of the address. Writes to ROM space go to the MBC's
// register handler, not to memory.
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		// During OAM DMA, writes on the bus the DMA occupies (and to OAM)
		// are dropped.
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// ROM-space writes are MBC register writes; banking may have
		// changed, so refresh the fast-fetch window afterwards.
		memory->mbcWrite(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// Notify the renderer before mutating VRAM so it can snapshot.
		gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram && memory->mbcType != GB_MBC2) {
			// MBC2 has built-in nybble RAM handled by its mbcWrite instead
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW; // mark battery save for flush
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		// 0xF000-0xFFFF: echo RAM tail, OAM, unusable, I/O, HRAM, IE
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			// OAM is only CPU-writable outside modes 2/3
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}
352
353int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
354 struct GB* gb = (struct GB*) cpu->master;
355 struct GBMemory* memory = &gb->memory;
356 switch (address >> 12) {
357 case GB_REGION_CART_BANK0:
358 case GB_REGION_CART_BANK0 + 1:
359 case GB_REGION_CART_BANK0 + 2:
360 case GB_REGION_CART_BANK0 + 3:
361 return 0;
362 case GB_REGION_CART_BANK1:
363 case GB_REGION_CART_BANK1 + 1:
364 case GB_REGION_CART_BANK1 + 2:
365 case GB_REGION_CART_BANK1 + 3:
366 return memory->currentBank;
367 case GB_REGION_VRAM:
368 case GB_REGION_VRAM + 1:
369 return gb->video.vramCurrentBank;
370 case GB_REGION_EXTERNAL_RAM:
371 case GB_REGION_EXTERNAL_RAM + 1:
372 return memory->sramCurrentBank;
373 case GB_REGION_WORKING_RAM_BANK0:
374 case GB_REGION_WORKING_RAM_BANK0 + 2:
375 return 0;
376 case GB_REGION_WORKING_RAM_BANK1:
377 return memory->wramCurrentBank;
378 default:
379 return 0;
380 }
381}
382
// Debugger-facing read: like GBLoad8 but with no DMA blocking, and with an
// optional explicit bank selector. segment < 0 means "whichever bank is
// currently mapped"; out-of-range segments read as 0xFF.
// NOTE(review): the I/O fallthrough calls GBIORead, which may not be fully
// side-effect free -- verify before using on live registers.
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			return 0xFF; // Bank beyond end of ROM
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment *GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF; // Only 2 VRAM banks exist
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0 && memory->sram) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment *GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF; // Bank beyond end of SRAM
			}
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment *GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF; // Only 8 WRAM banks exist
		}
	default:
		// 0xF000-0xFFFF: echo RAM tail, OAM, unusable, I/O, HRAM, IE
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
464
// Starts an OAM DMA transfer. `base` is the source address (the value
// written to the DMA register, shifted left by 8). The first byte moves
// after an 8-cycle setup delay.
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		// Out-of-range source; request ignored.
		return;
	}
	// Restart cleanly even if a DMA is already pending.
	mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
	mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
	// Make sure the CPU run loop yields in time for the first transfer unit.
	if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->cpu->cycles + 8;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0; // full OAM: 0xA0 bytes
}
478
// Handles a write to HDMA5 (CGB VRAM DMA trigger). Latches the source and
// destination from HDMA1-4, then starts either a general-purpose DMA (bit 7
// clear: whole transfer now) or H-Blank DMA (bit 7 set: 0x10 bytes per
// H-Blank). Returns the value HDMA5 should read back.
uint8_t GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	// Source is aligned to 0x10 and must not lie in VRAM.
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return value | 0x80;
	}
	// Destination is forced into VRAM (0x8000-0x9FF0), aligned to 0x10.
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || gb->video.mode == 0) {
		// GDMA, or HDMA started during H-Blank: begin transferring now.
		if (gb->memory.isHdma) {
			gb->memory.hdmaRemaining = 0x10;
		} else {
			gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		}
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
	} else if (gb->memory.isHdma && !GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC])) {
		// LCD off: HDMA stalls; report the block count with the busy bit.
		return 0x80 | ((value + 1) & 0x7F);
	}
	return value & 0x7F;
}
506
// Timing callback: moves one byte of an OAM DMA, then reschedules itself
// every 4 cycles until all 0xA0 bytes are done.
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	int dmaRemaining = gb->memory.dmaRemaining;
	// Temporarily clear dmaRemaining so GBLoad8's bus-conflict blocking
	// doesn't apply to the DMA engine's own source read.
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}
522
// Timing callback: moves one byte of an HDMA/GDMA burst. The CPU stays
// blocked until hdmaRemaining reaches zero, at which point the advanced
// source/destination are written back to the HDMA registers.
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true;
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		// Burst complete: unblock the CPU and expose updated addresses.
		gb->cpuBlocked = false;
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			// H-Blank mode: HDMA5 counts blocks down; 0xFF means finished.
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}
550
551void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
552 struct GB* gb = (struct GB*) cpu->master;
553 struct GBMemory* memory = &gb->memory;
554 int8_t oldValue = -1;
555
556 switch (address >> 12) {
557 case GB_REGION_CART_BANK0:
558 case GB_REGION_CART_BANK0 + 1:
559 case GB_REGION_CART_BANK0 + 2:
560 case GB_REGION_CART_BANK0 + 3:
561 _pristineCow(gb);
562 oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
563 memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
564 break;
565 case GB_REGION_CART_BANK1:
566 case GB_REGION_CART_BANK1 + 1:
567 case GB_REGION_CART_BANK1 + 2:
568 case GB_REGION_CART_BANK1 + 3:
569 _pristineCow(gb);
570 if (segment < 0) {
571 oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
572 memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
573 } else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
574 oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
575 memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
576 } else {
577 return;
578 }
579 break;
580 case GB_REGION_VRAM:
581 case GB_REGION_VRAM + 1:
582 if (segment < 0) {
583 oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
584 gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
585 gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
586 } else if (segment < 2) {
587 oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
588 gb->video.vramBank[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
589 gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
590 } else {
591 return;
592 }
593 break;
594 case GB_REGION_EXTERNAL_RAM:
595 case GB_REGION_EXTERNAL_RAM + 1:
596 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
597 return;
598 case GB_REGION_WORKING_RAM_BANK0:
599 case GB_REGION_WORKING_RAM_BANK0 + 2:
600 oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
601 memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
602 break;
603 case GB_REGION_WORKING_RAM_BANK1:
604 if (segment < 0) {
605 oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
606 memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
607 } else if (segment < 8) {
608 oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
609 memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
610 } else {
611 return;
612 }
613 break;
614 default:
615 if (address < GB_BASE_OAM) {
616 oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
617 memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
618 } else if (address < GB_BASE_UNUSABLE) {
619 oldValue = gb->video.oam.raw[address & 0xFF];
620 gb->video.oam.raw[address & 0xFF] = value;
621 gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
622 } else if (address < GB_BASE_HRAM) {
623 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
624 return;
625 } else if (address < GB_BASE_IE) {
626 oldValue = memory->hram[address & GB_SIZE_HRAM];
627 memory->hram[address & GB_SIZE_HRAM] = value;
628 } else {
629 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
630 return;
631 }
632 }
633 if (old) {
634 *old = oldValue;
635 }
636}
637
// Writes the memory subsystem's state into a savestate. Multi-byte fields
// are stored little-endian so savestates are portable across hosts.
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	// Event times are saved relative to "now" so they survive reload.
	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	// Pack boolean/small fields into a single flags word.
	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);

	// MBC-specific state.
	switch (memory->mbcType) {
	case GB_MBC1:
		state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
		state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
		break;
	case GB_MBC3_RTC:
		STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		state->memory.mbc7.state = memory->mbcState.mbc7.state;
		state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
		state->memory.mbc7.address = memory->mbcState.mbc7.address;
		state->memory.mbc7.access = memory->mbcState.mbc7.access;
		state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
		state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
		STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	default:
		break;
	}
}
690
// Restores the memory subsystem from a savestate: raw RAM contents, bank
// selections (re-applied through the MBC switch functions so derived
// pointers are rebuilt), (H)DMA progress, flags, and MBC-specific state.
// NOTE(review): bank numbers and event deltas come straight from the state
// with no range validation here -- confirm upstream validation covers this.
void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	// Re-derive the bank pointers from the restored bank numbers.
	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	// Re-schedule any transfer that was mid-flight when the state was saved.
	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

	// MBC-specific state.
	switch (memory->mbcType) {
	case GB_MBC1:
		memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
		memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
		if (memory->mbcState.mbc1.mode) {
			// Mode 1 also remaps the nominally fixed bank-0 region.
			GBMBCSwitchBank0(gb, memory->currentBank >> memory->mbcState.mbc1.multicartStride);
		}
		break;
	case GB_MBC3_RTC:
		LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		memory->mbcState.mbc7.state = state->memory.mbc7.state;
		memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
		memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
		memory->mbcState.mbc7.access = state->memory.mbc7.access;
		memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
		memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
		LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	default:
		break;
	}
}
757
758void _pristineCow(struct GB* gb) {
759 if (!gb->isPristine) {
760 return;
761 }
762 void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
763 memcpy(newRom, gb->memory.rom, gb->memory.romSize);
764 memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
765 if (gb->memory.rom == gb->memory.romBase) {
766 gb->memory.romBase = newRom;
767 }
768 gb->memory.rom = newRom;
769 GBMBCSwitchBank(gb, gb->memory.currentBank);
770 gb->isPristine = false;
771}