/* src/gb/memory.c */
1/* Copyright (c) 2013-2016 Jeffrey Pfau
2 *
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
6#include <mgba/internal/gb/memory.h>
7
8#include <mgba/core/interface.h>
9#include <mgba/internal/gb/gb.h>
10#include <mgba/internal/gb/io.h>
11#include <mgba/internal/gb/mbc.h>
12#include <mgba/internal/gb/serialize.h>
13#include <mgba/internal/lr35902/lr35902.h>
14
15#include <mgba-util/memory.h>
16
17mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory", "gb.memory");
18
// Which bus an 8 KiB slice of the address space sits on. While an OAM DMA
// is in flight, CPU accesses on the same bus as the DMA source are blocked
// (see GBLoad8/GBStore8).
enum GBBus {
	GB_BUS_CPU,  // CPU-private; never blocked by DMA
	GB_BUS_MAIN, // Main/external bus: ROM, cart RAM, (DMG) WRAM
	GB_BUS_VRAM, // Video RAM bus
	GB_BUS_RAM   // (CGB) separate WRAM bus
};
25
// Bus assignment per 8 KiB region on DMG: WRAM shares the main bus,
// so a DMA from WRAM also blocks cartridge accesses.
static const enum GBBus _oamBlockDMG[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_MAIN, // 0xC000
	GB_BUS_CPU, // 0xE000
};
36
// Bus assignment per 8 KiB region on CGB: unlike DMG, WRAM (0xC000) sits
// on its own bus, so DMA from WRAM does not block cartridge accesses.
static const enum GBBus _oamBlockCGB[] = {
	GB_BUS_MAIN, // 0x0000
	GB_BUS_MAIN, // 0x2000
	GB_BUS_MAIN, // 0x4000
	GB_BUS_MAIN, // 0x6000
	GB_BUS_VRAM, // 0x8000
	GB_BUS_MAIN, // 0xA000
	GB_BUS_RAM, // 0xC000
	GB_BUS_CPU // 0xE000
};
47
48static void _pristineCow(struct GB* gba);
49
// Fast-path CPU fetch: reads straight out of the cached active ROM region
// set up by GBSetActiveRegion. If the address has left the cached range,
// re-resolve the region and retry through whatever loader it installs.
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address >= cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}
57
// Installs the CPU's cached "active region" for the given address so that
// subsequent fetches can use GBFastLoad8. Only ROM areas are cacheable;
// any other region falls back to the generic GBLoad8 path.
static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		// Fixed ROM bank 0 (0x0000-0x3FFF)
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// Switchable ROM bank (0x4000-0x7FFF)
		cpu->memory.cpuLoad8 = GBFastLoad8;
		if (gb->memory.mbcType != GB_MBC6) {
			cpu->memory.activeRegion = memory->romBank;
			cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		} else {
			// MBC6 splits this region into two independently mapped half-banks,
			// so the cached region can only cover the half containing `address`.
			cpu->memory.activeMask = GB_SIZE_CART_HALFBANK - 1;
			if (address & 0x2000) {
				cpu->memory.activeRegion = memory->mbcState.mbc6.romBank1;
				cpu->memory.activeRegionEnd = GB_BASE_VRAM;
			} else {
				cpu->memory.activeRegion = memory->romBank;
				cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1 + 0x2000;
			}
		}
		break;
	default:
		// Not ROM: take the slow path for every fetch
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}
96
97static void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
98static void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate);
99
100void GBMemoryInit(struct GB* gb) {
101 struct LR35902Core* cpu = gb->cpu;
102 cpu->memory.cpuLoad8 = GBLoad8;
103 cpu->memory.load8 = GBLoad8;
104 cpu->memory.store8 = GBStore8;
105 cpu->memory.currentSegment = GBCurrentSegment;
106 cpu->memory.setActiveRegion = GBSetActiveRegion;
107
108 gb->memory.wram = 0;
109 gb->memory.wramBank = 0;
110 gb->memory.rom = 0;
111 gb->memory.romBank = 0;
112 gb->memory.romSize = 0;
113 gb->memory.sram = 0;
114 gb->memory.mbcType = GB_MBC_AUTODETECT;
115 gb->memory.mbcRead = NULL;
116 gb->memory.mbcWrite = NULL;
117
118 gb->memory.rtc = NULL;
119 gb->memory.rotation = NULL;
120 gb->memory.rumble = NULL;
121 gb->memory.cam = NULL;
122
123 GBIOInit(gb);
124}
125
126void GBMemoryDeinit(struct GB* gb) {
127 mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
128 if (gb->memory.rom) {
129 mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
130 }
131}
132
133void GBMemoryReset(struct GB* gb) {
134 if (gb->memory.wram) {
135 mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
136 }
137 gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
138 if (gb->model >= GB_MODEL_CGB) {
139 uint32_t* base = (uint32_t*) gb->memory.wram;
140 size_t i;
141 uint32_t pattern = 0;
142 for (i = 0; i < GB_SIZE_WORKING_RAM / 4; i += 4) {
143 if ((i & 0x1FF) == 0) {
144 pattern = ~pattern;
145 }
146 base[i + 0] = pattern;
147 base[i + 1] = pattern;
148 base[i + 2] = ~pattern;
149 base[i + 3] = ~pattern;
150 }
151 }
152 GBMemorySwitchWramBank(&gb->memory, 1);
153 gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
154 gb->memory.currentBank = 1;
155 gb->memory.sramCurrentBank = 0;
156
157 gb->memory.ime = false;
158 gb->memory.ie = 0;
159
160 gb->memory.dmaRemaining = 0;
161 gb->memory.dmaSource = 0;
162 gb->memory.dmaDest = 0;
163 gb->memory.hdmaRemaining = 0;
164 gb->memory.hdmaSource = 0;
165 gb->memory.hdmaDest = 0;
166 gb->memory.isHdma = false;
167
168
169 gb->memory.dmaEvent.context = gb;
170 gb->memory.dmaEvent.name = "GB DMA";
171 gb->memory.dmaEvent.callback = _GBMemoryDMAService;
172 gb->memory.dmaEvent.priority = 0x40;
173 gb->memory.hdmaEvent.context = gb;
174 gb->memory.hdmaEvent.name = "GB HDMA";
175 gb->memory.hdmaEvent.callback = _GBMemoryHDMAService;
176 gb->memory.hdmaEvent.priority = 0x41;
177
178 memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
179 switch (gb->memory.mbcType) {
180 case GB_MBC1:
181 gb->memory.mbcState.mbc1.mode = 0;
182 break;
183 case GB_MBC6:
184 GBMBCSwitchHalfBank(gb, 0, 2);
185 GBMBCSwitchHalfBank(gb, 1, 3);
186 gb->memory.mbcState.mbc6.sramAccess = false;
187 GBMBCSwitchSramHalfBank(gb, 0, 0);
188 GBMBCSwitchSramHalfBank(gb, 0, 1);
189 break;
190 default:
191 memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));
192 }
193
194 GBMBCInit(gb);
195 gb->memory.sramBank = gb->memory.sram;
196
197 if (!gb->memory.wram) {
198 GBMemoryDeinit(gb);
199 }
200}
201
202void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
203 bank &= 7;
204 if (!bank) {
205 bank = 1;
206 }
207 memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
208 memory->wramCurrentBank = bank;
209}
210
// CPU read handler for the full 16-bit address space. Also models the bus
// contention of an in-flight OAM DMA: reads on the same bus as the DMA
// source, and all OAM reads, return 0xFF while the DMA runs.
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			// Access is on the bus the DMA is using: blocked
			return 0xFF;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			// OAM itself is inaccessible for the duration of the DMA
			return 0xFF;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// MBC6 maps the upper half of the switchable region separately
		if (memory->mbcType == GB_MBC6) {
			return memory->mbcState.mbc6.romBank1[address & (GB_SIZE_CART_HALFBANK - 1)];
		}
		// Fall through
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// VRAM is CPU-inaccessible while the PPU is drawing (mode 3)
		if (gb->video.mode != 3) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		}
		return 0xFF;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->mbcRead) {
			// MBC-specific read hook (e.g. camera, EEPROM) takes priority
			return memory->mbcRead(memory, address);
		} else if (memory->sramAccess && memory->sram) {
			return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2: // Echo RAM mirror of bank 0
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			// Echo RAM mirror of the switchable WRAM bank
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// OAM reads only succeed outside modes 2-3
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
286
// CPU write handler for the full 16-bit address space. Mirrors GBLoad8's
// OAM DMA bus-blocking behavior: writes on the bus being used by an active
// DMA, and all OAM writes, are dropped while the DMA runs.
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	if (gb->memory.dmaRemaining) {
		const enum GBBus* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
		enum GBBus dmaBus = block[memory->dmaSource >> 13];
		enum GBBus accessBus = block[address >> 13];
		if (dmaBus != GB_BUS_CPU && dmaBus == accessBus) {
			return;
		}
		if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
			return;
		}
	}
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		// ROM writes go to the MBC; a bank switch may remap the region the
		// CPU is executing from, so refresh the cached active region.
		memory->mbcWrite(gb, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// VRAM writes are ignored during mode 3 (pixel transfer)
		if (gb->video.mode != 3) {
			gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) | (GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank));
			gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		}
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			memory->rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess && memory->sram && memory->mbcType != GB_MBC2) {
			// MBC2's built-in RAM is handled by mbcWrite instead
			memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else {
			memory->mbcWrite(gb, address, value);
		}
		gb->sramDirty |= GB_SRAM_DIRT_NEW; // Battery save needs flushing
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2: // Echo RAM mirror of bank 0
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			// Echo RAM mirror of the switchable WRAM bank
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			// OAM writes only land outside modes 2-3
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
				gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}
357
358int GBCurrentSegment(struct LR35902Core* cpu, uint16_t address) {
359 struct GB* gb = (struct GB*) cpu->master;
360 struct GBMemory* memory = &gb->memory;
361 switch (address >> 12) {
362 case GB_REGION_CART_BANK0:
363 case GB_REGION_CART_BANK0 + 1:
364 case GB_REGION_CART_BANK0 + 2:
365 case GB_REGION_CART_BANK0 + 3:
366 return 0;
367 case GB_REGION_CART_BANK1:
368 case GB_REGION_CART_BANK1 + 1:
369 case GB_REGION_CART_BANK1 + 2:
370 case GB_REGION_CART_BANK1 + 3:
371 return memory->currentBank;
372 case GB_REGION_VRAM:
373 case GB_REGION_VRAM + 1:
374 return gb->video.vramCurrentBank;
375 case GB_REGION_EXTERNAL_RAM:
376 case GB_REGION_EXTERNAL_RAM + 1:
377 return memory->sramCurrentBank;
378 case GB_REGION_WORKING_RAM_BANK0:
379 case GB_REGION_WORKING_RAM_BANK0 + 2:
380 return 0;
381 case GB_REGION_WORKING_RAM_BANK1:
382 return memory->wramCurrentBank;
383 default:
384 return 0;
385 }
386}
387
// Debugger/inspection read: like GBLoad8 but without DMA bus blocking, and
// with an explicit bank selector. segment < 0 reads the currently mapped
// bank; segment >= 0 reads that specific bank where the region is banked.
uint8_t GBView8(struct LR35902Core* cpu, uint16_t address, int segment) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		if (segment < 0) {
			return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		} else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
			// Read directly out of the requested ROM bank
			return memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
		} else {
			// Bank out of range for this cart
			return 0xFF;
		}
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		if (segment < 0) {
			return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		} else if (segment < 2) {
			return gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment *GB_SIZE_VRAM_BANK0];
		} else {
			return 0xFF;
		}
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return memory->rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			if (segment < 0 && memory->sram) {
				return memory->sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
			} else if ((size_t) segment * GB_SIZE_EXTERNAL_RAM < gb->sramSize) {
				return memory->sram[(address & (GB_SIZE_EXTERNAL_RAM - 1)) + segment *GB_SIZE_EXTERNAL_RAM];
			} else {
				return 0xFF;
			}
		} else if (memory->mbcRead) {
			return memory->mbcRead(memory, address);
		} else if (memory->mbcType == GB_HuC3) {
			return 0x01; // TODO: Is this supposed to be the current SRAM bank?
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2: // Echo RAM mirror of bank 0
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		if (segment < 0) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		} else if (segment < 8) {
			return memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment *GB_SIZE_WORKING_RAM_BANK0];
		} else {
			return 0xFF;
		}
	default:
		if (address < GB_BASE_OAM) {
			// Echo RAM mirror of the switchable WRAM bank
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			// NOTE(review): still gated on PPU mode, unlike the other
			// regions here — confirm whether a debugger view should bypass it
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}
469
470void GBMemoryDMA(struct GB* gb, uint16_t base) {
471 if (base > 0xF100) {
472 return;
473 }
474 mTimingDeschedule(&gb->timing, &gb->memory.dmaEvent);
475 mTimingSchedule(&gb->timing, &gb->memory.dmaEvent, 8);
476 if (gb->cpu->cycles + 8 < gb->cpu->nextEvent) {
477 gb->cpu->nextEvent = gb->cpu->cycles + 8;
478 }
479 gb->memory.dmaSource = base;
480 gb->memory.dmaDest = 0;
481 gb->memory.dmaRemaining = 0xA0;
482}
483
// Handles writes to the CGB HDMA5 register: latches source/destination from
// HDMA1-4 and starts either a general-purpose DMA (bit 7 clear) or an
// H-Blank DMA (bit 7 set). Returns the value HDMA5 should read back.
uint8_t GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0; // Source is 16-byte aligned
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		// VRAM is not a valid HDMA source
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return value | 0x80;
	}
	gb->memory.hdmaDest &= 0x1FF0; // Destination is forced into VRAM
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if ((!wasHdma && !gb->memory.isHdma) || (GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC]) && gb->video.mode == 0)) {
		if (gb->memory.isHdma) {
			gb->memory.hdmaRemaining = 0x10; // One 16-byte block per H-Blank
		} else {
			// GDMA: transfer all requested blocks at once, CPU blocked
			gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		}
		gb->cpuBlocked = true;
		mTimingSchedule(&gb->timing, &gb->memory.hdmaEvent, 0);
	} else if (gb->memory.isHdma && !GBRegisterLCDCIsEnable(gb->memory.io[REG_LCDC])) {
		// NOTE(review): HDMA requested with LCD off — readback reports one
		// extra block; verify this matches hardware
		return 0x80 | ((value + 1) & 0x7F);
	}
	return value & 0x7F;
}
511
// Timing callback: copies one byte of an in-flight OAM DMA into OAM,
// rescheduling itself until the whole 0xA0-byte transfer is done.
void _GBMemoryDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	int dmaRemaining = gb->memory.dmaRemaining;
	// Temporarily clear dmaRemaining so the GBLoad8 below is not blocked
	// by its own DMA-in-progress bus checks.
	gb->memory.dmaRemaining = 0;
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	gb->video.renderer->writeOAM(gb->video.renderer, gb->memory.dmaDest);
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	gb->memory.dmaRemaining = dmaRemaining - 1;
	if (gb->memory.dmaRemaining) {
		// One byte every 4 cycles
		mTimingSchedule(timing, &gb->memory.dmaEvent, 4 - cyclesLate);
	}
}
527
// Timing callback: copies one byte of an HDMA/GDMA transfer, keeping the
// CPU blocked, and writes the final register state back when it completes.
void _GBMemoryHDMAService(struct mTiming* timing, void* context, uint32_t cyclesLate) {
	struct GB* gb = context;
	gb->cpuBlocked = true; // CPU stalls while the transfer runs
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	if (gb->memory.hdmaRemaining) {
		mTimingDeschedule(timing, &gb->memory.hdmaEvent);
		mTimingSchedule(timing, &gb->memory.hdmaEvent, 2 - cyclesLate);
	} else {
		gb->cpuBlocked = false;
		// Hardware exposes the advanced source/dest back through HDMA1-4
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			// HDMA5 counts down remaining blocks; 0xFF marks completion
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] = 0xFF;
		}
	}
}
555
556void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old, int segment) {
557 struct GB* gb = (struct GB*) cpu->master;
558 struct GBMemory* memory = &gb->memory;
559 int8_t oldValue = -1;
560
561 switch (address >> 12) {
562 case GB_REGION_CART_BANK0:
563 case GB_REGION_CART_BANK0 + 1:
564 case GB_REGION_CART_BANK0 + 2:
565 case GB_REGION_CART_BANK0 + 3:
566 _pristineCow(gb);
567 oldValue = memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
568 memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)] = value;
569 break;
570 case GB_REGION_CART_BANK1:
571 case GB_REGION_CART_BANK1 + 1:
572 case GB_REGION_CART_BANK1 + 2:
573 case GB_REGION_CART_BANK1 + 3:
574 _pristineCow(gb);
575 if (segment < 0) {
576 oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
577 memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
578 } else if ((size_t) segment * GB_SIZE_CART_BANK0 < memory->romSize) {
579 oldValue = memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0];
580 memory->rom[(address & (GB_SIZE_CART_BANK0 - 1)) + segment * GB_SIZE_CART_BANK0] = value;
581 } else {
582 return;
583 }
584 break;
585 case GB_REGION_VRAM:
586 case GB_REGION_VRAM + 1:
587 if (segment < 0) {
588 oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
589 gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
590 gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + GB_SIZE_VRAM_BANK0 * gb->video.vramCurrentBank);
591 } else if (segment < 2) {
592 oldValue = gb->video.vram[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0];
593 gb->video.vramBank[(address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0] = value;
594 gb->video.renderer->writeVRAM(gb->video.renderer, (address & (GB_SIZE_VRAM_BANK0 - 1)) + segment * GB_SIZE_VRAM_BANK0);
595 } else {
596 return;
597 }
598 break;
599 case GB_REGION_EXTERNAL_RAM:
600 case GB_REGION_EXTERNAL_RAM + 1:
601 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
602 return;
603 case GB_REGION_WORKING_RAM_BANK0:
604 case GB_REGION_WORKING_RAM_BANK0 + 2:
605 oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
606 memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
607 break;
608 case GB_REGION_WORKING_RAM_BANK1:
609 if (segment < 0) {
610 oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
611 memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
612 } else if (segment < 8) {
613 oldValue = memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0];
614 memory->wram[(address & (GB_SIZE_WORKING_RAM_BANK0 - 1)) + segment * GB_SIZE_WORKING_RAM_BANK0] = value;
615 } else {
616 return;
617 }
618 break;
619 default:
620 if (address < GB_BASE_OAM) {
621 oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
622 memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
623 } else if (address < GB_BASE_UNUSABLE) {
624 oldValue = gb->video.oam.raw[address & 0xFF];
625 gb->video.oam.raw[address & 0xFF] = value;
626 gb->video.renderer->writeOAM(gb->video.renderer, address & 0xFF);
627 } else if (address < GB_BASE_HRAM) {
628 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
629 return;
630 } else if (address < GB_BASE_IE) {
631 oldValue = memory->hram[address & GB_SIZE_HRAM];
632 memory->hram[address & GB_SIZE_HRAM] = value;
633 } else {
634 mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
635 return;
636 }
637 }
638 if (old) {
639 *old = oldValue;
640 }
641}
642
// Saves the memory subsystem's state into a savestate. Multi-byte scalars
// are stored explicitly little-endian so states are portable across hosts.
void GBMemorySerialize(const struct GB* gb, struct GBSerializedState* state) {
	const struct GBMemory* memory = &gb->memory;
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	// Store event deadlines relative to now so they can be rescheduled on load
	STORE_32LE(memory->dmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.dmaNext);
	STORE_32LE(memory->hdmaEvent.when - mTimingCurrentTime(&gb->timing), 0, &state->memory.hdmaNext);

	// Pack the boolean state into a single flags word
	GBSerializedMemoryFlags flags = 0;
	flags = GBSerializedMemoryFlagsSetSramAccess(flags, memory->sramAccess);
	flags = GBSerializedMemoryFlagsSetRtcAccess(flags, memory->rtcAccess);
	flags = GBSerializedMemoryFlagsSetRtcLatched(flags, memory->rtcLatched);
	flags = GBSerializedMemoryFlagsSetIme(flags, memory->ime);
	flags = GBSerializedMemoryFlagsSetIsHdma(flags, memory->isHdma);
	flags = GBSerializedMemoryFlagsSetActiveRtcReg(flags, memory->activeRtcReg);
	STORE_16LE(flags, 0, &state->memory.flags);

	// Only the active MBC's extra state is meaningful
	switch (memory->mbcType) {
	case GB_MBC1:
		state->memory.mbc1.mode = memory->mbcState.mbc1.mode;
		state->memory.mbc1.multicartStride = memory->mbcState.mbc1.multicartStride;
		break;
	case GB_MBC3_RTC:
		STORE_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		state->memory.mbc7.state = memory->mbcState.mbc7.state;
		state->memory.mbc7.eeprom = memory->mbcState.mbc7.eeprom;
		state->memory.mbc7.address = memory->mbcState.mbc7.address;
		state->memory.mbc7.access = memory->mbcState.mbc7.access;
		state->memory.mbc7.latch = memory->mbcState.mbc7.latch;
		state->memory.mbc7.srBits = memory->mbcState.mbc7.srBits;
		STORE_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		STORE_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	default:
		break;
	}
}
695
// Restores the memory subsystem from a savestate. Bank switches are
// re-applied so derived pointers (romBank, wramBank, sramBank) agree with
// the restored bank indices; DMA events are only rescheduled if a transfer
// was in flight when the state was saved.
void GBMemoryDeserialize(struct GB* gb, const struct GBSerializedState* state) {
	struct GBMemory* memory = &gb->memory;
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	// Recompute bank pointers from the restored indices
	GBMBCSwitchBank(gb, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	GBMBCSwitchSramBank(gb, memory->sramCurrentBank);

	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	// Deadlines were stored relative to the time of saving
	uint32_t when;
	LOAD_32LE(when, 0, &state->memory.dmaNext);
	if (memory->dmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->dmaEvent, when);
	}
	LOAD_32LE(when, 0, &state->memory.hdmaNext);
	if (memory->hdmaRemaining) {
		mTimingSchedule(&gb->timing, &memory->hdmaEvent, when);
	}

	// Unpack the boolean state from the flags word
	GBSerializedMemoryFlags flags;
	LOAD_16LE(flags, 0, &state->memory.flags);
	memory->sramAccess = GBSerializedMemoryFlagsGetSramAccess(flags);
	memory->rtcAccess = GBSerializedMemoryFlagsGetRtcAccess(flags);
	memory->rtcLatched = GBSerializedMemoryFlagsGetRtcLatched(flags);
	memory->ime = GBSerializedMemoryFlagsGetIme(flags);
	memory->isHdma = GBSerializedMemoryFlagsGetIsHdma(flags);
	memory->activeRtcReg = GBSerializedMemoryFlagsGetActiveRtcReg(flags);

	// Only the active MBC's extra state is restored
	switch (memory->mbcType) {
	case GB_MBC1:
		memory->mbcState.mbc1.mode = state->memory.mbc1.mode;
		memory->mbcState.mbc1.multicartStride = state->memory.mbc1.multicartStride;
		if (memory->mbcState.mbc1.mode) {
			// Mode 1 multicarts also remap bank 0
			GBMBCSwitchBank0(gb, memory->currentBank >> memory->mbcState.mbc1.multicartStride);
		}
		break;
	case GB_MBC3_RTC:
		LOAD_64LE(gb->memory.rtcLastLatch, 0, &state->memory.rtc.lastLatch);
		break;
	case GB_MBC7:
		memory->mbcState.mbc7.state = state->memory.mbc7.state;
		memory->mbcState.mbc7.eeprom = state->memory.mbc7.eeprom;
		// Mask the EEPROM address to its valid 7-bit range
		memory->mbcState.mbc7.address = state->memory.mbc7.address & 0x7F;
		memory->mbcState.mbc7.access = state->memory.mbc7.access;
		memory->mbcState.mbc7.latch = state->memory.mbc7.latch;
		memory->mbcState.mbc7.srBits = state->memory.mbc7.srBits;
		LOAD_16LE(memory->mbcState.mbc7.sr, 0, &state->memory.mbc7.sr);
		LOAD_32LE(memory->mbcState.mbc7.writable, 0, &state->memory.mbc7.writable);
		break;
	default:
		break;
	}
}
762
763void _pristineCow(struct GB* gb) {
764 if (!gb->isPristine) {
765 return;
766 }
767 void* newRom = anonymousMemoryMap(GB_SIZE_CART_MAX);
768 memcpy(newRom, gb->memory.rom, gb->memory.romSize);
769 memset(((uint8_t*) newRom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
770 if (gb->memory.rom == gb->memory.romBase) {
771 gb->memory.romBase = newRom;
772 }
773 gb->memory.rom = newRom;
774 GBMBCSwitchBank(gb, gb->memory.currentBank);
775 gb->isPristine = false;
776}