src/gb/memory.c
/* Copyright (c) 2013-2016 Jeffrey Pfau
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "memory.h"

#include "core/interface.h"
#include "gb/gb.h"
#include "gb/io.h"
#include "gb/serialize.h"

#include "util/memory.h"

#include <time.h>

mLOG_DEFINE_CATEGORY(GB_MBC, "GB MBC");
mLOG_DEFINE_CATEGORY(GB_MEM, "GB Memory");

static void _pristineCow(struct GB* gb);

static void _GBMBCNone(struct GBMemory* memory, uint16_t address, uint8_t value) {
	UNUSED(memory);
	UNUSED(address);
	UNUSED(value);

	mLOG(GB_MBC, GAME_ERROR, "Wrote to invalid MBC");
}

static void _GBMBC1(struct GBMemory*, uint16_t address, uint8_t value);
static void _GBMBC2(struct GBMemory*, uint16_t address, uint8_t value);
static void _GBMBC3(struct GBMemory*, uint16_t address, uint8_t value);
static void _GBMBC5(struct GBMemory*, uint16_t address, uint8_t value);
static void _GBMBC6(struct GBMemory*, uint16_t address, uint8_t value);
static void _GBMBC7(struct GBMemory*, uint16_t address, uint8_t value);
static uint8_t _GBMBC7Read(struct GBMemory*, uint16_t address);
static void _GBMBC7Write(struct GBMemory*, uint16_t address, uint8_t value);

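// Fast path for opcode fetches: addresses inside the cached "active region"
// are read straight from the mapped ROM pointer; anything past
// activeRegionEnd remaps the region first and retries through cpuLoad8.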
static uint8_t GBFastLoad8(struct LR35902Core* cpu, uint16_t address) {
	if (UNLIKELY(address > cpu->memory.activeRegionEnd)) {
		cpu->memory.setActiveRegion(cpu, address);
		return cpu->memory.cpuLoad8(cpu, address);
	}
	return cpu->memory.activeRegion[address & cpu->memory.activeMask];
}

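// Selects which backing buffer opcode fetches come from, based on the top
// nybble of the address: ROM bank 0 and the switchable ROM bank get the fast
// loader; everything else falls back to the general-purpose GBLoad8.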
static void GBSetActiveRegion(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBase;
		cpu->memory.activeRegionEnd = GB_BASE_CART_BANK1;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		cpu->memory.cpuLoad8 = GBFastLoad8;
		cpu->memory.activeRegion = memory->romBank;
		cpu->memory.activeRegionEnd = GB_BASE_VRAM;
		cpu->memory.activeMask = GB_SIZE_CART_BANK0 - 1;
		break;
	default:
		cpu->memory.cpuLoad8 = GBLoad8;
		break;
	}
}

static void _GBMemoryDMAService(struct GB* gb);
static void _GBMemoryHDMAService(struct GB* gb);

void GBMemoryInit(struct GB* gb) {
	struct LR35902Core* cpu = gb->cpu;
	cpu->memory.cpuLoad8 = GBLoad8;
	cpu->memory.load8 = GBLoad8;
	cpu->memory.store8 = GBStore8;
	cpu->memory.setActiveRegion = GBSetActiveRegion;

	gb->memory.wram = 0;
	gb->memory.wramBank = 0;
	gb->memory.rom = 0;
	gb->memory.romBank = 0;
	gb->memory.romSize = 0;
	gb->memory.sram = 0;
	gb->memory.mbcType = GB_MBC_NONE;
	gb->memory.mbc = 0;

	gb->memory.rtc = NULL;

	GBIOInit(gb);
}

void GBMemoryDeinit(struct GB* gb) {
	mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	if (gb->memory.rom) {
		mappedMemoryFree(gb->memory.rom, gb->memory.romSize);
	}
}

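// Reset (re)allocates WRAM, points the ROM and SRAM bank pointers back at
// bank 1/bank 0, clears the DMA/HDMA and RTC latches, and then picks an MBC
// handler from the cartridge-type byte in the ROM header. Unknown mapper IDs
// are logged and fall through to the MBC5 handler as a best guess.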
void GBMemoryReset(struct GB* gb) {
	if (gb->memory.wram) {
		mappedMemoryFree(gb->memory.wram, GB_SIZE_WORKING_RAM);
	}
	gb->memory.wram = anonymousMemoryMap(GB_SIZE_WORKING_RAM);
	GBMemorySwitchWramBank(&gb->memory, 1);
	gb->memory.romBank = &gb->memory.rom[GB_SIZE_CART_BANK0];
	gb->memory.currentBank = 1;
	if (!gb->memory.sram) {
		gb->memory.sram = anonymousMemoryMap(0x20000);
	}
	gb->memory.sramCurrentBank = 0;
	gb->memory.sramBank = gb->memory.sram;

	gb->memory.ime = false;
	gb->memory.ie = 0;

	gb->memory.dmaNext = INT_MAX;
	gb->memory.dmaRemaining = 0;
	gb->memory.dmaSource = 0;
	gb->memory.dmaDest = 0;
	gb->memory.hdmaNext = INT_MAX;
	gb->memory.hdmaRemaining = 0;
	gb->memory.hdmaSource = 0;
	gb->memory.hdmaDest = 0;
	gb->memory.isHdma = false;

	gb->memory.sramAccess = false;
	gb->memory.rtcAccess = false;
	gb->memory.activeRtcReg = 0;
	gb->memory.rtcLatched = false;
	memset(&gb->memory.rtcRegs, 0, sizeof(gb->memory.rtcRegs));

	memset(&gb->memory.hram, 0, sizeof(gb->memory.hram));
	memset(&gb->memory.mbcState, 0, sizeof(gb->memory.mbcState));

	const struct GBCartridge* cart = (const struct GBCartridge*) &gb->memory.rom[0x100];
	switch (cart->type) {
	case 0:
	case 8:
	case 9:
		gb->memory.mbc = _GBMBCNone;
		gb->memory.mbcType = GB_MBC_NONE;
		break;
	case 1:
	case 2:
	case 3:
		gb->memory.mbc = _GBMBC1;
		gb->memory.mbcType = GB_MBC1;
		break;
	case 5:
	case 6:
		gb->memory.mbc = _GBMBC2;
		gb->memory.mbcType = GB_MBC2;
		break;
	case 0x0F:
	case 0x10:
	case 0x11:
	case 0x12:
	case 0x13:
		gb->memory.mbc = _GBMBC3;
		gb->memory.mbcType = GB_MBC3;
		break;
	default:
		mLOG(GB_MBC, WARN, "Unknown MBC type: %02X", cart->type);
	case 0x19:
	case 0x1A:
	case 0x1B:
		gb->memory.mbc = _GBMBC5;
		gb->memory.mbcType = GB_MBC5;
		break;
	case 0x1C:
	case 0x1D:
	case 0x1E:
		gb->memory.mbc = _GBMBC5;
		gb->memory.mbcType = GB_MBC5_RUMBLE;
		break;
	case 0x20:
		gb->memory.mbc = _GBMBC6;
		gb->memory.mbcType = GB_MBC6;
		break;
	case 0x22:
		gb->memory.mbc = _GBMBC7;
		gb->memory.mbcType = GB_MBC7;
		break;
	}

	if (!gb->memory.wram) {
		GBMemoryDeinit(gb);
	}
}

void GBMemorySwitchWramBank(struct GBMemory* memory, int bank) {
	bank &= 7;
	if (!bank) {
		bank = 1;
	}
	memory->wramBank = &memory->wram[GB_SIZE_WORKING_RAM_BANK0 * bank];
	memory->wramCurrentBank = bank;
}

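// General-purpose bus read. Dispatches on the high nybble of the address:
// ROM banks, VRAM, cartridge RAM/RTC, WRAM and its echo, OAM (only readable
// outside modes 2-3 here), I/O registers, HRAM, and IE. Unmapped or blocked
// reads return 0xFF.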
uint8_t GBLoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		return memory->romBase[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		return memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		return gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			return gb->memory.rtcRegs[memory->activeRtcReg];
		} else if (memory->sramAccess) {
			return gb->memory.sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)];
		} else if (memory->mbcType == GB_MBC7) {
			return _GBMBC7Read(memory, address);
		}
		return 0xFF;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		return memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	case GB_REGION_WORKING_RAM_BANK1:
		return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
	default:
		if (address < GB_BASE_OAM) {
			return memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		}
		if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				return gb->video.oam.raw[address & 0xFF];
			}
			return 0xFF;
		}
		if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to read from unusable memory: %04X", address);
			return 0xFF;
		}
		if (address < GB_BASE_HRAM) {
			return GBIORead(gb, address & (GB_SIZE_IO - 1));
		}
		if (address < GB_BASE_IE) {
			return memory->hram[address & GB_SIZE_HRAM];
		}
		return GBIORead(gb, REG_IE);
	}
}

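// General-purpose bus write. Writes into the ROM address space are routed to
// the active MBC handler (which may remap banks, so the active fetch region
// is refreshed afterwards); other regions write through to VRAM, cartridge
// RAM/RTC, WRAM, OAM, I/O, HRAM, or IE.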
void GBStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		memory->mbc(memory, address, value);
		cpu->memory.setActiveRegion(cpu, cpu->pc);
		return;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		// TODO: Block access in wrong modes
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		if (memory->rtcAccess) {
			gb->memory.rtcRegs[memory->activeRtcReg] = value;
		} else if (memory->sramAccess) {
			gb->memory.sramBank[address & (GB_SIZE_EXTERNAL_RAM - 1)] = value;
		} else if (gb->memory.mbcType == GB_MBC7) {
			_GBMBC7Write(&gb->memory, address, value);
		}
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	case GB_REGION_WORKING_RAM_BANK1:
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		return;
	default:
		if (address < GB_BASE_OAM) {
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			if (gb->video.mode < 2) {
				gb->video.oam.raw[address & 0xFF] = value;
			}
		} else if (address < GB_BASE_IO) {
			mLOG(GB_MEM, GAME_ERROR, "Attempt to write to unusable memory: %04X:%02X", address, value);
		} else if (address < GB_BASE_HRAM) {
			GBIOWrite(gb, address & (GB_SIZE_IO - 1), value);
		} else if (address < GB_BASE_IE) {
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			GBIOWrite(gb, REG_IE, value);
		}
	}
}

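// Advances the DMA and HDMA countdowns by the elapsed CPU cycles, runs each
// transfer's service routine when its deadline passes, and returns how many
// cycles remain until the next memory event.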
int32_t GBMemoryProcessEvents(struct GB* gb, int32_t cycles) {
	int nextEvent = INT_MAX;
	if (gb->memory.dmaRemaining) {
		gb->memory.dmaNext -= cycles;
		if (gb->memory.dmaNext <= 0) {
			_GBMemoryDMAService(gb);
		}
		nextEvent = gb->memory.dmaNext;
	}
	if (gb->memory.hdmaRemaining) {
		gb->memory.hdmaNext -= cycles;
		if (gb->memory.hdmaNext <= 0) {
			_GBMemoryHDMAService(gb);
		}
		if (gb->memory.hdmaNext < nextEvent) {
			nextEvent = gb->memory.hdmaNext;
		}
	}
	return nextEvent;
}

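// Starts an OAM DMA transfer from the given source page. While the transfer
// is pending, the CPU's load/store handlers are swapped for the DMA-aware
// variants below, which block accesses that would conflict with the transfer.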
void GBMemoryDMA(struct GB* gb, uint16_t base) {
	if (base > 0xF100) {
		return;
	}
	gb->cpu->memory.store8 = GBDMAStore8;
	gb->cpu->memory.load8 = GBDMALoad8;
	gb->cpu->memory.cpuLoad8 = GBDMALoad8;
	gb->memory.dmaNext = gb->cpu->cycles + 8;
	if (gb->memory.dmaNext < gb->cpu->nextEvent) {
		gb->cpu->nextEvent = gb->memory.dmaNext;
	}
	gb->memory.dmaSource = base;
	gb->memory.dmaDest = 0;
	gb->memory.dmaRemaining = 0xA0;
}

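// Handles a write to HDMA5, the CGB DMA trigger: the source and destination
// are reassembled from HDMA1-HDMA4 and masked to their valid ranges, bit 7 of
// the written value selects H-blank mode, and the low 7 bits encode the
// length in 0x10-byte blocks.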
void GBMemoryWriteHDMA5(struct GB* gb, uint8_t value) {
	gb->memory.hdmaSource = gb->memory.io[REG_HDMA1] << 8;
	gb->memory.hdmaSource |= gb->memory.io[REG_HDMA2];
	gb->memory.hdmaDest = gb->memory.io[REG_HDMA3] << 8;
	gb->memory.hdmaDest |= gb->memory.io[REG_HDMA4];
	gb->memory.hdmaSource &= 0xFFF0;
	if (gb->memory.hdmaSource >= 0x8000 && gb->memory.hdmaSource < 0xA000) {
		mLOG(GB_MEM, GAME_ERROR, "Invalid HDMA source: %04X", gb->memory.hdmaSource);
		return;
	}
	gb->memory.hdmaDest &= 0x1FF0;
	gb->memory.hdmaDest |= 0x8000;
	bool wasHdma = gb->memory.isHdma;
	gb->memory.isHdma = value & 0x80;
	if (!wasHdma && !gb->memory.isHdma) {
		gb->memory.hdmaRemaining = ((value & 0x7F) + 1) * 0x10;
		gb->memory.hdmaNext = gb->cpu->cycles;
		gb->cpu->nextEvent = gb->cpu->cycles;
	}
}

void _GBMemoryDMAService(struct GB* gb) {
	uint8_t b = GBLoad8(gb->cpu, gb->memory.dmaSource);
	// TODO: Can DMA write OAM during modes 2-3?
	gb->video.oam.raw[gb->memory.dmaDest] = b;
	++gb->memory.dmaSource;
	++gb->memory.dmaDest;
	--gb->memory.dmaRemaining;
	if (gb->memory.dmaRemaining) {
		gb->memory.dmaNext += 4;
	} else {
		gb->memory.dmaNext = INT_MAX;
		gb->cpu->memory.store8 = GBStore8;
		gb->cpu->memory.load8 = GBLoad8;
	}
}

void _GBMemoryHDMAService(struct GB* gb) {
	uint8_t b = gb->cpu->memory.load8(gb->cpu, gb->memory.hdmaSource);
	gb->cpu->memory.store8(gb->cpu, gb->memory.hdmaDest, b);
	++gb->memory.hdmaSource;
	++gb->memory.hdmaDest;
	--gb->memory.hdmaRemaining;
	gb->cpu->cycles += 2;
	if (gb->memory.hdmaRemaining) {
		gb->memory.hdmaNext += 2;
	} else {
		gb->memory.io[REG_HDMA1] = gb->memory.hdmaSource >> 8;
		gb->memory.io[REG_HDMA2] = gb->memory.hdmaSource;
		gb->memory.io[REG_HDMA3] = gb->memory.hdmaDest >> 8;
		gb->memory.io[REG_HDMA4] = gb->memory.hdmaDest;
		if (gb->memory.isHdma) {
			--gb->memory.io[REG_HDMA5];
			if (gb->memory.io[REG_HDMA5] == 0xFF) {
				gb->memory.isHdma = false;
			}
		} else {
			gb->memory.io[REG_HDMA5] |= 0x80;
		}
	}
}

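// While OAM DMA is running, the CPU is (as emulated here) blocked from
// addresses that share a bus with the transfer source. These tables give,
// per 8KiB slice of the DMA source address, the range that is blocked;
// DMG and CGB carve the bus up differently.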
struct OAMBlock {
	uint16_t low;
	uint16_t high;
};

static const struct OAMBlock _oamBlockDMG[] = {
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
	{ 0xA000, 0xFE00 },
};

static const struct OAMBlock _oamBlockCGB[] = {
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0xA000, 0xC000 },
	{ 0x8000, 0xA000 },
	{ 0xA000, 0xC000 },
	{ 0xC000, 0xFE00 },
	{ 0xA000, 0xC000 },
};

uint8_t GBDMALoad8(struct LR35902Core* cpu, uint16_t address) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return 0xFF;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return 0xFF;
	}
	return GBLoad8(cpu, address);
}

void GBDMAStore8(struct LR35902Core* cpu, uint16_t address, int8_t value) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	const struct OAMBlock* block = gb->model < GB_MODEL_CGB ? _oamBlockDMG : _oamBlockCGB;
	block = &block[memory->dmaSource >> 13];
	if (address >= block->low && address < block->high) {
		return;
	}
	if (address >= GB_BASE_OAM && address < GB_BASE_UNUSABLE) {
		return;
	}
	GBStore8(cpu, address, value);
}

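// Applies a cheat/patch byte directly to the backing memory, returning the
// previous value through `old`. Patching ROM first copies the pristine image
// so the original file stays untouched (see _pristineCow below).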
void GBPatch8(struct LR35902Core* cpu, uint16_t address, int8_t value, int8_t* old) {
	struct GB* gb = (struct GB*) cpu->master;
	struct GBMemory* memory = &gb->memory;
	int8_t oldValue = -1;

	switch (address >> 12) {
	case GB_REGION_CART_BANK0:
	case GB_REGION_CART_BANK0 + 1:
	case GB_REGION_CART_BANK0 + 2:
	case GB_REGION_CART_BANK0 + 3:
		_pristineCow(gb);
		oldValue = memory->rom[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->rom[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_CART_BANK1:
	case GB_REGION_CART_BANK1 + 1:
	case GB_REGION_CART_BANK1 + 2:
	case GB_REGION_CART_BANK1 + 3:
		_pristineCow(gb);
		oldValue = memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)];
		memory->romBank[address & (GB_SIZE_CART_BANK0 - 1)] = value;
		break;
	case GB_REGION_VRAM:
	case GB_REGION_VRAM + 1:
		oldValue = gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)];
		gb->video.vramBank[address & (GB_SIZE_VRAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_EXTERNAL_RAM:
	case GB_REGION_EXTERNAL_RAM + 1:
		mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
		return;
	case GB_REGION_WORKING_RAM_BANK0:
	case GB_REGION_WORKING_RAM_BANK0 + 2:
		oldValue = memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wram[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	case GB_REGION_WORKING_RAM_BANK1:
		oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
		memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		break;
	default:
		if (address < GB_BASE_OAM) {
			oldValue = memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)];
			memory->wramBank[address & (GB_SIZE_WORKING_RAM_BANK0 - 1)] = value;
		} else if (address < GB_BASE_UNUSABLE) {
			oldValue = gb->video.oam.raw[address & 0xFF];
			gb->video.oam.raw[address & 0xFF] = value;
		} else if (address < GB_BASE_HRAM) {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		} else if (address < GB_BASE_IE) {
			oldValue = memory->hram[address & GB_SIZE_HRAM];
			memory->hram[address & GB_SIZE_HRAM] = value;
		} else {
			mLOG(GB_MEM, STUB, "Unimplemented memory Patch8: 0x%08X", address);
			return;
		}
	}
	if (old) {
		*old = oldValue;
	}
}

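// Remaps the switchable ROM bank. Out-of-range bank numbers are logged and
// wrapped to the ROM size rather than left dangling.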
static void _switchBank(struct GBMemory* memory, int bank) {
	size_t bankStart = bank * GB_SIZE_CART_BANK0;
	if (bankStart + GB_SIZE_CART_BANK0 > memory->romSize) {
		mLOG(GB_MBC, GAME_ERROR, "Attempting to switch to an invalid ROM bank: %0X", bank);
		bankStart &= (memory->romSize - 1);
		bank = bankStart / GB_SIZE_CART_BANK0;
	}
	memory->romBank = &memory->rom[bankStart];
	memory->currentBank = bank;
}

static void _switchSramBank(struct GBMemory* memory, int bank) {
	size_t bankStart = bank * GB_SIZE_EXTERNAL_RAM;
	memory->sramBank = &memory->sram[bankStart];
	memory->sramCurrentBank = bank;
}

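// Latches the current time into the MBC3 RTC registers, preferring the
// attached mRTCSource (if any) over the host clock. The day counter is
// derived from the day of the year and is not yet persisted (see TODO).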
static void _latchRtc(struct GBMemory* memory) {
	time_t t;
	struct mRTCSource* rtc = memory->rtc;
	if (rtc) {
		if (rtc->sample) {
			rtc->sample(rtc);
		}
		t = rtc->unixTime(rtc);
	} else {
		t = time(0);
	}
	struct tm date;
	localtime_r(&t, &date);
	memory->rtcRegs[0] = date.tm_sec;
	memory->rtcRegs[1] = date.tm_min;
	memory->rtcRegs[2] = date.tm_hour;
	memory->rtcRegs[3] = date.tm_yday; // TODO: Persist day counter
	memory->rtcRegs[4] &= 0xF0;
	memory->rtcRegs[4] |= date.tm_yday >> 8;
}

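// MBC1: 0x0000-0x1FFF enables SRAM, 0x2000-0x3FFF sets the low 5 ROM bank
// bits (0 is treated as 1), and 0x4000-0x5FFF sets either the upper ROM bank
// bits or the SRAM bank depending on the mode selected via 0x6000-0x7FFF.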
void _GBMBC1(struct GBMemory* memory, uint16_t address, uint8_t value) {
	int bank = value & 0x1F;
	switch (address >> 13) {
	case 0x0:
		switch (value) {
		case 0:
			memory->sramAccess = false;
			break;
		case 0xA:
			memory->sramAccess = true;
			_switchSramBank(memory, memory->sramCurrentBank);
			break;
		default:
			// TODO
			mLOG(GB_MBC, STUB, "MBC1 unknown value %02X", value);
			break;
		}
		break;
	case 0x1:
		if (!bank) {
			++bank;
		}
		_switchBank(memory, bank | (memory->currentBank & 0x60));
		break;
	case 0x2:
		bank &= 3;
		if (!memory->mbcState.mbc1.mode) {
			_switchBank(memory, (bank << 5) | (memory->currentBank & 0x1F));
		} else {
			_switchSramBank(memory, bank);
		}
		break;
	case 0x3:
		memory->mbcState.mbc1.mode = value & 1;
		if (memory->mbcState.mbc1.mode) {
			_switchBank(memory, memory->currentBank & 0x1F);
		} else {
			_switchSramBank(memory, 0);
		}
		break;
	default:
		// TODO
		mLOG(GB_MBC, STUB, "MBC1 unknown address: %04X:%02X", address, value);
		break;
	}
}

void _GBMBC2(struct GBMemory* memory, uint16_t address, uint8_t value) {
	int bank = value & 0xF;
	switch (address >> 13) {
	case 0x0:
		switch (value) {
		case 0:
			memory->sramAccess = false;
			break;
		case 0xA:
			memory->sramAccess = true;
			_switchSramBank(memory, memory->sramCurrentBank);
			break;
		default:
			// TODO
			mLOG(GB_MBC, STUB, "MBC2 unknown value %02X", value);
			break;
		}
		break;
	case 0x1:
		if (!bank) {
			++bank;
		}
		_switchBank(memory, bank);
		break;
	default:
		// TODO
		mLOG(GB_MBC, STUB, "MBC2 unknown address: %04X:%02X", address, value);
		break;
	}
}

void _GBMBC3(struct GBMemory* memory, uint16_t address, uint8_t value) {
	int bank = value & 0x7F;
	switch (address >> 13) {
	case 0x0:
		switch (value) {
		case 0:
			memory->sramAccess = false;
			break;
		case 0xA:
			memory->sramAccess = true;
			_switchSramBank(memory, memory->sramCurrentBank);
			break;
		default:
			// TODO
			mLOG(GB_MBC, STUB, "MBC3 unknown value %02X", value);
			break;
		}
		break;
	case 0x1:
		if (!bank) {
			++bank;
		}
		_switchBank(memory, bank);
		break;
	case 0x2:
		if (value < 4) {
			_switchSramBank(memory, value);
			memory->rtcAccess = false;
		} else if (value >= 8 && value <= 0xC) {
			memory->activeRtcReg = value - 8;
			memory->rtcAccess = true;
		}
		break;
	case 0x3:
		if (memory->rtcLatched && value == 0) {
			memory->rtcLatched = false;
		} else if (!memory->rtcLatched && value == 1) {
			_latchRtc(memory);
			memory->rtcLatched = true;
		}
		break;
	}
}

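// MBC5: a 9-bit ROM bank split across writes to 0x2000-0x2FFF (low 8 bits)
// and 0x3000-0x3FFF (bit 8), plus a 4-bit SRAM bank. On rumble carts bit 3 of
// the SRAM bank value drives the rumble motor instead.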
void _GBMBC5(struct GBMemory* memory, uint16_t address, uint8_t value) {
	int bank;
	switch (address >> 12) {
	case 0x0:
	case 0x1:
		switch (value) {
		case 0:
			memory->sramAccess = false;
			break;
		case 0xA:
			memory->sramAccess = true;
			_switchSramBank(memory, memory->sramCurrentBank);
			break;
		default:
			// TODO
			mLOG(GB_MBC, STUB, "MBC5 unknown value %02X", value);
			break;
		}
		break;
	case 0x2:
		bank = (memory->currentBank & 0x100) | value;
		_switchBank(memory, bank);
		break;
	case 0x3:
		bank = (memory->currentBank & 0xFF) | ((value & 1) << 8);
		_switchBank(memory, bank);
		break;
	case 0x4:
	case 0x5:
		if (memory->mbcType == GB_MBC5_RUMBLE && memory->rumble) {
			memory->rumble->setRumble(memory->rumble, (value >> 3) & 1);
			value &= ~8;
		}
		_switchSramBank(memory, value & 0xF);
		break;
	default:
		// TODO
		mLOG(GB_MBC, STUB, "MBC5 unknown address: %04X:%02X", address, value);
		break;
	}
}

void _GBMBC6(struct GBMemory* memory, uint16_t address, uint8_t value) {
	UNUSED(memory);
	UNUSED(address);
	UNUSED(value);
	// TODO
	mLOG(GB_MBC, STUB, "MBC6 unimplemented");
}

void _GBMBC7(struct GBMemory* memory, uint16_t address, uint8_t value) {
	int bank = value & 0x7F;
	switch (address >> 13) {
	case 0x1:
		_switchBank(memory, bank);
		break;
	case 0x2:
		if (value < 0x10) {
			_switchSramBank(memory, value);
		}
		break;
	default:
		// TODO
		mLOG(GB_MBC, STUB, "MBC7 unknown address: %04X:%02X", address, value);
		break;
	}
}

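// MBC7 register reads: 0x20-0x5F return the 16-bit X/Y accelerometer samples
// (low byte then high byte), centred on 2047 when a rotation source is
// attached; 0x80 exposes the EEPROM's serial data-out bit.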
uint8_t _GBMBC7Read(struct GBMemory* memory, uint16_t address) {
	struct GBMBC7State* mbc7 = &memory->mbcState.mbc7;
	switch (address & 0xF0) {
	case 0x00:
	case 0x10:
	case 0x60:
	case 0x70:
		return 0;
	case 0x20:
		if (memory->rotation && memory->rotation->readTiltX) {
			int32_t x = -memory->rotation->readTiltX(memory->rotation);
			x >>= 21;
			x += 2047;
			return x;
		}
		return 0xFF;
	case 0x30:
		if (memory->rotation && memory->rotation->readTiltX) {
			int32_t x = -memory->rotation->readTiltX(memory->rotation);
			x >>= 21;
			x += 2047;
			return x >> 8;
		}
		return 7;
	case 0x40:
		if (memory->rotation && memory->rotation->readTiltY) {
			int32_t y = -memory->rotation->readTiltY(memory->rotation);
			y >>= 21;
			y += 2047;
			return y;
		}
		return 0xFF;
	case 0x50:
		if (memory->rotation && memory->rotation->readTiltY) {
			int32_t y = -memory->rotation->readTiltY(memory->rotation);
			y >>= 21;
			y += 2047;
			return y >> 8;
		}
		return 7;
	case 0x80:
		return (mbc7->sr >> 16) & 1;
	default:
		return 0xFF;
	}
}

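// MBC7 EEPROM access, bit-banged through register 0x80. The CS/SK/IO lines
// are decoded into a small state machine modelled on a 93LC56-style serial
// EEPROM: commands and addresses are clocked into the shift register on SK
// rising edges, and data is shifted out on falling edges during reads.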
void _GBMBC7Write(struct GBMemory* memory, uint16_t address, uint8_t value) {
	if ((address & 0xF0) != 0x80) {
		return;
	}
	struct GBMBC7State* mbc7 = &memory->mbcState.mbc7;
	GBMBC7Field old = memory->mbcState.mbc7.field;
	mbc7->field = GBMBC7FieldClearIO(value);
	if (!GBMBC7FieldIsCS(old) && GBMBC7FieldIsCS(value)) {
		if (mbc7->state == GBMBC7_STATE_WRITE) {
			if (mbc7->writable) {
				memory->sramBank[mbc7->address * 2] = mbc7->sr >> 8;
				memory->sramBank[mbc7->address * 2 + 1] = mbc7->sr;
			}
			mbc7->sr = 0x1FFFF;
			mbc7->state = GBMBC7_STATE_NULL;
		} else {
			mbc7->state = GBMBC7_STATE_IDLE;
		}
	}
	if (!GBMBC7FieldIsSK(old) && GBMBC7FieldIsSK(value)) {
		if (mbc7->state > GBMBC7_STATE_IDLE && mbc7->state != GBMBC7_STATE_READ) {
			mbc7->sr <<= 1;
			mbc7->sr |= GBMBC7FieldGetIO(value);
			++mbc7->srBits;
		}
		switch (mbc7->state) {
		case GBMBC7_STATE_IDLE:
			if (GBMBC7FieldIsIO(value)) {
				mbc7->state = GBMBC7_STATE_READ_COMMAND;
				mbc7->srBits = 0;
				mbc7->sr = 0;
			}
			break;
		case GBMBC7_STATE_READ_COMMAND:
			if (mbc7->srBits == 2) {
				mbc7->state = GBMBC7_STATE_READ_ADDRESS;
				mbc7->srBits = 0;
				mbc7->command = mbc7->sr;
			}
			break;
		case GBMBC7_STATE_READ_ADDRESS:
			if (mbc7->srBits == 8) {
				mbc7->state = GBMBC7_STATE_COMMAND_0 + mbc7->command;
				mbc7->srBits = 0;
				mbc7->address = mbc7->sr;
				if (mbc7->state == GBMBC7_STATE_COMMAND_0) {
					switch (mbc7->address >> 6) {
					case 0:
						mbc7->writable = false;
						mbc7->state = GBMBC7_STATE_NULL;
						break;
					case 3:
						mbc7->writable = true;
						mbc7->state = GBMBC7_STATE_NULL;
						break;
					}
				}
			}
			break;
		case GBMBC7_STATE_COMMAND_0:
			if (mbc7->srBits == 16) {
				switch (mbc7->address >> 6) {
				case 0:
					mbc7->writable = false;
					mbc7->state = GBMBC7_STATE_NULL;
					break;
				case 1:
					mbc7->state = GBMBC7_STATE_WRITE;
					if (mbc7->writable) {
						int i;
						for (i = 0; i < 256; ++i) {
							memory->sramBank[i * 2] = mbc7->sr >> 8;
							memory->sramBank[i * 2 + 1] = mbc7->sr;
						}
					}
					break;
				case 2:
					mbc7->state = GBMBC7_STATE_WRITE;
					if (mbc7->writable) {
						int i;
						for (i = 0; i < 256; ++i) {
							memory->sramBank[i * 2] = 0xFF;
							memory->sramBank[i * 2 + 1] = 0xFF;
						}
					}
					break;
				case 3:
					mbc7->writable = true;
					mbc7->state = GBMBC7_STATE_NULL;
					break;
				}
			}
			break;
		case GBMBC7_STATE_COMMAND_SR_WRITE:
			if (mbc7->srBits == 16) {
				mbc7->srBits = 0;
				mbc7->state = GBMBC7_STATE_WRITE;
			}
			break;
		case GBMBC7_STATE_COMMAND_SR_READ:
			if (mbc7->srBits == 1) {
				mbc7->sr = memory->sramBank[mbc7->address * 2] << 8;
				mbc7->sr |= memory->sramBank[mbc7->address * 2 + 1];
				mbc7->srBits = 0;
				mbc7->state = GBMBC7_STATE_READ;
			}
			break;
		case GBMBC7_STATE_COMMAND_SR_FILL:
			if (mbc7->srBits == 16) {
				mbc7->sr = 0xFFFF;
				mbc7->srBits = 0;
				mbc7->state = GBMBC7_STATE_WRITE;
			}
			break;
		default:
			break;
		}
	} else if (GBMBC7FieldIsSK(old) && !GBMBC7FieldIsSK(value)) {
		if (mbc7->state == GBMBC7_STATE_READ) {
			mbc7->sr <<= 1;
			++mbc7->srBits;
			if (mbc7->srBits == 16) {
				mbc7->srBits = 0;
				mbc7->state = GBMBC7_STATE_NULL;
			}
		}
	}
}

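// Savestate support: everything needed to reconstruct the memory map (bank
// indices, DMA/HDMA progress, RTC state) is stored little-endian; on load the
// bank pointers are rebuilt by replaying the bank switches.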
void GBMemorySerialize(const struct GBMemory* memory, struct GBSerializedState* state) {
	memcpy(state->wram, memory->wram, GB_SIZE_WORKING_RAM);
	memcpy(state->hram, memory->hram, GB_SIZE_HRAM);
	STORE_16LE(memory->currentBank, 0, &state->memory.currentBank);
	state->memory.wramCurrentBank = memory->wramCurrentBank;
	state->memory.sramCurrentBank = memory->sramCurrentBank;

	STORE_32LE(memory->dmaNext, 0, &state->memory.dmaNext);
	STORE_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	STORE_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	STORE_32LE(memory->hdmaNext, 0, &state->memory.hdmaNext);
	STORE_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	STORE_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	STORE_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	state->memory.dmaRemaining = memory->dmaRemaining;
	memcpy(state->memory.rtcRegs, memory->rtcRegs, sizeof(state->memory.rtcRegs));

	state->memory.sramAccess = memory->sramAccess;
	state->memory.rtcAccess = memory->rtcAccess;
	state->memory.rtcLatched = memory->rtcLatched;
	state->memory.ime = memory->ime;
	state->memory.isHdma = memory->isHdma;
	state->memory.activeRtcReg = memory->activeRtcReg;
}

void GBMemoryDeserialize(struct GBMemory* memory, const struct GBSerializedState* state) {
	memcpy(memory->wram, state->wram, GB_SIZE_WORKING_RAM);
	memcpy(memory->hram, state->hram, GB_SIZE_HRAM);
	LOAD_16LE(memory->currentBank, 0, &state->memory.currentBank);
	memory->wramCurrentBank = state->memory.wramCurrentBank;
	memory->sramCurrentBank = state->memory.sramCurrentBank;

	_switchBank(memory, memory->currentBank);
	GBMemorySwitchWramBank(memory, memory->wramCurrentBank);
	_switchSramBank(memory, memory->sramCurrentBank);

	LOAD_32LE(memory->dmaNext, 0, &state->memory.dmaNext);
	LOAD_16LE(memory->dmaSource, 0, &state->memory.dmaSource);
	LOAD_16LE(memory->dmaDest, 0, &state->memory.dmaDest);

	LOAD_32LE(memory->hdmaNext, 0, &state->memory.hdmaNext);
	LOAD_16LE(memory->hdmaSource, 0, &state->memory.hdmaSource);
	LOAD_16LE(memory->hdmaDest, 0, &state->memory.hdmaDest);

	LOAD_16LE(memory->hdmaRemaining, 0, &state->memory.hdmaRemaining);
	memory->dmaRemaining = state->memory.dmaRemaining;
	memcpy(memory->rtcRegs, state->memory.rtcRegs, sizeof(state->memory.rtcRegs));

	memory->sramAccess = state->memory.sramAccess;
	memory->rtcAccess = state->memory.rtcAccess;
	memory->rtcLatched = state->memory.rtcLatched;
	memory->ime = state->memory.ime;
	memory->isHdma = state->memory.isHdma;
	memory->activeRtcReg = state->memory.activeRtcReg;
}

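// Copy-on-write for ROM patches: the first time the ROM is modified, it is
// copied out of the pristine mapping into an anonymous map (padded with 0xFF
// up to GB_SIZE_CART_MAX) so the original image is never written to.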
void _pristineCow(struct GB* gb) {
	if (gb->memory.rom != gb->pristineRom) {
		return;
	}
	gb->memory.rom = anonymousMemoryMap(GB_SIZE_CART_MAX);
	memcpy(gb->memory.rom, gb->pristineRom, gb->memory.romSize);
	memset(((uint8_t*) gb->memory.rom) + gb->memory.romSize, 0xFF, GB_SIZE_CART_MAX - gb->memory.romSize);
	_switchBank(&gb->memory, gb->memory.currentBank);
}