Initial commit

xprism1 2023-03-26 23:48:17 +08:00
commit c7bc134e1e
27 changed files with 4291 additions and 0 deletions

README.md Normal file

@@ -0,0 +1,57 @@
# ntool
## Requirements
- Python3
- pycryptodome: `pip install pycryptodome`
## Usage
- **For the example commands, arguments shown in parentheses are optional**
- Use **python** instead of **python3** on Windows
- "CCI" is equivalent to a .3ds file
### Re-sign and re-encrypt CIA/CCI for retail/dev:
```sh
python3 ntool.py cia_dev2retail <path_to_cia> (--out <path_to_output_file>)
python3 ntool.py cia_retail2dev <path_to_cia> (--out <path_to_output_file>)
python3 ntool.py cci_dev2retail <path_to_cci> (--out <path_to_output_file>)
python3 ntool.py cci_retail2dev <path_to_cci> (--out <path_to_output_file>)
```
### Run dev firmware on a retail 3DS (with Luma3DS)
- **WARNING: Only perform this on SysNAND if you are able to use ntrboot to recover from a brick!**
- First, obtain the SystemUpdaterForCTR zip file from NDP if you have an o3DS/o3DS XL/2DS. For an n3DS/n3DS XL/n2DS XL, obtain the SystemUpdaterForSNAKE zip file instead
- Extract the zip file, and choose the appropriate .csu file for your 3DS's region
- Run `python3 ntool.py csu2retailcias <path_to_csu> updates/`
- Place the `updates` folder in the root of your 3DS's SD
- Install [sysUpdater](https://github.com/profi200/sysUpdater), launch it and follow the on-screen instructions
- You may need to enable `Set developer UNITINFO` in Luma3DS settings
### Convert CCI to CIA
- Pass `--cci-dev` if the CCI is dev-crypted/signed, pass `--cia-dev` if you want to build a dev-signed CIA
```sh
python3 ntool.py cci2cia <path_to_cci> (--out <path_to_output_file>) (--cci-dev) (--cia-dev)
```
### Convert CDN contents to CIA
- If `--title-ver` is not provided and there are multiple TMD versions in the CDN folder, the latest TMD will be used
- Pass `--cdn-dev` if the CDN contents are dev-crypted/signed, pass `--cia-dev` if you want to build a dev-signed CIA
```sh
python3 ntool.py cdn2cia <path_to_cdn_folder> (--out <path_to_output_file>) (--title-ver <ver>) (--cdn-dev) (--cia-dev)
```
### Full extraction and rebuild of NCCH/CIA/CCI:
- First, use `ncch_extractall`/`cia_extractall`/`cci_extractall` to extract the NCCH/CIA/CCI to a folder
- Pass the `--dev` flag to use dev crypto
- Next, modify the files in the folder as necessary
- Note: do not modify the `exefs.bin`, `romfs.bin`, or `.ncch` files directly; modify the extracted contents
- Then, use `ncch_rebuildall`/`cia_rebuildall`/`cci_rebuildall` to rebuild the NCCH/CIA/CCI.
- Pass the `--dev` flag to use dev crypto
```sh
python3 ntool.py ncch_extractall <path_to_ncch> (--dev)
python3 ntool.py ncch_rebuildall <path_to_folder> (--dev)
python3 ntool.py cia_extractall <path_to_cia> (--dev)
python3 ntool.py cia_rebuildall <path_to_folder> (--dev)
python3 ntool.py cci_extractall <path_to_cci> (--dev)
python3 ntool.py cci_rebuildall <path_to_folder> (--dev)
```
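- The heavy lifting lives in the reader/builder classes under `lib/`; as a rough sketch, the same extraction can be scripted directly from Python (assuming the repository root is the working directory; the input path is a placeholder):
```py
from lib.ctr_ncch import NCCHReader

ncch = NCCHReader('content0.game.ncch', dev=0)   # dev=1 for dev-crypted NCCHs
print(ncch.is_decrypted)                         # crypto flag parsed from the NCCH header
ncch.extract()   # writes ncch_header.bin, exheader.bin, exefs.bin, romfs.bin, ...
```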

lib/common.py Normal file

@@ -0,0 +1,75 @@
import os, sys, platform, struct, shutil, subprocess, string, warnings, hashlib, secrets, math
from ctypes import *
from Crypto.Cipher import AES
from Crypto.Util import Counter
from Crypto.PublicKey import RSA
from Crypto.Hash import SHA256
from Crypto.Signature import pkcs1_15
resources_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'resources')
def readle(b):
return int.from_bytes(b, 'little')
def readbe(b):
return int.from_bytes(b, 'big')
def int8tobytes(x):
return int.to_bytes(x, 1, sys.byteorder)
def int16tobytes(x):
return int.to_bytes(x, 2, sys.byteorder)
def int32tobytes(x):
return int.to_bytes(x, 4, sys.byteorder)
def int64tobytes(x):
return int.to_bytes(x, 8, sys.byteorder)
def byteswap32(i):
return struct.unpack("<I", struct.pack(">I", i))[0]
def hextobytes(s):
return bytes.fromhex(s)
def read_chunks(f, size, chunk_size=0x10000):
for _ in range(size // chunk_size):
yield f.read(chunk_size)
yield f.read(size % chunk_size)
def align(size, alignment): # Returns the minimum amount that must be added to 'size' to make it a multiple of 'alignment'
if size % alignment != 0:
return alignment - (size % alignment)
else:
return 0
def roundup(size, alignment):
if size % alignment != 0:
return size + alignment - (size % alignment)
else:
return size
class Crypto:
def sha256(f, size, chunk_size=0x10000):
h = hashlib.sha256()
for _ in range(size // chunk_size):
h.update(f.read(chunk_size))
h.update(f.read(size % chunk_size))
return h.digest()
def sign_rsa_sha256(mod: bytes, priv: bytes, data: bytes):
x = pkcs1_15.new(RSA.construct((readbe(mod), 0x10001, readbe(priv))))
h = SHA256.new(data)
sig = x.sign(h)
return sig
def verify_rsa_sha256(mod: bytes, data: bytes, sig: bytes):
x = pkcs1_15.new(RSA.construct((readbe(mod), 0x10001)))
h = SHA256.new(data)
try:
x.verify(h, sig)
return True
except (ValueError, TypeError):
return False
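
A quick sanity check of the alignment and byte-order helpers above (a sketch; the values are arbitrary, and the `lib.common` import path assumes the repository root is the working directory):
```py
from lib.common import align, roundup, readle, readbe

# align() returns the padding needed to reach the next boundary; roundup() the padded size
assert align(0x210, 0x200) == 0x1F0
assert roundup(0x210, 0x200) == 0x400
assert align(0x400, 0x200) == 0          # already aligned, no padding needed

# readle()/readbe() are little-/big-endian integer reads
assert readle(b'\x00\x02') == 0x200
assert readbe(b'\x00\x02') == 0x002
```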

lib/ctr_cci.py Normal file

@@ -0,0 +1,749 @@
from .common import *
from .keys import *
from .ctr_ncch import NCCHReader, NCCHBuilder
from .ctr_cia import CIAReader
from .ctr_romfs import RomFSReader
media_unit = 0x200
KB = 1024
MB = 1 << 20
GB = 1 << 30
class CCIHdr(Structure): # 0x0 - 0x1FF
_pack_ = 1
_fields_ = [
('sig', c_uint8 * 0x100),
('magic', c_char * 4),
('ncsd_size', c_uint32),
('mediaID', c_uint8 * 8),
('partitions_fs_type', c_uint8 * 8),
('partitions_crypt_type', c_uint8 * 8),
('partitions_offset_size', c_uint8 * 64),
('exh_hash', c_uint8 * 32),
('exh_size', c_uint32),
('sector_0_offset', c_uint32),
('flags', c_uint8 * 8),
('partitionIDs', c_uint8 * 64),
('reserved', c_uint8 * 0x2E),
('crypt_type', c_uint8),
('backup_security_ver', c_uint8)
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class CardInfo(Structure): # 0x200 - 0x2FF
_pack_ = 1
_fields_ = [
('writable_addr', c_uint32),
('reserved1', c_uint8 * 3),
('card_flags', c_uint8),
('reserved2', c_uint8 * 0xF8),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class MasteringInfo(Structure): # 0x300 - 0x3FF
_pack_ = 1
_fields_ = [
('media_size_used', c_uint32),
('reserved1', c_uint8 * 0xC),
('title_ver', c_uint16),
('card_rev', c_uint16),
('reserved2', c_uint8 * 0xC),
('cver_titleID', c_uint8 * 8),
('cver_title_ver', c_uint16),
('reserved3', c_uint8 * 0xD6)
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class InitialData(Structure): # 0x1000 - 0x11FF
_pack_ = 1
_fields_ = [
('keyY', c_uint8 * 16),
('enc_titlekey', c_uint8 * 16),
('mac', c_uint8 * 16),
('nonce', c_uint8 * 0xC),
('reserved', c_uint8 * 0xC4),
('ncch_hdr_copy', c_uint8 * 0x100)
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class CardDeviceInfo(Structure): # 0x1200 - 0x3FFF; retail cards return 0xFF here when read
_pack_ = 1
_fields_ = [
('card_device_reserved_1', c_uint8 * 0x200),
('titlekey', c_uint8 * 16),
('card_device_reserved_2', c_uint8 * 0x1BF0),
('test_pattern', c_uint8 * 0x1000)
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class CCIReader:
def __init__(self, file, dev=0):
self.file = file
self.dev = dev
with open(file, 'rb') as f:
self.hdr = CCIHdr(f.read(0x200))
self.card_info = CardInfo(f.read(0x100))
self.mastering_info = MasteringInfo(f.read(0x100))
padding = f.read(0xC00)
self.initial_data = InitialData(f.read(0x200))
self.card_device_info = CardDeviceInfo(f.read(0x2E00))
# Decrypt InitialData TitleKey
if self.card_info.card_flags >> 6 == 3:
normal_key = b'\x00' * 16
else:
normal_key = CTR.key_scrambler(CTR.KeyX0x3B[0], readbe(bytes(self.initial_data.keyY)))
cipher = AES.new(normal_key, AES.MODE_CCM, nonce=bytes(self.initial_data.nonce))
self.title_key = cipher.decrypt(bytes(self.initial_data.enc_titlekey))
# Get component offset and size
files = {}
files['cci_header.bin'] = {
'offset': 0,
'size': 0x200
}
files['card_info.bin'] = {
'offset': 0x200,
'size': 0x100
}
files['mastering_info.bin'] = {
'offset': 0x300,
'size': 0x100
}
files['initialdata.bin'] = {
'offset': 0x1000,
'size': 0x200
}
if bytes(self.card_device_info) != b'\xFF' * 0x2E00:
files['card_device_info.bin'] = {
'offset': 0x1200,
'size': 0x2E00
}
names = {
0: 'game',
1: 'manual',
2: 'dlp',
3: 'unk3',
4: 'unk4',
5: 'unk5',
6: 'update_n3ds',
7: 'update_o3ds'
}
for i in range(0, 64, 8):
part_off, part_size = readle(self.hdr.partitions_offset_size[i:i + 4]) * media_unit, readle(self.hdr.partitions_offset_size[i + 4:i + 8]) * media_unit
if part_off:
files[f'content{i // 8}.{names[i // 8]}.ncch'] = {
'offset': part_off,
'size': part_size
}
self.files = files
def extract(self):
f = open(self.file, 'rb')
for name, info in self.files.items():
f.seek(info['offset'])
g = open(name, 'wb')
for data in read_chunks(f, info['size']):
g.write(data)
print(f'Extracted {name}')
g.close()
f.close()
def decrypt(self):
# Extract components
f = open(self.file, 'rb')
for name, info in self.files.items():
f.seek(info['offset'])
g = open(name, 'wb')
for data in read_chunks(f, info['size']):
g.write(data)
g.close()
f.close()
f = open('decrypted.3ds', 'wb')
with open('cci_header.bin', 'rb') as g:
f.write(g.read())
with open('card_info.bin', 'rb') as g:
f.write(g.read())
with open('mastering_info.bin', 'rb') as g:
f.write(g.read())
f.write(b'\x00' * 0xC00)
with open('initialdata.bin', 'rb') as g:
f.write(g.read())
if os.path.isfile('card_device_info.bin'):
with open('card_device_info.bin', 'rb') as g:
f.write(g.read())
else:
f.write(b'\xFF' * 0x2E00)
# Use NCCHReader to decrypt NCCHs and write to new file
sys.stdout = open(os.devnull, 'w') # Block print statements
for name, info in self.files.items():
if name.endswith('ncch'):
ncch = NCCHReader(name, dev=self.dev)
ncch.decrypt()
g = open('decrypted.ncch', 'rb')
for data in read_chunks(g, info['size']):
f.write(data)
sys.stdout = sys.__stdout__
f.write(b'\xff' * (os.path.getsize(self.file) - f.tell()))
f.close()
for name, info in self.files.items():
os.remove(name)
os.remove('decrypted.ncch')
print(f'Decrypted to decrypted.3ds')
def encrypt(self):
# Extract components
f = open(self.file, 'rb')
for name, info in self.files.items():
f.seek(info['offset'])
g = open(name, 'wb')
for data in read_chunks(f, info['size']):
g.write(data)
g.close()
# Read original partition 0 flags from NCCH header copy
f.seek(0x1188)
flags = f.read(8)
if flags[7] & 0x1:
part0_crypto = 'fixed'
else:
part0_crypto = { 0x00: 'Secure1',
0x01: 'Secure2',
0x0A: 'Secure3',
0x0B: 'Secure4' }[flags[3]]
f.close()
f = open('encrypted.3ds', 'wb')
with open('cci_header.bin', 'rb') as g:
f.write(g.read())
with open('card_info.bin', 'rb') as g:
f.write(g.read())
with open('mastering_info.bin', 'rb') as g:
f.write(g.read())
f.write(b'\x00' * 0xC00)
with open('initialdata.bin', 'rb') as g:
f.write(g.read())
if os.path.isfile('card_device_info.bin'):
with open('card_device_info.bin', 'rb') as g:
f.write(g.read())
else:
f.write(b'\xFF' * 0x2E00)
# Use NCCHReader to extract and NCCHBuilder to re-encrypt NCCHs, then write to new file
sys.stdout = open(os.devnull, 'w') # Block print statements
for name, info in self.files.items():
if name.endswith('ncch'):
ncch = NCCHReader(name, dev=self.dev)
ncch.extract()
ncch_header = 'ncch_header.bin'
if os.path.isfile('exheader.bin'):
exheader = 'exheader.bin'
else:
exheader = ''
if os.path.isfile('logo.bin'):
logo = 'logo.bin'
else:
logo = ''
if os.path.isfile('plain.bin'):
plain = 'plain.bin'
else:
plain = ''
if os.path.isfile('exefs.bin'):
exefs = 'exefs.bin'
else:
exefs = ''
if os.path.isfile('romfs.bin'):
romfs = 'romfs.bin'
else:
romfs = ''
if name.startswith('content0'):
NCCHBuilder(ncch_header=ncch_header, exheader=exheader, logo=logo, plain=plain, exefs=exefs, romfs=romfs, crypto=part0_crypto, dev=self.dev)
else: # Partitions 1 and up use Secure1, but if partition 0 uses fixed key, then the others will also use fixed key
if part0_crypto == 'fixed':
NCCHBuilder(ncch_header=ncch_header, exheader=exheader, logo=logo, plain=plain, exefs=exefs, romfs=romfs, crypto='fixed', dev=self.dev)
else:
NCCHBuilder(ncch_header=ncch_header, exheader=exheader, logo=logo, plain=plain, exefs=exefs, romfs=romfs, crypto='Secure1', dev=self.dev)
g = open('new.ncch', 'rb')
for data in read_chunks(g, info['size']):
f.write(data)
for i in os.listdir('.'):
if i in ['ncch_header.bin', 'exheader.bin', 'logo.bin', 'plain.bin', 'exefs.bin', 'romfs.bin']:
os.remove(i)
sys.stdout = sys.__stdout__
f.write(b'\xff' * (os.path.getsize(self.file) - f.tell()))
f.close()
for name, info in self.files.items():
os.remove(name)
os.remove('new.ncch')
print(f'Encrypted to encrypted.3ds')
def regen_undumpable(self):
with open(os.path.join(resources_dir, 'test_pattern.bin'), 'rb') as f:
test_pattern = f.read()
shutil.copyfile(self.file, 'new.3ds')
with open('new.3ds', 'r+b') as f:
f.seek(0x1400)
f.write(self.title_key)
f.seek(0x3000)
f.write(test_pattern)
print('Wrote to new.3ds')
def verify(self):
sig_check = []
sig_check.append(('NCSD Header', Crypto.verify_rsa_sha256(CTR.cci_mod[self.dev], bytes(self.hdr)[0x100:], bytes(self.hdr.sig))))
mac_check = []
if self.card_info.card_flags >> 6 == 3:
normal_key = b'\x00' * 16
else:
normal_key = CTR.key_scrambler(CTR.KeyX0x3B[0], readbe(bytes(self.initial_data.keyY)))
cipher = AES.new(normal_key, AES.MODE_CCM, nonce=bytes(self.initial_data.nonce))
try:
cipher.decrypt_and_verify(bytes(self.initial_data.enc_titlekey), received_mac_tag=bytes(self.initial_data.mac))
mac_check.append(('TitleKey', True))
except ValueError:
mac_check.append(('TitleKey', False))
others = []
if self.hdr.crypt_type & 1: # Bit 0 of hdr.crypt_type is set
others.append(('Cardbus crypto', self.hdr.crypt_type >> 1 == self.card_info.card_flags >> 6)) # Check if bits 2-1 of hdr.crypt_type == crypt type in card info section
print("Signatures:")
for i in sig_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
print("MACs:")
for i in mac_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
if others != []:
print("Others:")
for i in others:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
def __str__(self):
partitions = ''
for i in range(0, 64, 8):
part_id = hex(readle(self.hdr.partitionIDs[i:i + 8]))[2:].zfill(16)
if part_id != '0' * 16:
partitions += f'Partition {i // 8}\n'
partitions += f' > ID: {part_id}\n'
card_device = {
1: 'NOR Flash',
2: 'None',
3: 'BT'
}
media_platform = {
1: 'CTR'
}
media_type = {
0: 'Inner device',
1: 'CARD1',
2: 'CARD2',
3: 'Extended device'
}
card_type = {
0: 'S1',
1: 'S2'
}
crypt_type = {
0: 'Secure0',
1: 'Secure1',
2: 'Secure2',
3: 'Fixed key'
}
return (
f'TitleID: {hex(readle(self.hdr.mediaID))[2:].zfill(16)}\n'
f'{partitions}'
f'Flags:\n'
f' > BkupWriteWaitTime: {hex(self.hdr.flags[0])[2:].zfill(2)}\n'
f' > BkupSecurityVer: {hex(self.hdr.flags[1] + self.hdr.backup_security_ver)[2:].zfill(2)}\n'
f' > Card device: {card_device[self.hdr.flags[3] | self.hdr.flags[7]]}\n'
f' > Media platform: {media_platform[self.hdr.flags[4]]}\n'
f' > Media type: {media_type[self.hdr.flags[5]]}\n'
f'Card info:\n'
f' Writable address: 0x{hex(self.card_info.writable_addr)[2:].zfill(8)}\n'
f' Card type: {card_type[(self.card_info.card_flags >> 5) & 1]}\n' # Bit 5
f' Cardbus crypto: {crypt_type[self.card_info.card_flags >> 6]}\n' # Bit 7-6
f'Mastering metadata:\n'
f' Media size used: 0x{hex(self.mastering_info.media_size_used)[2:].zfill(8)}\n'
f' Title version: {self.mastering_info.title_ver}\n'
f' Card revision: {self.mastering_info.card_rev}\n'
f' CVer TitleID: {hex(readle(self.mastering_info.cver_titleID))[2:].zfill(16)}\n'
f' CVer title version: {self.mastering_info.cver_title_ver}\n'
f'Initial data:\n'
f' KeyY: {hex(readbe(self.initial_data.keyY))[2:].zfill(32)}\n'
f' TitleKey: {hex(readbe(self.initial_data.enc_titlekey))[2:].zfill(32)} (decrypted: {hex(readbe(self.title_key))[2:].zfill(32)})\n'
f' MAC: {hex(readbe(self.initial_data.mac))[2:].zfill(32)}\n'
f'Card device info:\n'
f' TitleKey: {hex(readbe(self.card_device_info.titlekey))[2:].zfill(32)}'
)
class CCIBuilder:
def __init__(self, cci_header='', card_info='', mastering_info='', initialdata='', card_device_info='', ncchs=[], size='', backup_write_wait_time=-1, save_crypto='', card_device='', media_type='', writable_addr='', card_type='', cardbus_crypto='', title_ver=-1, card_rev=-1, regen_sig='', dev=0, gen_card_device_info=0, out='new.3ds'):
'''
cci_header, card_info, mastering_info, initialdata, card_device_info: path to respective component (if available)
ncchs: list containing filenames of NCCHs, which must each be named 'content[content index]*' (* is wildcard)
The following parameters are required if no cci_header, card_info or mastering_info is provided; if both a file and a parameter are supplied, the parameter overrides the file(s)
- size: total ROM size; '128MB' or '256MB' or '512MB' or '1GB' or '2GB' or '4GB' or '8GB' (leave blank for auto)
- backup_write_wait_time (leave blank for auto)
- save_crypto: 'fw1' or 'fw2' or 'fw3' or 'fw6' (leave blank for auto)
- card_device: 'NorFlash' or 'None' or 'BT' (leave blank for auto)
- media_type: 'InnerDevice' or 'CARD1' or 'CARD2' or 'ExtendedDevice' (leave blank for auto)
- writable_addr: in hex (leave blank for auto)
- card_type: 'S1' or 'S2' (leave blank for auto)
- cardbus_crypto: 'Secure0' or 'Secure1' or 'Secure2' or 'fixed' (leave blank for auto)
- title_ver
- card_rev
regen_sig: '' or 'retail' (test keys) or 'dev'
dev: 0 or 1
gen_card_device_info: 0 or 1 (whether to fill in 0x1400-0x140F and 0x3000-0x3FFF)
out: path to output file
'''
# Get savedata size
ncchs.sort() # Sort NCCHs by content index
used_size = 0x4000 + sum([os.path.getsize(i) for i in ncchs])
ncch = NCCHReader(ncchs[0], dev=dev)
info = ncch.files['exheader.bin']
with open(ncchs[0], 'rb') as f:
f.seek(0x100)
ncch_hdr = f.read(0x100)
f.seek(info['offset'])
if ncch.is_decrypted:
exheader = f.read(info['size'])
else:
counter = Counter.new(128, initial_value=readbe(info['counter']))
cipher = AES.new(info['key'], AES.MODE_CTR, counter=counter)
exheader = cipher.decrypt(f.read(info['size']))
save_data_size = readle(exheader[0x1C0:0x1C8])
if save_data_size > 0 and save_data_size < 128 * KB:
save_data_size = 128 * KB
elif save_data_size > 128 * KB and save_data_size < 512 * KB:
save_data_size = 512 * KB
elif save_data_size > 512 * KB:
save_data_size += align(save_data_size, MB)
# Checks
if backup_write_wait_time != -1:
if not (backup_write_wait_time >= 0 and backup_write_wait_time <= 255):
raise Exception('Invalid backup write wait time')
if card_device == 'NorFlash':
if media_type == 'CARD2':
raise Exception('NorFlash is invalid for CARD2')
elif media_type == 'CARD1' and save_data_size != 128 * KB and save_data_size != 512 * KB:
raise Exception('NorFlash can only be used with save-data sizes 128K and 512K')
if writable_addr != '':
if not all([i in string.hexdigits for i in writable_addr]):
raise Exception('Invalid writable address')
# Defaults
if cci_header == '':
if regen_sig == '':
regen_sig = 'retail'
if size == '':
if save_data_size >= MB:
data_size = used_size + save_data_size
else:
data_size = used_size
if data_size < 128 * MB:
size = '128MB'
elif data_size < 256 * MB:
size = '256MB'
elif data_size < 512 * MB:
size = '512MB'
elif data_size < 1 * GB:
size = '1GB'
elif data_size < 2 * GB:
size = '2GB'
elif data_size < 4 * GB:
size = '4GB'
elif data_size < 8 * GB:
size = '8GB'
else:
raise Exception('NCCH partitions are too large')
if backup_write_wait_time == -1:
backup_write_wait_time = 0
if save_crypto == '':
save_crypto = 'fw3'
if card_device == '':
if save_data_size == 0 or save_data_size >= MB:
card_device = 'None'
else:
card_device = 'NorFlash'
if media_type == '':
if save_data_size >= MB:
media_type = 'CARD2'
else:
media_type = 'CARD1'
if card_type == '':
card_type = 'S1'
if cardbus_crypto == '':
if regen_sig == 'dev':
cardbus_crypto = 'fixed'
else:
cardbus_crypto = 'Secure0'
# Create (or modify) CCI header
if cci_header == '':
hdr = CCIHdr(b'\x00' * 0x200)
hdr.magic = b'NCSD'
else:
with open(cci_header, 'rb') as f:
hdr = CCIHdr(f.read())
if size != '':
hdr.ncsd_size = { '128MB': 128 * MB,
'256MB': 256 * MB,
'512MB': 512 * MB,
'1GB': 1 * GB ,
'2GB': 2 * GB ,
'4GB': 4 * GB ,
'8GB': 8 * GB }[size] // media_unit
titleID = bytes(ncch.hdr.titleID)
hdr.mediaID = (c_uint8 * sizeof(hdr.mediaID))(*titleID)
curr = 0x4000
for i in range(0, 64, 8):
for file in ncchs:
if file.startswith(f'content{i // 8}'):
hdr.partitions_offset_size[i:i + 4] = int32tobytes(curr // media_unit)
file_size = os.path.getsize(file)
hdr.partitions_offset_size[i + 4:i + 8] = int32tobytes(file_size // media_unit)
curr += file_size
for i in range(0, 64, 8):
for file in ncchs:
if file.startswith(f'content{i // 8}'):
tmp = NCCHReader(file, dev)
hdr.partitionIDs[i:i + 8] = bytes(tmp.hdr.titleID)
if backup_write_wait_time != -1:
hdr.flags[0] = backup_write_wait_time
if save_crypto != '':
if save_crypto == 'fw6':
hdr.flags[1] = 1
if card_device != '':
card_device = { 'NorFlash': 1,
'None': 2,
'BT': 3 }[card_device]
if save_crypto == 'fw2':
hdr.flags[7] = card_device
elif save_crypto in ('fw3', 'fw6'):
hdr.flags[3] = card_device
hdr.flags[4] = 1
if media_type != '':
hdr.flags[5] = { 'InnerDevice': 0,
'CARD1': 1,
'CARD2': 2,
'ExtendedDevice': 3 }[media_type]
if regen_sig == 'retail':
sig = Crypto.sign_rsa_sha256(CTR.test_mod, CTR.test_priv, bytes(hdr)[0x100:])
hdr.sig = (c_uint8 * sizeof(hdr.sig))(*sig)
elif regen_sig == 'dev':
sig = Crypto.sign_rsa_sha256(CTR.cci_mod[1], CTR.cci_priv[1], bytes(hdr)[0x100:])
hdr.sig = (c_uint8 * sizeof(hdr.sig))(*sig)
# Create (or modify) card info
if card_info == '':
cinfo = CardInfo(b'\x00' * 0x100)
else:
with open(card_info, 'rb') as f:
cinfo = CardInfo(f.read())
if (hdr.ncsd_size * media_unit / 2 < save_data_size) or (save_data_size > 2047 * MB):
raise Exception('Too large savedata size')
if card_info == '' and writable_addr == '': # Defaults
if media_type == 'CARD1':
writable_addr = hex(0xFFFFFFFF * media_unit)[2:]
else:
# unused_size: values related to the physical implementation of gamecards
if media_type == 'CARD1':
unused_size = { '128MB': 0x00280000,
'256MB': 0x00500000,
'512MB': 0x00a00000,
'1GB': 0x04680000,
'2GB': 0x08c80000,
'4GB': 0x11900000,
'8GB': 0x23000000 }[size]
elif media_type == 'CARD2':
unused_size = { '512MB': 0x02380000,
'1GB': 0x04680000,
'2GB': 0x08c80000,
'4GB': 0x11900000,
'8GB': 0x23000000 }[size]
if unused_size > 0:
writable_addr = hdr.ncsd_size * media_unit - unused_size - save_data_size # Nintendo's method of calculating writable region offset
else:
warnings.warn('Nintendo does not support CARD2 for the current ROM size, aligning save offset after last NCCH')
writable_addr = used_size + align(used_size, media_unit)
writable_addr = hex(writable_addr)[2:]
if writable_addr != '':
writable_addr = int(writable_addr, 16)
cinfo.writable_addr = writable_addr // media_unit
if card_type != '':
cinfo.card_flags &= 0b11011111 # Clear flag
cinfo.card_flags |= { 'S1': 0,
'S2': 1 }[card_type] << 5
if cardbus_crypto != '':
cinfo.card_flags &= 0b00111111 # Clear flag
cinfo.card_flags |= { 'Secure0': 0,
'Secure1': 1,
'Secure2': 2,
'fixed': 3 }[cardbus_crypto] << 6
# Create (or modify) mastering info
if mastering_info == '':
minfo = MasteringInfo(b'\x00' * 0x100)
else:
with open(mastering_info, 'rb') as f:
minfo = MasteringInfo(f.read())
minfo.media_size_used = used_size
if title_ver != -1:
minfo.title_ver = title_ver
if card_rev != -1:
minfo.card_rev = card_rev
cver_tids = ['000400db00017102',
'000400db00017202',
'000400db00017302',
'000400db00017402',
'000400db00017502',
'000400db00017602' ]
for i in ncchs:
if i.startswith('content7'):
sys.stdout = open(os.devnull, 'w') # Block print statements
upd = NCCHReader(i, dev)
upd.extract()
upd_romfs = RomFSReader('romfs.bin')
for path, info in upd_romfs.files.items():
tid = path.replace('.cia', '')
if tid in cver_tids:
f = open('romfs.bin', 'rb')
f.seek(info['offset'])
g = open(path, 'wb')
for data in read_chunks(f, info['size']):
g.write(data)
g.close()
f.close()
cia = CIAReader(path)
titleID_bytes = int64tobytes(int(tid, 16))
minfo.cver_titleID = (c_uint8 * sizeof(minfo.cver_titleID))(*titleID_bytes)
minfo.cver_title_ver = cia.tmd.hdr.title_ver
os.remove(path)
break
sys.stdout = sys.__stdout__
for i in os.listdir('.'):
if i in ['ncch_header.bin', 'exheader.bin', 'logo.bin', 'plain.bin', 'exefs.bin', 'romfs.bin']:
os.remove(i)
# Create initialdata
if initialdata == '':
idata = InitialData(b'\x00' * 0x200)
idata.keyY = (c_uint8 * sizeof(idata.keyY))(*titleID)
if cinfo.card_flags >> 6 == 3:
normal_key = b'\x00' * 16
else:
normal_key = CTR.key_scrambler(CTR.KeyX0x3B[0], readbe(bytes(idata.keyY)))
title_key = secrets.token_bytes(16) # Random
nonce = secrets.token_bytes(0xC) # Random
cipher = AES.new(normal_key, AES.MODE_CCM, nonce=nonce)
enc_titlekey, mac = cipher.encrypt_and_digest(title_key)
idata.enc_titlekey = (c_uint8 * sizeof(idata.enc_titlekey))(*enc_titlekey)
idata.mac = (c_uint8 * sizeof(idata.mac))(*mac)
idata.nonce = (c_uint8 * sizeof(idata.nonce))(*nonce)
idata.ncch_hdr_copy = (c_uint8 * sizeof(idata.ncch_hdr_copy))(*ncch_hdr)
else:
with open(initialdata, 'rb') as f:
idata = InitialData(f.read())
if cinfo.card_flags >> 6 == 3:
normal_key = b'\x00' * 16
else:
normal_key = CTR.key_scrambler(CTR.KeyX0x3B[0], readbe(bytes(idata.keyY)))
cipher = AES.new(normal_key, AES.MODE_CCM, nonce=bytes(idata.nonce))
title_key = cipher.decrypt(bytes(idata.enc_titlekey))
# Create card device info (if necessary)
if card_device_info == '':
cdinfo = CardDeviceInfo(b'\xFF' * 0x2E00)
else:
with open(card_device_info, 'rb') as f:
cdinfo = CardDeviceInfo(f.read())
if gen_card_device_info:
cdinfo.titlekey = (c_uint8 * sizeof(cdinfo.titlekey))(*title_key)
with open(os.path.join(resources_dir, 'test_pattern.bin'), 'rb') as f:
test_pattern = f.read()
cdinfo.test_pattern = (c_uint8 * sizeof(cdinfo.test_pattern))(*test_pattern)
# Write CCI
with open(out, 'wb') as f:
f.write(bytes(hdr))
f.write(bytes(cinfo))
f.write(bytes(minfo))
f.write(b'\x00' * 0xC00)
f.write(bytes(idata))
f.write(bytes(cdinfo))
for i in ncchs:
g = open(i, 'rb')
for data in read_chunks(g, os.path.getsize(i)):
f.write(data)
g.close()
f.write(b'\xFF' * (hdr.ncsd_size * media_unit - used_size))
print(f'Wrote to {out}')
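
For reference, a minimal sketch of driving `CCIReader` directly (the file name is a placeholder; pass `dev=1` for dev-crypted dumps):
```py
from lib.ctr_cci import CCIReader

cci = CCIReader('game.3ds', dev=0)
print(cci)       # header, card info, mastering info and initial data summary
cci.verify()     # NCSD header signature and initial-data titlekey MAC
cci.extract()    # dumps cci_header.bin, card_info.bin, ..., contentN.<name>.ncch
cci.decrypt()    # writes decrypted.3ds in the working directory
```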

lib/ctr_cdn.py Normal file

@@ -0,0 +1,143 @@
from .common import *
from .keys import *
from .ctr_tik import tikReader
from .ctr_tmd import TMDReader
class CDNReader:
def __init__(self, content_files, tmd, tik='', dev=0):
content_files.sort(key=lambda h: int(h.split('.')[0], 16))
self.content_files = content_files
self.tmd = tmd
self.tik = tik
self.dev = dev
self.tmd_read = TMDReader(tmd, dev)
if tik != '': # If ticket is present, parse ticket to get titlekey
self.tik_read = tikReader(tik, dev)
self.titlekey = self.tik_read.titlekey
else: # Use titlekey generation algorithm
self.titlekey = hextobytes(CTR.titlekey_gen(self.tmd_read.titleID, 'mypass'))
def decrypt(self):
for i in self.content_files:
for name, info in self.tmd_read.files.items():
if name.split('.')[1] == i: # CDN files are named as contentID
f = open(i, 'rb')
g = open(name, 'wb')
cipher = AES.new(self.titlekey, AES.MODE_CBC, iv=info['iv'])
for data in read_chunks(f, info['size']):
g.write(cipher.decrypt(data))
f.close()
g.close()
print(f'Decrypted {i} to {name}')
break
def verify(self):
tmd = self.tmd_read.verify(no_print=1)
hash_check = tmd[0]
for i in self.content_files:
for name, info in self.tmd_read.files.items():
if name.split('.')[1] == i:
f = open(i, 'rb')
name2 = '.'.join(name.split('.')[:-1]) # Remove extension so printout is short enough to be aligned
h = hashlib.sha256()
cipher = AES.new(self.titlekey, AES.MODE_CBC, iv=info['iv'])
for data in read_chunks(f, info['size']):
h.update(cipher.decrypt(data))
f.close()
hash_check.append((name2, h.digest() == info['hash']))
break
sig_check = []
if self.tik != '':
sig_check += self.tik_read.verify(no_print=1)
sig_check += tmd[1]
print('Hashes:')
for i in hash_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
print('Signatures:')
for i in sig_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
def __str__(self):
if self.tik != '':
tik = 'Ticket:\n' + ''.join([' ' + i + '\n' for i in self.tik_read.__str__().split('\n')])
else:
tik = ''
tmd = ''.join([' ' + i + '\n' for i in self.tmd_read.__str__().split('\n')])
return (
f'{tik}'
f'TMD:\n'
f'{tmd[:-1]}' # Remove last '\n'
)
class CDNBuilder:
def __init__(self, content_files=[], tik='', tmd='', dev=0, out_tik='tik_new'):
'''
content_files: list containing filenames of content files, which must each be named '[content index in hex, 4 chars].[contentID in hex, 8 chars].[ncch/nds]'
Certificate chain will be appended at the end of the following files:
- tik: path to ticket (optional)
- tmd: path to tmd
dev: 0 or 1 (if 1, use dev-crypto for ticket titlekey)
out_tik: path to output ticket with cert chain appended
'''
content_files.sort(key=lambda h: int(h.split('.')[0], 16))
self.content_files = content_files
self.tmd = tmd
self.tik = tik
self.dev = dev
self.tmd_read = TMDReader(tmd, dev)
if tik != '': # If ticket is present, parse ticket to get titlekey
self.tik_read = tikReader(tik, dev)
self.titlekey = self.tik_read.titlekey
else: # Use titlekey generation algorithm
self.titlekey = hextobytes(CTR.titlekey_gen(self.tmd_read.titleID, 'mypass'))
# Encrypt content files
for i in self.content_files:
info = self.tmd_read.files[i]
name = i.split('.')[1] # CDN files are named as contentID
f = open(i, 'rb')
g = open(name, 'wb')
cipher = AES.new(self.titlekey, AES.MODE_CBC, iv=info['iv'])
for data in read_chunks(f, info['size']):
g.write(cipher.encrypt(data))
f.close()
g.close()
print(f'Wrote to {name}')
# Append certificate chain to end of tmd (and tik)
name = f'tmd.{self.tmd_read.hdr.title_ver}'
with open(name, 'wb') as f:
with open(tmd, 'rb') as g:
f.write(g.read())
if dev == 0:
with open(os.path.join(resources_dir, 'CP0000000b.cert'), 'rb') as g:
f.write(g.read())
with open(os.path.join(resources_dir, 'CA00000003.cert'), 'rb') as g:
f.write(g.read())
elif dev == 1:
with open(os.path.join(resources_dir, 'CP0000000a.cert'), 'rb') as g:
f.write(g.read())
with open(os.path.join(resources_dir, 'CA00000004.cert'), 'rb') as g:
f.write(g.read())
print(f'Wrote to {name}')
if self.tik != '':
with open(f'{out_tik}', 'wb') as f:
with open(tik, 'rb') as g:
f.write(g.read())
if dev == 0:
with open(os.path.join(resources_dir, 'XS0000000c.cert'), 'rb') as g:
f.write(g.read())
with open(os.path.join(resources_dir, 'CA00000003.cert'), 'rb') as g:
f.write(g.read())
elif dev == 1:
with open(os.path.join(resources_dir, 'XS00000009.cert'), 'rb') as g:
f.write(g.read())
with open(os.path.join(resources_dir, 'CA00000004.cert'), 'rb') as g:
f.write(g.read())
print(f'Wrote to {out_tik}')
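
A minimal usage sketch for `CDNReader` (file names are placeholders; content files are expected to be named by their contentID, as noted in the code above):
```py
from lib.ctr_cdn import CDNReader

cdn = CDNReader(['00000001', '00000002'],   # CDN content files, named by contentID
                tmd='tmd',                  # TMD for the title
                tik='tik',                  # optional; the titlekey is derived if omitted
                dev=0)
cdn.verify()    # content hashes against the TMD, plus ticket/TMD signatures
cdn.decrypt()   # writes decrypted contents named per the TMD records
```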

lib/ctr_cia.py Normal file

@@ -0,0 +1,418 @@
from .common import *
from .keys import *
from .ctr_tik import signature_types, tikReader
from .ctr_tmd import TMDReader, TMDBuilder
from .ctr_ncch import NCCHReader
class CIAHdr(Structure):
_fields_ = [
('hdr_size', c_uint32), # 0x2020 bytes
('type', c_uint16),
('format_ver', c_uint16),
('cert_chain_size', c_uint32),
('tik_size', c_uint32),
('tmd_size', c_uint32),
('meta_size', c_uint32),
('content_size', c_uint64),
('content_index', c_uint8 * 0x2000),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class CertificateInfo(BigEndianStructure):
_pack_ = 1
_fields_ = [
('issuer', c_char * 0x40),
('key_type', c_uint32),
('name', c_char * 0x40),
('expiration_time', c_int32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class RSA4096PubKey(BigEndianStructure):
_pack_ = 1
_fields_ = [
('mod', c_uint8 * 0x200),
('pub_exp', c_uint32),
('reserved', c_uint8 * 0x34),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class RSA2048PubKey(BigEndianStructure):
_pack_ = 1
_fields_ = [
('mod', c_uint8 * 0x100),
('pub_exp', c_uint32),
('reserved', c_uint8 * 0x34),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class CIAReader:
def __init__(self, file, dev=0):
self.file = file
self.dev = dev
with open(file, 'rb') as f:
self.hdr = CIAHdr(f.read(0x2020))
# Get offsets for CIA components
curr = 0x2020
files = {}
files['cia_header.bin'] = {
'size': 0x2020,
'offset': 0,
'crypt': 'none',
}
curr += align(curr, 64)
files['cert.bin'] = {
'size': self.hdr.cert_chain_size,
'offset': curr,
'crypt': 'none',
}
curr += self.hdr.cert_chain_size
curr += align(curr, 64)
files['tik'] = {
'size': self.hdr.tik_size,
'offset': curr,
'crypt': 'none',
}
curr += self.hdr.tik_size
curr += align(curr, 64)
files['tmd'] = {
'size': self.hdr.tmd_size,
'offset': curr,
'crypt': 'none',
}
curr += self.hdr.tmd_size
curr += align(curr, 64)
# Parse ticket to get titlekey (the AES-CBC key)
with open(file, 'rb') as f:
f.seek(files['tik']['offset'])
with open('tik', 'wb') as g:
g.write(f.read(files['tik']['size']))
self.tik = tikReader('tik', dev)
os.remove('tik')
# Parse TMD to get content files offset, size, AES-CBC IV (if encrypted), hash
with open(file, 'rb') as f:
f.seek(files['tmd']['offset'])
with open('tmd', 'wb') as g:
g.write(f.read(files['tmd']['size']))
self.tmd = TMDReader('tmd', dev)
os.remove('tmd')
for i in self.tmd.files.keys():
content_index = int(i.split('.')[0], 16)
if self.hdr.content_index[content_index // 8] & (0b10000000 >> (content_index % 8)): # Check if content file listed in TMD actually exists in CIA (e.g. in the case of incomplete DLC CIA)
files[i] = self.tmd.files[i]
curr += align(curr, 64)
files[i]['offset'] = curr
if 'key' in files[i].keys():
files[i]['key'] = self.tik.titlekey
curr += files[i]['size']
if self.hdr.meta_size:
curr += align(curr, 64)
files['meta.bin'] = {
'size': self.hdr.meta_size,
'offset': curr,
'crypt': 'none',
}
curr += self.hdr.meta_size
self.files = files
def extract(self):
f = open(self.file, 'rb')
for name, info in self.files.items():
f.seek(info['offset'])
g = open(name, 'wb')
if info['crypt'] == 'none':
for data in read_chunks(f, info['size']):
g.write(data)
elif info['crypt'] == 'normal':
cipher = AES.new(info['key'], AES.MODE_CBC, iv=info['iv'])
for data in read_chunks(f, info['size']):
g.write(cipher.decrypt(data))
print(f'Extracted {name}')
g.close()
f.close()
def decrypt(self):
f = open(self.file, 'rb')
g = open('decrypted.cia', 'wb')
cur = 0
for name, info in self.files.items():
if cur < info['offset']: # Padding between CIA components
pad_size = info['offset'] - cur
g.write(b'\x00' * pad_size)
cur += pad_size
f.seek(info['offset'])
if name == 'tmd': # Modify TMD to remove crypt flags
with open('tmd', 'wb') as h:
h.write(f.read(info['size']))
if self.dev == 0:
TMDBuilder('tmd', crypt=0)
else:
TMDBuilder('tmd', crypt=0, regen_sig='dev')
with open('tmd_new', 'rb') as h:
g.write(h.read())
os.remove('tmd')
os.remove('tmd_new')
elif info['crypt'] == 'none':
for data in read_chunks(f, info['size']):
g.write(data)
elif info['crypt'] == 'normal':
cipher = AES.new(info['key'], AES.MODE_CBC, iv=info['iv'])
for data in read_chunks(f, info['size']):
g.write(cipher.decrypt(data))
cur += info['size']
f.close()
g.close()
print(f'Decrypted to decrypted.cia')
def verify(self):
f = open(self.file, 'rb')
tmd = self.tmd.verify(no_print=1)
hash_check = tmd[0]
for name, info in self.files.items(): # Content files
if name.endswith('nds') or name.endswith('ncch'):
f.seek(info['offset'])
name2 = '.'.join(name.split('.')[:-1]) # Remove extension so printout is short enough to be aligned
if info['crypt'] == 'none':
hash_check.append((name2, Crypto.sha256(f, info['size']) == info['hash']))
elif info['crypt'] == 'normal':
h = hashlib.sha256()
cipher = AES.new(info['key'], AES.MODE_CBC, iv=info['iv'])
for data in read_chunks(f, info['size']):
h.update(cipher.decrypt(data))
hash_check.append((name2, h.digest() == info['hash']))
sig_check = []
f.seek(self.files['cert.bin']['offset']) # CIA cert chain
ca_mod = b''
for i in range(3):
sig_type = readbe(f.read(4))
sig = f.read(signature_types[sig_type][0])
f.read(signature_types[sig_type][1]) # advance pointer
cert_info = CertificateInfo(f.read(0x88))
if cert_info.key_type == 0:
pubkey = RSA4096PubKey(f.read(0x238))
elif cert_info.key_type == 1:
pubkey = RSA2048PubKey(f.read(0x138))
if i == 0:
ca_mod = bytes(pubkey.mod) # store CA modulus to verify Ticket cert and TMD cert
sig_check.append(('CIA Cert (CA)', Crypto.verify_rsa_sha256(CTR.root_mod[self.dev], bytes(cert_info) + bytes(pubkey), sig)))
elif i == 1:
sig_check.append(('CIA Cert (XS)', Crypto.verify_rsa_sha256(ca_mod, bytes(cert_info) + bytes(pubkey), sig)))
elif i == 2:
sig_check.append(('CIA Cert (CP)', Crypto.verify_rsa_sha256(ca_mod, bytes(cert_info) + bytes(pubkey), sig)))
sig_check += self.tik.verify(no_print=1) + tmd[1]
f.close()
print('Hashes:')
for i in hash_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
print('Signatures:')
for i in sig_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
def __str__(self):
enabled_content_idxs = []
for i in range(0, 0x2000 * 8):
if self.hdr.content_index[i // 8] & (0b10000000 >> (i % 8)):
enabled_content_idxs.append(hex(i)[2:].zfill(4))
contents = ''
for i in enabled_content_idxs:
contents += f' > {i}\n'
tik = ''.join([' ' + i + '\n' for i in self.tik.__str__().split('\n')])
tmd = ''.join([' ' + i + '\n' for i in self.tmd.__str__().split('\n')])
return (
f'CIA:\n'
f' Enabled contents:\n'
f'{contents}'
f'Ticket:\n'
f'{tik}'
f'TMD:\n'
f'{tmd[:-1]}' # Remove last '\n'
)
class CIABuilder:
def __init__(self, certs='', content_files=[], tik='', tmd='', meta=1, dev=0, out='new.cia'):
'''
certs: path to certs (if not provided, use existing ones (dev=1 will use dev certs))
content_files: list containing filenames of content files, which must each be named '[content index in hex, 4 chars].[contentID in hex, 8 chars].[ncch/nds]'
tik: path to ticket
tmd: path to tmd
meta: 0 or 1 (whether to generate meta section)
dev: 0 or 1 (if 1, content files and ticket titlekey are dev-crypted)
out: path to output file
'''
# Checks
if content_files[0].endswith('nds') and meta:
raise Exception('Cannot generate meta section for TWL CIA')
# Create CIA header
hdr = CIAHdr(b'\x00' * 0x2020)
hdr.hdr_size = 0x2020
hdr.cert_chain_size = 0xA00
hdr.tik_size = os.path.getsize(tik)
hdr.tmd_size = os.path.getsize(tmd)
if meta:
hdr.meta_size = 0x3AC0
hdr.content_size = sum([os.path.getsize(i) for i in content_files])
content_files.sort(key=lambda h: int(h.split('.')[0], 16)) # Sort list of content files by content index
for i in content_files: # Enable content files present in content index
content_index = int(i.split('.')[0], 16)
hdr.content_index[content_index // 8] |= (0b10000000 >> (content_index % 8))
tik_read = tikReader(tik, dev)
tmd_read = TMDReader(tmd, dev)
# Write CIA
f = open(f'{out}', 'wb')
f.write(bytes(hdr))
curr = 0x2020
alignment = align(curr, 64)
if alignment:
f.write(b'\x00' * alignment)
curr += alignment
if certs != '':
with open(certs, 'rb') as g:
f.write(g.read())
elif dev == 0:
with open(os.path.join(resources_dir, 'CA00000003.cert'), 'rb') as g:
f.write(g.read())
with open(os.path.join(resources_dir, 'XS0000000c.cert'), 'rb') as g:
f.write(g.read())
with open(os.path.join(resources_dir, 'CP0000000b.cert'), 'rb') as g:
f.write(g.read())
elif dev == 1:
with open(os.path.join(resources_dir, 'CA00000004.cert'), 'rb') as g:
f.write(g.read())
with open(os.path.join(resources_dir, 'XS00000009.cert'), 'rb') as g:
f.write(g.read())
with open(os.path.join(resources_dir, 'CP0000000a.cert'), 'rb') as g:
f.write(g.read())
curr += hdr.cert_chain_size
alignment = align(curr, 64)
if alignment:
f.write(b'\x00' * alignment)
curr += alignment
with open(tik, 'rb') as g:
f.write(g.read())
curr += hdr.tik_size
alignment = align(curr, 64)
if alignment:
f.write(b'\x00' * alignment)
curr += alignment
with open(tmd, 'rb') as g:
f.write(g.read())
curr += hdr.tmd_size
alignment = align(curr, 64)
if alignment:
f.write(b'\x00' * alignment)
curr += alignment
for i in content_files:
tmd_info = tmd_read.files[i]
g = open(i, 'rb')
if 'key' in tmd_info.keys():
cipher = AES.new(tik_read.titlekey, AES.MODE_CBC, iv=tmd_info['iv'])
for data in read_chunks(g, tmd_info['size']):
f.write(cipher.encrypt(data))
else:
for data in read_chunks(g, tmd_info['size']):
f.write(data)
g.close()
curr += hdr.content_size
if meta:
ncch = NCCHReader(content_files[0], dev=dev)
if 'exheader.bin' in ncch.files.keys():
info = ncch.files['exheader.bin']
g = open(content_files[0], 'rb')
g.seek(info['offset'])
if ncch.is_decrypted:
exheader = g.read(info['size'])
else:
counter = Counter.new(128, initial_value=readbe(info['counter']))
cipher = AES.new(info['key'], AES.MODE_CTR, counter=counter)
exheader = cipher.decrypt(g.read(info['size']))
info = ncch.files['exefs.bin']
icon = b''
for off, size, key, name in info['files']:
if name == 'icon':
g.seek(info['offset'] + off)
if ncch.is_decrypted:
icon = g.read(size)
else:
counter = Counter.new(128, initial_value=readbe(info['counter']) + (off // 16))
cipher = AES.new(info['key'][key], AES.MODE_CTR, counter=counter)
cipher.decrypt(b'\0' * (off % 16))
icon = cipher.decrypt(g.read(size))
break
if icon == b'':
warnings.warn('Not generating meta section as no icon was found in ExeFS')
f.seek(0x14)
f.write(b'\x00' * 4) # Set meta size in header back to 0
else:
alignment = align(curr, 64)
if alignment:
f.write(b'\x00' * alignment)
curr += alignment
f.write(exheader[0x40:0x40 + 0x180]) # TitleID dependency list
f.write(b'\x00' * 0x180)
f.write(exheader[0x208:0x208 + 0x4]) # Core version
f.write(b'\x00' * 0xFC)
f.write(icon)
curr += hdr.meta_size
g.close()
else:
warnings.warn('Not generating meta section as NCCH does not have exheader')
f.seek(0x14)
f.write(b'\x00' * 4) # Set meta size in header back to 0
f.close()
print(f'Wrote to {out}')
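
And a sketch of the CIA classes above (paths are placeholders; pass `dev=1` when the input uses dev crypto):
```py
from lib.ctr_cia import CIAReader, CIABuilder

cia = CIAReader('game.cia', dev=0)
print(cia)      # enabled contents plus ticket and TMD summaries
cia.verify()    # content hashes, cert chain, ticket and TMD signatures
cia.extract()   # cia_header.bin, cert.bin, tik, tmd, <index>.<contentID>.ncch, ...

# Rebuild from the extracted parts; content files keep their '<index>.<contentID>.ncch' names
CIABuilder(content_files=['0000.00000000.ncch'], tik='tik', tmd='tmd', meta=1, dev=0, out='new.cia')
```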

lib/ctr_cnt.py Normal file

@@ -0,0 +1,73 @@
from .common import *
from .keys import *
class cntRecord(Structure):
_pack_ = 1
_fields_ = [
('offset', c_uint32),
('offset_end', c_uint32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class cntHdr(Structure):
_pack_ = 1
_fields_ = [
('magic', c_char * 4),
('unk', c_uint8 * 0xBFC),
('content_records', cntRecord * 0x100)
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class cntReader:
def __init__(self, cuplist, cnt): # files named 'CupList' and 'Contents.cnt' respectively
self.cuplist = cuplist
self.cnt = cnt
with open(cuplist, 'rb') as f:
cupdata = f.read()
tidlist = []
for i in range(0, 0x800, 8):
if cupdata[i:i + 8] == b'\x00' * 8:
break
tidlist.append(hex(readle(cupdata[i:i + 8]))[2:].zfill(16))
self.tidlist = tidlist
with open(cnt, 'rb') as f:
self.cnt_hdr = cntHdr(f.read(0x1400))
files = {}
for i in range(len(tidlist)):
files[f'{tidlist[i]}.cia'] = {
'size': self.cnt_hdr.content_records[i].offset_end - self.cnt_hdr.content_records[i].offset,
'offset': self.cnt_hdr.content_records[i].offset + 0x1400 - 2048
}
self.files = files
def extract(self):
output_dir = 'updates/'
if not os.path.isdir(output_dir):
os.mkdir(output_dir)
f = open(self.cnt, 'rb')
for name, info in self.files.items():
f.seek(info['offset'])
g = open(os.path.join(output_dir, name), 'wb')
for data in read_chunks(f, info['size']):
g.write(data)
g.close()
f.close()
print(f'Extracted to {output_dir}')
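
A short sketch of `cntReader` (the `CupList`/`Contents.cnt` pair it expects presumably comes from an extracted system updater title; file names follow the constructor comment above):
```py
from lib.ctr_cnt import cntReader

cnt = cntReader('CupList', 'Contents.cnt')
print(cnt.tidlist)   # TitleIDs read from CupList
cnt.extract()        # writes each '<titleID>.cia' into an updates/ directory
```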

lib/ctr_cro.py Normal file

@@ -0,0 +1,111 @@
from .common import *
from .keys import *
class croHdr(Structure):
_pack_ = 1
_fields_ = [
('hdr_hash', c_uint8 * 0x20),
('sect0_hash', c_uint8 * 0x20),
('sect1_hash', c_uint8 * 0x20),
('sect2_hash', c_uint8 * 0x20),
('magic', c_char * 4),
('name_offset', c_uint32),
('next_cro', c_uint32),
('prev_cro', c_uint32),
('file_size', c_uint32),
('bss_size', c_uint32),
('unk1', c_uint32),
('unk2', c_uint32),
('segment_offset_nnroControlObject', c_uint32),
('segment_offset_OnLoad', c_uint32),
('segment_offset_OnExit', c_uint32),
('segment_offset_OnUnresolved', c_uint32),
('code_offset', c_uint32),
('code_size', c_uint32),
('data_offset', c_uint32),
('data_size', c_uint32),
('module_name_offset', c_uint32),
('module_name_size', c_uint32),
('segment_table_offset', c_uint32),
('segment_table_count', c_uint32),
('named_export_table_offset', c_uint32),
('named_export_table_count', c_uint32),
('indexed_export_table_offset', c_uint32),
('indexed_export_table_count', c_uint32),
('export_strings_offset', c_uint32),
('export_strings_size', c_uint32),
('export_tree_offset', c_uint32),
('export_tree_count', c_uint32),
('import_module_table_offset', c_uint32),
('import_module_table_count', c_uint32),
('import_patches_offset', c_uint32),
('import_patches_count', c_uint32),
('named_import_table_offset', c_uint32),
('named_import_table_count', c_uint32),
('indexed_import_table_offset', c_uint32),
('indexed_import_table_count', c_uint32),
('anonymous_import_table_offset', c_uint32),
('anonymous_import_table_count', c_uint32),
('import_strings_offset', c_uint32),
('import_strings_size', c_uint32),
('unk3_offset', c_uint32),
('unk3_count', c_uint32),
('relocation_patches_offset', c_uint32),
('relocation_patches_count', c_uint32),
('unk4_offset', c_uint32),
('unk4_count', c_uint32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class croReader:
def __init__(self, file):
self.file = file
with open(file, 'rb') as f:
self.hdr = croHdr(f.read(0x138))
def verify(self):
f = open(self.file, 'rb')
hash_check = []
hash_check_info = [ # (name, offset to read from, size, expected hash)
('Header', 0x80, 0x100, bytes(self.hdr.hdr_hash)),
('Section 0', self.hdr.code_offset, self.hdr.code_size, bytes(self.hdr.sect0_hash)),
('Section 1', self.hdr.module_name_offset, self.hdr.data_offset - self.hdr.module_name_offset, bytes(self.hdr.sect1_hash)),
('Section 2', self.hdr.data_offset, self.hdr.data_size, bytes(self.hdr.sect2_hash)),
]
for name, off, size, hash_expected in hash_check_info:
f.seek(off)
h = hashlib.sha256()
h.update(f.read(size))
hash_check.append((name, h.digest() == hash_expected))
f.close()
print("Hashes:")
for i in hash_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
def regen_hash(self): # Overwrites existing file
f = open(self.file, 'r+b')
hash_info = [ # (name, offset to read from, size, offset to put hash)
('Header', 0x80, 0x100, 0),
('Section 0', self.hdr.code_offset, self.hdr.code_size, 0x20),
('Section 1', self.hdr.module_name_offset, self.hdr.data_offset - self.hdr.module_name_offset, 0x40),
('Section 2', self.hdr.data_offset, self.hdr.data_size, 0x60),
]
for _, off, size, hash_off in hash_info:
f.seek(off)
h = hashlib.sha256()
h.update(f.read(size))
f.seek(hash_off)
f.write(h.digest())
f.close()
print(f'{self.file} rehashed')
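
A minimal sketch for `croReader` (the file name is a placeholder):
```py
from lib.ctr_cro import croReader

cro = croReader('module.cro')
cro.verify()       # header and section SHA-256 hashes
cro.regen_hash()   # recompute the four hashes and write them back in place
```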

lib/ctr_crr.py Normal file

@@ -0,0 +1,137 @@
from .common import *
from .keys import *
class crrHdr(Structure):
_pack_ = 1
_fields_ = [
('magic', c_char * 4),
('reserved1', c_uint32),
('next_crr', c_uint32),
('prev_crr', c_uint32),
('debug_info_offset', c_uint32),
('debug_info_size', c_uint32),
('reserved2', c_uint64),
('unique_id_mask', c_uint32),
('unique_id_pattern', c_uint32),
('reserved3', c_uint8 * 0x18),
('crr_body_mod', c_uint8 * 0x100),
('sig', c_uint8 * 0x100),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class crrBodyHdr(Structure):
_pack_ = 1
_fields_ = [
('sig', c_uint8 * 0x100),
('unique_id', c_uint32),
('size', c_uint32),
('reserved1', c_uint64),
('hash_offset', c_uint32),
('hash_count', c_uint32),
('plain_offset', c_uint32),
('plain_size', c_uint32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class crrReader:
def __init__(self, file, dev=0):
self.file = file
self.dev = dev
with open(file, 'rb') as f:
self.hdr = crrHdr(f.read(0x240))
self.body_hdr = crrBodyHdr(f.read(0x120))
f.seek(self.body_hdr.hash_offset)
cro_hash_list = []
for i in range(self.body_hdr.hash_count):
cro_hash_list.append(f.read(0x20))
self.cro_hash_list = cro_hash_list
# If re-generating hash / want to verify CRO hashlist, place all CROs in the same directory as static.crr
self.current_dir = os.path.dirname(os.path.abspath(self.file))
self.cros = [i for i in os.listdir(self.current_dir) if i.endswith('.cro')]
if len(self.cros) != 0 and len(self.cros) != self.body_hdr.hash_count:
raise Exception(f'Expected {self.body_hdr.hash_count} CROs but found {len(self.cros)}')
def regen_hash(self): # Overwrites existing file
if len(self.cros) == 0:
raise Exception('Please place all CROs in the same directory as static.crr')
hashes = []
for i in self.cros:
with open(os.path.join(self.current_dir, i), 'rb') as g:
h = hashlib.sha256()
h.update(g.read(0x80))
hashes.append(h.digest())
hashes = [hex(readbe(i))[2:].zfill(64) for i in hashes]
hashes.sort()
hashes = [hextobytes(i) for i in hashes]
with open(self.file, 'r+b') as f:
f.seek(0x360)
f.write(b''.join(hashes))
print(f'{self.file} rehashed')
def regen_sig(self, dev=0): # Overwrites existing file
with open(self.file, 'r+b') as f:
# Body sig
f.seek(0x340)
crr_body = f.read(self.body_hdr.plain_offset - 0x340)
body_sig = Crypto.sign_rsa_sha256(CTR.crr_body_mod, CTR.crr_body_priv, crr_body)
f.seek(0x40)
f.write(CTR.crr_body_mod)
f.seek(0x240)
f.write(body_sig)
if dev == 1: # Header sig
f.seek(0x20)
data = f.read(0x120)
hdr_sig = Crypto.sign_rsa_sha256(CTR.crr_mod[1], CTR.crr_priv[1], data)
f.seek(0x140)
f.write(hdr_sig)
print(f'{self.file} resigned')
def verify(self):
f = open(self.file, 'rb')
hash_check = []
if len(self.cros) != 0:
hashes = []
for i in self.cros: # Check if sha256 of first 0x80 bytes of cro exists in cro hashlist
with open(os.path.join(self.current_dir, i), 'rb') as g:
h = hashlib.sha256()
h.update(g.read(0x80))
hashes.append(h.digest() in self.cro_hash_list)
hash_check.append(('CRO Hashlist', all(hashes)))
sig_check = []
sig_check.append(('CRR Header', Crypto.verify_rsa_sha256(CTR.crr_mod[self.dev], bytes(self.hdr)[0x20:0x140], bytes(self.hdr.sig))))
f.seek(0x340)
crr_body = f.read(self.body_hdr.plain_offset - 0x340)
sig_check.append(('CRR Body', Crypto.verify_rsa_sha256(bytes(self.hdr.crr_body_mod), crr_body, bytes(self.body_hdr.sig))))
f.close()
if hash_check != []:
print("Hashes:")
for i in hash_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
print("Signatures:")
for i in sig_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
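
Similarly for `crrReader` (as the class expects, every `.cro` must sit next to `static.crr` for the hashlist operations):
```py
from lib.ctr_crr import crrReader

crr = crrReader('static.crr', dev=0)
crr.verify()          # CRO hashlist plus header/body signatures
crr.regen_hash()      # rebuild the sorted CRO hashlist in place
crr.regen_sig(dev=1)  # re-sign the body; the header signature can only be regenerated with dev keys
```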

lib/ctr_exefs.py Normal file

@@ -0,0 +1,161 @@
from .common import *
from .keys import *
if platform.system() == 'Windows':
tool = os.path.join(resources_dir, '3dstool.exe')
elif platform.system() == 'Linux':
tool = os.path.join(resources_dir, '3dstool_linux')
elif platform.system() == 'Darwin':
tool = os.path.join(resources_dir, '3dstool_macos')
else:
raise Exception('Could not identify OS')
block_size = 0x200
class ExeFSFileHdr(Structure):
_fields_ = [
('name', c_char * 8),
('offset', c_uint32),
('size', c_uint32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class ExeFSHdr(Structure):
_pack_ = 1
_fields_ = [
('file_headers', ExeFSFileHdr * 10),
('reserved', c_uint8 * 0x20),
('file_hashes', c_uint8 * 0x140),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class ExeFSReader:
def __init__(self, file):
self.file = file
with open(file, 'rb') as f:
self.hdr = ExeFSHdr(f.read(0x200))
files = {}
for i in range(10):
file_hdr = self.hdr.file_headers[i]
if file_hdr.size:
files[f'{file_hdr.name.decode("utf-8")}.bin'] = {
'size': file_hdr.size,
'offset': 0x200 + file_hdr.offset
}
self.files = files
def extract(self, code_compressed=0):
f = open(self.file, 'rb')
for name, info in self.files.items():
f.seek(info['offset'])
g = open(name, 'wb')
for data in read_chunks(f, info['size']):
g.write(data)
print(f'Extracted {name}')
g.close()
if name == '.code.bin' and code_compressed:
proc = subprocess.Popen([tool, '-uvf', '.code.bin', '--compress-type', 'blz', '--compress-out', 'code-decompressed.bin'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
result = proc.communicate()
if result[0] == b'':
print('Decompressed to code-decompressed.bin')
else:
print(result[0].decode('utf-8'))
f.close()
def verify(self):
f = open(self.file, 'rb')
hash_check = []
hashes = [bytes(self.hdr.file_hashes[i * 0x20:(i + 1) * 0x20]) for i in range(10)]
hashes.reverse()
for i, (name, info) in enumerate(self.files.items()):
f.seek(info['offset'])
hash_check.append((name.replace('.bin', ''), Crypto.sha256(f, info['size']) == hashes[i]))
f.close()
print("Hashes:")
for i in hash_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
class ExeFSBuilder:
def __init__(self, exefs_dir='', code_compress=0, out='exefs.bin'):
'''
exefs_dir: path to directory containing files to be added to exefs (files must be named '.code.bin', 'banner.bin', 'icon.bin', 'logo.bin')
code_compress: 0 or 1
out: path to output file
'''
files = os.listdir(exefs_dir) # Contains filenames, not paths
files.sort()
hdr = ExeFSHdr(b'\x00' * 0x200)
if files[0] == '.code.bin' and code_compress == 1:
proc = subprocess.Popen([tool, '-zvf', os.path.join(exefs_dir, '.code.bin'), '--compress-type', 'blz', '--compress-out', os.path.join(exefs_dir, 'code-compressed.bin')], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
result = proc.communicate()
if result[0] == b'':
files[0] = 'code-compressed.bin'
else:
print(result[0].decode('utf-8'))
# Create ExeFS header
hashes = []
for i in range(len(files)):
if files[i] == 'code-compressed.bin':
hdr.file_headers[i].name = '.code'.encode('utf-8')
else:
hdr.file_headers[i].name = files[i].replace('.bin', '').encode('utf-8')
hdr.file_headers[i].size = os.path.getsize(os.path.join(exefs_dir, files[i]))
if i == 0:
hdr.file_headers[i].offset = 0
else:
hdr.file_headers[i].offset = roundup(hdr.file_headers[i - 1].offset + hdr.file_headers[i - 1].size, block_size)
f = open(os.path.join(exefs_dir, files[i]), 'rb')
hashes.append(Crypto.sha256(f, hdr.file_headers[i].size))
f.close()
for _ in range(len(files), 10):
hashes.append(b'\x00' * 0x20)
hashes.reverse()
hashes_all = b''.join(hashes)
hdr.file_hashes = (c_uint8 * sizeof(hdr.file_hashes))(*hashes_all)
# Write ExeFS
f = open(out, 'wb')
f.write(bytes(hdr))
curr = 0x200
for i in range(len(files)):
g = open(os.path.join(exefs_dir, files[i]), 'rb')
if curr < (hdr.file_headers[i].offset + 0x200):
pad_size = hdr.file_headers[i].offset + 0x200 - curr
f.write(b'\x00' * pad_size)
curr += pad_size
for data in read_chunks(g, hdr.file_headers[i].size):
f.write(data)
curr += hdr.file_headers[i].size
g.close()
f.write(b'\x00' * align(curr, block_size))
f.close()
if os.path.isfile(os.path.join(exefs_dir, 'code-compressed.bin')):
os.remove(os.path.join(exefs_dir, 'code-compressed.bin'))
print(f'Wrote to {out}')
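
A usage sketch for the ExeFS classes (directory layout and file names are examples; `.code.bin` (de)compression relies on the bundled 3dstool binary):
```py
from lib.ctr_exefs import ExeFSReader, ExeFSBuilder

exefs = ExeFSReader('exefs.bin')
exefs.extract(code_compressed=1)   # also decompresses .code.bin to code-decompressed.bin
exefs.verify()                     # per-file hashes from the ExeFS header

# Rebuild from a directory containing .code.bin / icon.bin / banner.bin / logo.bin
ExeFSBuilder(exefs_dir='exefs_dir/', code_compress=1, out='exefs_new.bin')
```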

lib/ctr_ncch.py Normal file

@@ -0,0 +1,685 @@
from .common import *
from .keys import *
from .ctr_exefs import ExeFSFileHdr
from .ctr_romfs import RomFSReader
media_unit = 0x200
class NCCHHdr(Structure):
_fields_ = [
('sig', c_uint8 * 0x100),
('magic', c_char * 4),
('ncch_size', c_uint32),
('titleID', c_uint8 * 8),
('maker_code', c_char * 2),
('format_ver', c_uint16),
('seed_hash', c_uint8 * 4),
('programID', c_uint8 * 8),
('reserved1', c_uint8 * 16),
('logo_hash', c_uint8 * 32),
('product_code', c_char * 16),
('exh_hash', c_uint8 * 32),
('exh_size', c_uint32),
('reserved2', c_uint32),
('flags', c_uint8 * 8),
('plain_offset', c_uint32),
('plain_size', c_uint32),
('logo_offset', c_uint32),
('logo_size', c_uint32),
('exefs_offset', c_uint32),
('exefs_size', c_uint32),
('exefs_hash_size', c_uint32),
('reserved4', c_uint32),
('romfs_offset', c_uint32),
('romfs_size', c_uint32),
('romfs_hash_size', c_uint32),
('reserved5', c_uint32),
('exefs_hash', c_uint8 * 32),
('romfs_hash', c_uint8 * 32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
# Returns the initial value for the AES-CTR counter
def get_ncch_counter(hdr, component):
counter = bytearray(b'\0' * 16)
if hdr.format_ver == 0 or hdr.format_ver == 2:
section = { 'exheader.bin': 0x01,
'exefs.bin': 0x02,
'romfs.bin': 0x03 }
counter[:8] = bytearray(hdr.titleID[::-1])
counter[8:9] = int8tobytes(section[component])
elif hdr.format_ver == 1:
if component == 'exheader.bin':
x = 0x200
elif component == 'exefs.bin':
x = hdr.exefs_offset * media_unit
elif component == 'romfs.bin':
x = hdr.romfs_offset * media_unit
counter[:8] = bytearray(hdr.titleID)
for i in range(4):
counter[12 + i] = (x >> ((3 - i) * 8)) & 0xFF
return bytes(counter)
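# Illustrative example (values not from a real title): for format_ver 0 or 2, a TitleID of
# 0004000000123456 (stored little-endian in the header) gives, for romfs.bin,
# counter = 00 04 00 00 00 12 34 56 03 00 00 00 00 00 00 00
# i.e. the big-endian TitleID in bytes 0-7, the section ID (0x03) in byte 8, zeros elsewhere.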
def get_seed(titleID: bytes):
with open(os.path.join(resources_dir, 'seeddb.bin'), 'rb') as f:
seed_count = readle(f.read(4))
f.seek(0x10)
seed = -1
for _ in range(seed_count):
entry = f.read(0x20)
if entry[:8] == titleID:
seed = entry[8:24]
if seed == -1:
raise Exception('Could not find TitleID in SEEDDB')
return seed
class NCCHReader:
def __init__(self, file, dev=0, build=0): # the 'build' parameter exists to facilitate the NCCHBuilder class
self.file = file
self.dev = dev
with open(file, 'rb') as f:
self.hdr = NCCHHdr(f.read(0x200))
# Parse flags
self.keyX_2 = { 0x00: CTR.KeyX0x2C,
0x01: CTR.KeyX0x25,
0x0A: CTR.KeyX0x18,
0x0B: CTR.KeyX0x1B }[self.hdr.flags[3]]
self.fixed_key = self.hdr.flags[7] & 0x1
self.no_romfs = self.hdr.flags[7] & 0x2
self.is_decrypted = self.hdr.flags[7] & 0x4
self.uses_seed = self.hdr.flags[7] & 0x20
# Generate keys
if self.fixed_key:
if readle(bytes(self.hdr.titleID)) & (0x10 << 32): # System category bit set in TitleID
self.normal_key = [hextobytes(hex(CTR.fixed_system)[2:]) for _ in range(2)]
else:
self.normal_key = [b'\0' * 16 for _ in range(2)]
else:
self.keyY = [bytes(self.hdr.sig)[:0x10], bytes(self.hdr.sig)[:0x10]]
self.keyX = [CTR.KeyX0x2C[dev], self.keyX_2[dev]]
if self.uses_seed: # Seed crypto results in a different second keyY (keyY[1])
seed = get_seed(bytes(self.hdr.titleID))
# Verify seed in SEEDDB
if hashlib.sha256(seed + self.hdr.titleID).digest()[:4] != bytes(self.hdr.seed_hash):
raise Exception('Seed in SEEDDB failed verification')
self.keyY[1] = hashlib.sha256(self.keyY[0] + seed).digest()[:16]
self.normal_key = [CTR.key_scrambler(self.keyX[i], readbe(self.keyY[i])) for i in range(2)]
# Get component offset, size, AES-CTR key and initial value for counter, hash and size of component to calculate hash over
# Exheader, ExeFS and RomFS are encrypted
files = {}
files['ncch_header.bin'] = {
'name': 'NCCH Header',
'size': 0x200,
'offset': 0,
'crypt': 'none'
}
if self.hdr.exh_size:
files['exheader.bin'] = {
'name': 'Exheader',
'size': 0x800,
'offset': 0x200,
'crypt': 'normal',
'key': self.normal_key[0],
'counter': get_ncch_counter(self.hdr, 'exheader.bin'),
'hashes': (bytes(self.hdr.exh_hash), 0x400)
}
if self.hdr.logo_offset:
files['logo.bin'] = {
'name': 'Logo',
'size': self.hdr.logo_size * media_unit,
'offset': self.hdr.logo_offset * media_unit,
'crypt': 'none',
'hashes': (bytes(self.hdr.logo_hash), self.hdr.logo_size * media_unit)
}
if self.hdr.plain_size:
files['plain.bin'] = {
'name': 'Plain',
'size': self.hdr.plain_size * media_unit,
'offset': self.hdr.plain_offset * media_unit,
'crypt': 'none',
}
if self.hdr.exefs_offset:
files['exefs.bin'] = {
'name': 'ExeFS',
'size': self.hdr.exefs_size * media_unit,
'offset': self.hdr.exefs_offset * media_unit,
'crypt': 'exefs',
'key': self.normal_key,
'counter': get_ncch_counter(self.hdr, 'exefs.bin'),
'hashes': (bytes(self.hdr.exefs_hash), self.hdr.exefs_hash_size * media_unit)
}
# ExeFS header, 'icon' and 'banner' use normal_key[0], all other files in ExeFS use normal_key[1]
counter = Counter.new(128, initial_value=readbe(files['exefs.bin']['counter']))
cipher = AES.new(self.normal_key[0], AES.MODE_CTR, counter=counter)
with open(file, 'rb') as f:
f.seek(self.hdr.exefs_offset * media_unit)
if self.is_decrypted or build:
exefs_file_hdr = f.read(0xA0)
else:
exefs_file_hdr = cipher.decrypt(f.read(0xA0))
exefs_files = [(0, 0x200, 0, 'header')] # Each tuple is (offset in ExeFS, size, normal_key index, name)
for i in range(10):
file_hdr = ExeFSFileHdr(exefs_file_hdr[i * 16:(i + 1) * 16])
if file_hdr.size:
name = file_hdr.name.decode('utf-8').strip('\0')
if name in ('icon', 'banner'):
exefs_files.append((0x200 + file_hdr.offset, file_hdr.size, 0, name))
else:
exefs_files.append((0x200 + file_hdr.offset, file_hdr.size, 1, name))
curr = 0x200 + file_hdr.offset + file_hdr.size
if align(curr, 0x200): # Padding between ExeFS files uses normal_key[0]
exefs_files.append((curr, align(curr, 0x200), 0, 'padding'))
files['exefs.bin']['files'] = exefs_files
if not self.no_romfs:
if self.hdr.romfs_offset:
files['romfs.bin'] = {
'name': 'RomFS',
'size': self.hdr.romfs_size * media_unit,
'offset': self.hdr.romfs_offset * media_unit,
'crypt': 'normal',
'key': self.normal_key[1],
'counter': get_ncch_counter(self.hdr, 'romfs.bin'),
'hashes': (bytes(self.hdr.romfs_hash), self.hdr.romfs_hash_size * media_unit)
}
self.files = files
def extract(self):
f = open(self.file, 'rb')
for name, info in self.files.items():
f.seek(info['offset'])
g = open(name, 'wb')
if self.is_decrypted or info['crypt'] == 'none':
for data in read_chunks(f, info['size']):
g.write(data)
elif info['crypt'] == 'normal':
counter = Counter.new(128, initial_value=readbe(info['counter']))
cipher = AES.new(info['key'], AES.MODE_CTR, counter=counter)
for data in read_chunks(f, info['size']):
g.write(cipher.decrypt(data))
elif info['crypt'] == 'exefs':
for off, size, key, _ in info['files']:
f.seek(info['offset'] + off)
counter = Counter.new(128, initial_value=readbe(info['counter']) + (off // 16)) # We have to set the counter manually (initial value increments by 1 per AES block i.e. 16 bytes) since we are decrypting an arbitrary portion (and not from beginning)
cipher = AES.new(info['key'][key], AES.MODE_CTR, counter=counter)
cipher.decrypt(b'\0' * (off % 16)) # Cipher has to be advanced manually also
for data in read_chunks(f, size):
g.write(cipher.decrypt(data))
print(f'Extracted {name}')
g.close()
f.close()
def decrypt(self):
f = open(self.file, 'rb')
g = open('decrypted.ncch', 'wb')
curr = 0
for name, info in self.files.items():
if curr < info['offset']: # Padding between NCCH components
pad_size = info['offset'] - curr
g.write(b'\x00' * pad_size)
curr += pad_size
f.seek(info['offset'])
if name == 'ncch_header.bin':
hdr_dec = self.hdr
hdr_dec.flags[3] = 0 # Set keyX_2 to Key 0x2C
hdr_dec.flags[7] |= 4 # Set NoCrypto flag
hdr_dec.flags[7] &= ~1 # Unset FixedCryptoKey flag
hdr_dec.flags[7] &= ~0x20 # Unset UseSeedCrypto flag
g.write(bytes(hdr_dec))
elif self.is_decrypted or info['crypt'] == 'none':
for data in read_chunks(f, info['size']):
g.write(data)
elif info['crypt'] == 'normal':
counter = Counter.new(128, initial_value=readbe(info['counter']))
cipher = AES.new(info['key'], AES.MODE_CTR, counter=counter)
for data in read_chunks(f, info['size']):
g.write(cipher.decrypt(data))
elif info['crypt'] == 'exefs':
for off, size, key, _ in info['files']:
f.seek(info['offset'] + off)
counter = Counter.new(128, initial_value=readbe(info['counter']) + (off // 16))
cipher = AES.new(info['key'][key], AES.MODE_CTR, counter=counter)
cipher.decrypt(b'\0' * (off % 16))
for data in read_chunks(f, size):
g.write(cipher.decrypt(data))
curr += info['size']
f.close()
g.close()
print(f'Decrypted to decrypted.ncch')
def verify(self):
f = open(self.file, 'rb')
# Hash checks
hash_check = []
for name, info in self.files.items():
if 'hashes' in info.keys():
hashes = info['hashes']
f.seek(info['offset'])
if self.is_decrypted or info['crypt'] == 'none':
hash_check.append((info['name'], Crypto.sha256(f, hashes[1]) == hashes[0]))
else:
h = hashlib.sha256()
counter = Counter.new(128, initial_value=readbe(info['counter']))
if info['crypt'] == 'normal':
cipher = AES.new(info['key'], AES.MODE_CTR, counter=counter)
elif info['crypt'] == 'exefs': # ExeFS hash is only over the ExeFS header (size 0x200), so we don't need to change the counter or the key
cipher = AES.new(info['key'][0], AES.MODE_CTR, counter=counter)
for data in read_chunks(f, hashes[1]):
h.update(cipher.decrypt(data))
hash_check.append((info['name'], h.digest() == hashes[0]))
# Signature checks
sig_check = []
if self.hdr.flags[5] & 0x2: # CXI
# Modulus for NCCH header signature is in accessdesc of exheader
f.seek(0x700)
if self.is_decrypted:
ncch_mod = f.read(0x100)
else:
info = self.files['exheader.bin']
counter = Counter.new(128, initial_value=readbe(info['counter']) + 0x500 // 16)
cipher = AES.new(info['key'], AES.MODE_CTR, counter=counter)
ncch_mod = cipher.decrypt(f.read(0x100))
sig_check.append(('NCCH Header', Crypto.verify_rsa_sha256(ncch_mod, bytes(self.hdr)[0x100:], bytes(self.hdr.sig))))
f.seek(0x600)
if self.is_decrypted:
data = f.read(0x400)
else:
info = self.files['exheader.bin']
counter = Counter.new(128, initial_value=readbe(info['counter']) + 0x400 // 16)
cipher = AES.new(info['key'], AES.MODE_CTR, counter=counter)
data = cipher.decrypt(f.read(0x400))
sig_check.append(('Exheader', Crypto.verify_rsa_sha256(CTR.accessdesc_mod[self.dev], data[0x100:], data[:0x100])))
elif self.hdr.flags[5] & 0x1: # CFA
sig_check.append(('NCCH Header', Crypto.verify_rsa_sha256(CTR.cfa_mod[self.dev], bytes(self.hdr)[0x100:], bytes(self.hdr.sig))))
f.close()
print("Hashes:")
for i in hash_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
print("Signatures:")
for i in sig_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
def __str__(self):
keyX_2 = { 0x00: 'Secure1 (Key 0x2C)',
0x01: 'Secure2 (Key 0x25)',
0x0A: 'Secure3 (Key 0x18)',
0x0B: 'Secure4 (Key 0x1B)' }
if self.is_decrypted:
crypto = 'None (Decrypted)'
elif self.fixed_key:
if readle(bytes(self.hdr.titleID)) & (0x10 << 32):
crypto = 'Fixed key (System)'
else:
crypto = 'Fixed key (Zero key)'
else:
crypto = keyX_2[self.hdr.flags[3]]
if self.uses_seed:
crypto += ' (KeyY seeded)'
platform = {
1: 'CTR',
2: 'SNAKE'
}
form_type = {
1: 'CFA',
2: 'CXI without RomFS',
3: 'CXI'
}
content_type = {
0: 'Application',
1: 'CTR System Update',
2: 'Manual',
3: 'Child',
4: 'Trial',
5: 'SNAKE System Update'
}
return (
f'TitleID: {hex(readle(self.hdr.titleID))[2:].zfill(16)}\n'
f'Maker code: {self.hdr.maker_code.decode("ascii")}\n'
f'Product code: {self.hdr.product_code.decode("ascii")}\n'
f'Flags:\n'
f' > Crypto method: {crypto}\n'
f' > Platform: {platform[self.hdr.flags[4]]}\n'
f' > Form type: {form_type[self.hdr.flags[5] & 0b11]}\n' # Lower 2 bits
f' > Content type: {content_type[self.hdr.flags[5] >> 2]}' # Bits 2-7
)
class NCCHBuilder:
def __init__(self, ncch_header='', exheader='', logo='', plain='', exefs='', romfs='', platform='', ncch_type='', maker_code='', product_code='', titleID='', programID='', crypto='', seed=0, regen_sig='', replace_tid=0, dev=0, out='new.ncch'):
'''
ncch_header, exheader, logo, plain, exefs, romfs: path to respective component (if available)
The following parameters are required if no NCCH header is provided; if both a header and a parameter are supplied, the parameter overrides the header
- platform: 'CTR' or 'SNAKE'
- ncch_type: 'CXI' or 'CTRSystemUpdate' or 'SNAKESystemUpdate' or 'Manual' or 'Child' or 'Trial'
- maker_code: maker code, e.g. '00'
- product_code: product code, e.g. 'CTR-P-CTAP'
- titleID: titleID in hex (if not provided, take from exheader), e.g. '000400000FF3FF00'
- programID: programID in hex (if not provided, use the titleID)
- crypto: 'none' / 'fixed' / 'Secure1' / 'Secure2' / 'Secure3' / 'Secure4'
- seed: 0 or 1
regen_sig: '' or 'retail' (test keys; CXI header signature only) or 'dev' (NCCH header and exheader signature)
replace_tid: 0 or 1 (replaces TitleID in NCCH header and exheader)
dev: 0 or 1
out: path to output file
'''
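# Example usage (paths are hypothetical; a sketch of a typical rebuild, not the only valid call):
# NCCHBuilder(ncch_header='ncch_header.bin', exheader='exheader.bin',
#             exefs='exefs.bin', romfs='romfs.bin', out='new.ncch')
# rebuilds an NCCH from the components written out by NCCHReader.extract()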
# Checks
if ncch_type != '' and ncch_type != 'CXI' and exheader != '':
warnings.warn('Ignoring exheader since NCCH type is CFA')
exheader = ''
if maker_code != '':
if not len(maker_code) == 2:
raise Exception('Maker code length must be 2')
if product_code != '':
if not all([i == '-' or i.isdigit() or i.isupper() for i in product_code]) or len(product_code) < 10 or len(product_code) > 16 or product_code[:3] not in ['CTR', 'KTR']:
raise Exception('Invalid product code')
if titleID != '':
if not all([i in string.hexdigits for i in titleID]) or len(titleID) != 16:
raise Exception('Invalid TitleID')
if programID != '':
if not all([i in string.hexdigits for i in programID]) or len(programID) != 16:
raise Exception('Invalid programID')
if seed == 1 and (not crypto.startswith('Secure')):
raise Exception('Seed crypto can only be used with Secure crypto')
# Defaults
if ncch_header == '' and regen_sig == '':
regen_sig = 'retail'
if regen_sig == 'dev':
dev = 1
# Create (or modify) NCCH header
if exheader != '' and (regen_sig != '' or replace_tid == 1):
shutil.copyfile(exheader, 'exheader_mod')
if ncch_header == '':
hdr = NCCHHdr(b'\x00' * 0x200)
hdr.magic = b'NCCH'
if romfs == '':
hdr.flags[7] |= 2
if titleID == '' and exheader != '':
with open(exheader, 'rb') as f:
f.seek(0x200)
titleID = hex(readle(f.read(8)))[2:].zfill(16)
else:
with open(ncch_header, 'rb') as f:
hdr = NCCHHdr(f.read())
if ncch_header == '' and programID == '': # Defaults
programID = titleID
if titleID != '':
titleID_bytes = int64tobytes(int(titleID, 16))
hdr.titleID = (c_uint8 * sizeof(hdr.titleID))(*titleID_bytes)
if replace_tid == 1: # Replace TitleID in exheader
if exheader != '':
with open('exheader_mod', 'r+b') as f:
offs = [0x1C8, 0x200, 0x600]
for off in offs:
f.seek(off)
f.write(hextobytes(titleID))
if programID != '':
programID_bytes = int64tobytes(int(programID, 16))
hdr.programID = (c_uint8 * sizeof(hdr.programID))(*programID_bytes)
if maker_code != '':
hdr.maker_code = maker_code.encode('ascii')
if ncch_type != '':
if ncch_type == 'CXI':
hdr.format_ver = 2
else:
hdr.format_ver = 0
if ncch_header != '': # Reset the content type flags first if an existing header was provided
hdr.flags[5] = 0
if ncch_type == 'CXI' and romfs == '':
hdr.flags[5] |= 2
elif ncch_type == 'CXI' and romfs != '':
hdr.flags[5] |= 3
else:
hdr.flags[5] |= 1
if ncch_type == 'CTRSystemUpdate':
hdr.flags[5] |= 4
elif ncch_type == 'Manual':
hdr.flags[5] |= 8
elif ncch_type == 'Child':
hdr.flags[5] |= 0xC
elif ncch_type == 'Trial':
hdr.flags[5] |= 0x10
elif ncch_type == 'SNAKESystemUpdate':
hdr.flags[5] |= 0x14
if product_code != '':
hdr.product_code = product_code.encode('ascii')
if platform != '':
if platform == 'CTR':
hdr.flags[4] = 1
elif platform == 'SNAKE':
hdr.flags[4] = 2
if crypto != '':
# Reset crypto flags
hdr.flags[7] &= ~1
hdr.flags[7] &= ~4
hdr.flags[7] &= ~0x20
if crypto == 'none':
hdr.flags[7] |= 4
elif crypto == 'fixed':
hdr.flags[7] |= 1
else:
hdr.flags[3] = { 'Secure1': 0x00,
'Secure2': 0x01,
'Secure3': 0x0A,
'Secure4': 0x0B }[crypto]
if seed == 1:
hdr.flags[7] |= 0x20
seed = get_seed(titleID_bytes)
seed_hash = hashlib.sha256(seed + hdr.titleID).digest()[:4]
hdr.seed_hash = (c_uint8 * sizeof(hdr.seed_hash))(*seed_hash)
# Modify exheader (if necessary)
if regen_sig == 'retail':
if exheader != '':
with open('exheader_mod', 'r+b') as f: # Replace NCCH header mod
f.seek(0x500)
f.write(CTR.test_mod)
elif regen_sig == 'dev':
if exheader != '':
with open('exheader_mod', 'r+b') as f:
# NCCH header mod
f.seek(0x500)
f.write(CTR.test_mod)
# Exheader signature
f.seek(0x500)
sig = Crypto.sign_rsa_sha256(CTR.accessdesc_mod[1], CTR.accessdesc_priv[1], f.read(0x300))
f.seek(0x400)
f.write(sig)
curr = 0x200
files = {}
size_check = []
if exheader != '':
if regen_sig != '' or replace_tid == 1:
exheader = 'exheader_mod'
hdr.exh_size = 0x400
f = open(exheader, 'rb')
h = Crypto.sha256(f, 0x400)
hdr.exh_hash = (c_uint8 * sizeof(hdr.exh_hash))(*h)
f.close()
curr += os.path.getsize(exheader)
files['exheader.bin'] = {
'path': exheader
}
if logo != '':
curr += align(curr, 0x200)
size_check.append(hdr.logo_size == os.path.getsize(logo) // media_unit)
hdr.logo_offset = curr // media_unit
hdr.logo_size = os.path.getsize(logo) // media_unit
f = open(logo, 'rb')
h = Crypto.sha256(f, os.path.getsize(logo))
hdr.logo_hash = (c_uint8 * sizeof(hdr.logo_hash))(*h)
f.close()
curr += os.path.getsize(logo)
files['logo.bin'] = {
'path': logo
}
if plain != '':
curr += align(curr, 0x200)
size_check.append(hdr.plain_size == os.path.getsize(plain) // media_unit)
hdr.plain_offset = curr // media_unit
hdr.plain_size = os.path.getsize(plain) // media_unit
curr += os.path.getsize(plain)
files['plain.bin'] = {
'path': plain
}
if exefs != '':
curr += align(curr, 0x200)
size_check.append(hdr.exefs_size == os.path.getsize(exefs) // media_unit)
hdr.exefs_offset = curr // media_unit
hdr.exefs_size = os.path.getsize(exefs) // media_unit
f = open(exefs, 'rb')
h = Crypto.sha256(f, 0x200)
hdr.exefs_hash = (c_uint8 * sizeof(hdr.exefs_hash))(*h)
f.close()
hdr.exefs_hash_size = 0x200 // media_unit
curr += os.path.getsize(exefs)
files['exefs.bin'] = {
'path': exefs
}
if romfs != '':
r = RomFSReader(romfs)
romfs_hash_size = roundup(0x60 + r.hdr.master_hash_size, media_unit) # RomFS hash in NCCH is over RomFS header + master hash
size_check.append(hdr.romfs_size == os.path.getsize(romfs) // media_unit)
if all(size_check): # RomFS offset may be 0x200 aligned ("SDK 2.x and prior" according to makerom). In order for rebuilt NCCH to match original, we don't overwrite the existing RomFS offset if all sizes in provided NCCH header match provided files
curr = hdr.romfs_offset * media_unit
else:
curr += align(curr, 0x1000)
hdr.romfs_offset = curr // media_unit
hdr.romfs_size = os.path.getsize(romfs) // media_unit
f = open(romfs, 'rb')
h = Crypto.sha256(f, romfs_hash_size)
hdr.romfs_hash = (c_uint8 * sizeof(hdr.romfs_hash))(*h)
f.close()
hdr.romfs_hash_size = romfs_hash_size // media_unit
curr += os.path.getsize(romfs)
files['romfs.bin'] = {
'path': romfs
}
hdr.ncch_size = curr // media_unit
# Generate header signature (if necessary)
if regen_sig == 'retail':
if hdr.flags[5] & 0x2: # CXI
sig = Crypto.sign_rsa_sha256(CTR.test_mod, CTR.test_priv, bytes(hdr)[0x100:])
hdr.sig = (c_uint8 * sizeof(hdr.sig))(*sig)
elif regen_sig == 'dev':
if hdr.flags[5] & 0x2: # CXI
sig = Crypto.sign_rsa_sha256(CTR.test_mod, CTR.test_priv, bytes(hdr)[0x100:])
hdr.sig = (c_uint8 * sizeof(hdr.sig))(*sig)
else: # CFA
sig = Crypto.sign_rsa_sha256(CTR.cfa_mod[1], CTR.cfa_priv[1], bytes(hdr)[0x100:])
hdr.sig = (c_uint8 * sizeof(hdr.sig))(*sig)
# Generate keys by using NCCHReader on dummy NCCH with only NCCH header and ExeFS header
with open('tmp', 'wb') as f:
f.write(bytes(hdr))
if hdr.exefs_offset:
f.seek(hdr.exefs_offset * media_unit)
with open(files['exefs.bin']['path'], 'rb') as g:
exefs_file_hdr = g.read(0xA0)
f.write(exefs_file_hdr)
ncch = NCCHReader('tmp', dev=dev, build=1)
# Write NCCH
f = open(f'{out}', 'wb')
f.write(bytes(hdr))
curr = 0x200
for name, info in files.items():
info.update(ncch.files[name])
g = open(info['path'], 'rb')
if curr < info['offset']:
pad_size = info['offset'] - curr
f.write(b'\x00' * pad_size)
curr += pad_size
if ncch.is_decrypted or info['crypt'] == 'none':
for data in read_chunks(g, info['size']):
f.write(data)
elif info['crypt'] == 'normal':
counter = Counter.new(128, initial_value=readbe(info['counter']))
cipher = AES.new(info['key'], AES.MODE_CTR, counter=counter)
for data in read_chunks(g, info['size']):
f.write(cipher.encrypt(data))
elif info['crypt'] == 'exefs':
for off, size, key, _ in info['files']:
g.seek(off)
counter = Counter.new(128, initial_value=readbe(info['counter']) + (off // 16))
cipher = AES.new(info['key'][key], AES.MODE_CTR, counter=counter)
cipher.encrypt(b'\0' * (off % 16))
for data in read_chunks(g, size):
f.write(cipher.encrypt(data))
curr += info['size']
g.close()
f.close()
os.remove('tmp')
if os.path.isfile('exheader_mod'):
os.remove('exheader_mod')
print(f'Wrote to {out}')

436
lib/ctr_romfs.py Normal file
View File

@ -0,0 +1,436 @@
from .common import *
from .keys import *
unused = 0xFFFFFFFF
block_size = 0x1000
class RomFSHdr(Structure):
_pack_ = 1
_fields_ = [
('magic', c_char * 4),
('magic_num', c_uint32),
('master_hash_size', c_uint32),
('lvl1_logical_offset', c_uint64),
('lvl1_hash_size', c_uint64),
('lvl1_block_size', c_uint32),
('reserved1', c_uint32),
('lvl2_logical_offset', c_uint64),
('lvl2_hash_size', c_uint64),
('lvl2_block_size', c_uint32),
('reserved2', c_uint32),
('lvl3_logical_offset', c_uint64),
('lvl3_size', c_uint64),
('lvl3_block_size', c_uint32),
('reserved3', c_uint32),
('hdr_size', c_uint32),
('optional_size', c_uint32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class RomFSL3Hdr(Structure):
_pack_ = 1
_fields_ = [
('hdr_len', c_uint32),
('dir_hash_off', c_uint32),
('dir_hash_len', c_uint32),
('dir_meta_off', c_uint32),
('dir_meta_len', c_uint32),
('file_hash_off', c_uint32),
('file_hash_len', c_uint32),
('file_meta_off', c_uint32),
('file_meta_len', c_uint32),
('file_data_off', c_uint32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class RomFSDirMetaRecord(Structure):
_pack_ = 1
_fields_ = [
('parent_off', c_uint32),
('next_dir_off', c_uint32),
('first_child_dir_off', c_uint32),
('first_file_off', c_uint32),
('hash_pointer', c_uint32),
('name_len', c_uint32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class RomFSFileMetaRecord(Structure):
_pack_ = 1
_fields_ = [
('parent_off', c_uint32),
('next_file_off', c_uint32),
('data_off', c_uint64),
('data_len', c_uint64),
('hash_pointer', c_uint32),
('name_len', c_uint32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
def get_hash_table_len(num):
count = num
if num < 3:
count = 3
elif num < 19:
count |= 1
else:
while (count % 2 == 0
or count % 3 == 0
or count % 5 == 0
or count % 7 == 0
or count % 11 == 0
or count % 13 == 0
or count % 17 == 0):
count += 1
return count
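# get_hash_table_len picks the bucket count for the RomFS dir/file hash tables: at least 3,
# forced odd below 19 entries, and from 19 entries up bumped until it is not divisible by any
# prime up to 17, so the modulo in calc_path_hash below spreads entries reasonably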
def calc_path_hash(name, parent_off):
h = parent_off ^ 123456789
for j in range(len(name) // 2):
i = j * 2
h = (h >> 5) | (h << 27)
h ^= (name[i]) | (name[i + 1] << 8)
h &= 0xFFFFFFFF
return h
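# Illustrative example (file name assumed): a file 'icon.bin' directly under the RomFS root hashes as
# calc_path_hash('icon.bin'.encode('utf_16_le'), 0)
# i.e. the UTF-16LE name is hashed with the parent dir's meta offset (0 for the root) as the seed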
class RomFSReader:
def __init__(self, file):
self.file = file
with open(file, 'rb') as f:
self.hdr = RomFSHdr(f.read(0x5C))
self.lvl1_block_size = 1 << self.hdr.lvl1_block_size
self.lvl2_block_size = 1 << self.hdr.lvl2_block_size
self.lvl3_block_size = 1 << self.hdr.lvl3_block_size
# Get offsets for RomFS components
hashes = {}
curr = 0x60
hashes['Master Hash'] = {
'size': self.hdr.master_hash_size,
'offset': curr
}
curr += hashes['Master Hash']['size']
curr += align(curr, self.lvl3_block_size)
self.lvl3_offset = curr
curr += self.hdr.lvl3_size
curr += align(curr, self.lvl1_block_size)
hashes['Level 1'] = {
'size': self.hdr.lvl1_hash_size,
'offset': curr
}
curr += hashes['Level 1']['size']
curr += align(curr, self.lvl2_block_size)
hashes['Level 2'] = {
'size': self.hdr.lvl2_hash_size,
'offset': curr
}
curr += hashes['Level 2']['size']
self.hashes = hashes
# Parse level 3 (actual data)
self.files = {}
self.dirs = [] # Save all dir paths in case of empty dir
def valid(a):
return a != unused
def extract_file(file_offset, parent_name):
while valid(file_offset):
f.seek(self.lvl3_offset + self.lvl3_hdr.file_meta_off + file_offset)
file_meta = RomFSFileMetaRecord(f.read(0x20))
name = f.read(file_meta.name_len).decode('utf-16le')
name2 = os.path.join(parent_name, name)
self.files[name2] = {
'size': file_meta.data_len,
'offset': self.lvl3_offset + self.lvl3_hdr.file_data_off + file_meta.data_off
}
file_offset = file_meta.next_file_off
def extract_dir(dir_offset, parent_name):
while valid(dir_offset):
f.seek(self.lvl3_offset + self.lvl3_hdr.dir_meta_off + dir_offset)
dir_meta = RomFSDirMetaRecord(f.read(0x18))
name = f.read(dir_meta.name_len).decode('utf-16le')
name2 = os.path.join(parent_name, name)
self.dirs.append(name2)
if valid(dir_meta.first_file_off):
extract_file(dir_meta.first_file_off, name2)
if valid(dir_meta.first_child_dir_off):
extract_dir(dir_meta.first_child_dir_off, name2)
dir_offset = dir_meta.next_dir_off
with open(file, 'rb') as f:
f.seek(self.lvl3_offset)
self.lvl3_hdr = RomFSL3Hdr(f.read(0x28))
extract_dir(0, '')
def extract(self):
output_dir = 'romfs/'
f = open(self.file, 'rb')
for i in self.dirs:
path = os.path.join(output_dir, i)
if not os.path.isdir(path):
os.makedirs(path, exist_ok=True) # Same function as mkdir -p
for path, info in self.files.items():
f.seek(info['offset'])
g = open(os.path.join(output_dir, path), 'wb')
for data in read_chunks(f, info['size']):
g.write(data)
g.close()
f.close()
print(f'Extracted to {output_dir}')
def verify(self):
f = open(self.file, 'rb')
hash_check = []
hash_check_info = [
('Master Hash', self.hashes['Level 1']['offset'], self.lvl1_block_size), # Master hash verifies level 1
('Level 1', self.hashes['Level 2']['offset'], self.lvl2_block_size), # Level 1 verifies level 2
('Level 2', self.lvl3_offset, self.lvl3_block_size) # Level 2 verifies level 3
]
for name, off, block_size in hash_check_info:
f.seek(self.hashes[name]['offset'])
hashes = f.read(self.hashes[name]['size'])
num_blocks = len(hashes) // 0x20
checks = []
f.seek(off)
for i in range(num_blocks):
h = hashlib.sha256()
h.update(f.read(block_size))
checks.append(h.digest() == hashes[i * 0x20:(i + 1) * 0x20])
hash_check.append((name, all(checks)))
f.close()
print("Hashes:")
for i in hash_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
class RomFSBuilder:
def __init__(self, romfs_dir='', out='romfs.bin'):
'''
romfs_dir: path to the directory whose contents will be added to the RomFS
out: path to output file
'''
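# Example usage (a sketch; the directory name is just the one RomFSReader.extract() produces):
# RomFSBuilder(romfs_dir='romfs/', out='romfs.bin')
# packs the extracted 'romfs/' tree back into a romfs.bin image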
# Find total number of files and dirs to get length of file hash table and dir hash table
num_files = 0
num_dirs = 1
for root, dirs, files in os.walk(romfs_dir):
num_files += len(files)
num_dirs += len(dirs)
file_hash_table = [unused] * get_hash_table_len(num_files)
dir_hash_table = [unused] * get_hash_table_len(num_dirs)
# Create dir meta record for root dir
dir_meta_table = []
file_meta_table = []
file_data = []
dir_meta_off = file_meta_off = file_data_off = file_data_size = 0
root_dir_meta = RomFSDirMetaRecord(b'\x00' * 0x18)
root_dir_meta.next_dir_off = root_dir_meta.first_child_dir_off = root_dir_meta.first_file_off = unused
hash_index = calc_path_hash(b'', 0) % len(dir_hash_table)
root_dir_meta.hash_pointer = dir_hash_table[hash_index]
dir_hash_table[hash_index] = dir_meta_off
dir_meta_off += 0x18
dir_meta_table.append([root_dir_meta, b''])
# Recursively traverse romfs_dir to fill in dir meta, dir hash, file meta, file hash tables
def add_dir_children(path, parent_dir_off, parent_dir_idx):
nonlocal dir_meta_off, file_meta_off, file_data_off, file_data_size
objs = os.listdir(path)
files = []
dirs = []
for i in objs:
path2 = os.path.join(path, i)
if os.path.isfile(path2):
files.append(path2)
elif os.path.isdir(path2):
dirs.append(path2)
files.sort(key = lambda c: os.path.basename(c).upper())
dirs.sort(key = lambda c: os.path.basename(c).upper())
for i in range(len(files)):
if i == 0: # set parent dir_meta's first_file_off
dir_meta_table[parent_dir_idx][0].first_file_off = file_meta_off
file_meta = RomFSFileMetaRecord(b'\x00' * 0x20)
file_meta.parent_off = parent_dir_off
file_meta.data_off = file_data_off
file_meta.data_len = os.path.getsize(files[i])
utf16name = os.path.basename(files[i]).encode('utf_16_le')
hash_index = calc_path_hash(utf16name, parent_dir_off) % len(file_hash_table)
file_meta.hash_pointer = file_hash_table[hash_index]
file_hash_table[hash_index] = file_meta_off # separate chaining hash table, newly added file/dir is added as head element of linked list
file_meta.name_len = len(utf16name)
file_meta_off += 0x20 + len(utf16name) + align(len(utf16name), 4)
if i != len(files) - 1:
file_meta.next_file_off = file_meta_off
else:
file_meta.next_file_off = unused
file_data_off += file_meta.data_len + align(file_meta.data_len, 16)
file_meta_table.append([file_meta, utf16name])
file_data.append([files[i], file_meta.data_len])
file_data_size += align(file_data_size, 16)
file_data_size += file_meta.data_len
child_dirs = []
for i in range(len(dirs)):
if i == 0: # set parent dir_meta's first_child_dir_off
dir_meta_table[parent_dir_idx][0].first_child_dir_off = dir_meta_off
dir_meta = RomFSDirMetaRecord(b'\x00' * 0x18)
dir_meta.first_child_dir_off = dir_meta.first_file_off = unused
dir_meta.parent_off = parent_dir_off
utf16name = os.path.basename(dirs[i]).encode('utf_16_le')
hash_index = calc_path_hash(utf16name, parent_dir_off) % len(dir_hash_table)
dir_meta.hash_pointer = dir_hash_table[hash_index]
dir_hash_table[hash_index] = dir_meta_off
child_dirs.append((dirs[i], dir_meta_off, len(dir_meta_table))) # current dir_meta will have index len(dir_meta_table) after it is appended
dir_meta.name_len = len(utf16name)
dir_meta_off += 0x18 + len(utf16name) + align(len(utf16name), 4)
if i != len(dirs) - 1:
dir_meta.next_dir_off = dir_meta_off
else:
dir_meta.next_dir_off = unused
dir_meta_table.append([dir_meta, utf16name])
for path, dir_off, dir_idx in child_dirs: # current dir's subdirs are all added to dir_meta_table before subdir's subdirs are added
add_dir_children(path, dir_off, dir_idx)
add_dir_children(romfs_dir, 0, 0)
# Create level 3 header
lvl3_hdr = RomFSL3Hdr(b'\x00' * 0x28)
offset = 0x28
lvl3_hdr.hdr_len = 0x28
lvl3_hdr.dir_hash_off = offset
lvl3_hdr.dir_hash_len = 4 * len(dir_hash_table)
offset += lvl3_hdr.dir_hash_len
lvl3_hdr.dir_meta_off = offset
lvl3_hdr.dir_meta_len = dir_meta_off
offset += lvl3_hdr.dir_meta_len
lvl3_hdr.file_hash_off = offset
lvl3_hdr.file_hash_len = 4 * len(file_hash_table)
offset += lvl3_hdr.file_hash_len
lvl3_hdr.file_meta_off = offset
lvl3_hdr.file_meta_len = file_meta_off
offset += lvl3_hdr.file_meta_len
offset += align(offset, 16)
lvl3_hdr.file_data_off = offset
# Create RomFS header
hdr = RomFSHdr(b'\x00' * 0x5C)
hdr.magic = b'IVFC'
hdr.magic_num = 65536
hdr.lvl1_block_size = hdr.lvl2_block_size = hdr.lvl3_block_size = int(math.log2(block_size))
hdr.lvl3_size = lvl3_hdr.file_data_off + file_data_size
hdr.lvl2_hash_size = roundup(hdr.lvl3_size, block_size) // block_size * 0x20
hdr.lvl1_hash_size = roundup(hdr.lvl2_hash_size, block_size) // block_size * 0x20
hdr.master_hash_size = roundup(hdr.lvl1_hash_size, block_size) // block_size * 0x20
hdr.lvl2_logical_offset = roundup(hdr.lvl1_logical_offset + hdr.lvl1_hash_size, block_size)
hdr.lvl3_logical_offset = roundup(hdr.lvl2_logical_offset + hdr.lvl2_hash_size, block_size)
hdr.hdr_size = 0x5C
# Calculate offsets
lvl3_off = roundup(0x60 + hdr.master_hash_size, block_size)
lvl1_off = lvl3_off + roundup(hdr.lvl3_size, block_size)
lvl2_off = lvl1_off + roundup(hdr.lvl1_hash_size, block_size)
# Write RomFS header and level 3
with open(out, 'wb') as f:
f.write(bytes(hdr))
f.write(b'\x00' * (lvl3_off - f.tell()))
f.write(bytes(lvl3_hdr))
for i in dir_hash_table:
f.write(int32tobytes(i))
for dir_meta, name in dir_meta_table:
f.write(bytes(dir_meta))
f.write(name)
f.write(b'\x00' * align(len(name), 4))
for i in file_hash_table:
f.write(int32tobytes(i))
for file_meta, name in file_meta_table:
f.write(bytes(file_meta))
f.write(name)
f.write(b'\x00' * align(len(name), 4))
for file, size in file_data:
f.write(b'\x00' * align(f.tell(), 16))
g = open(file, 'rb')
for data in read_chunks(g, size):
f.write(data)
g.close()
# Calculate and write master hash, level 1, level 2
hash_info = [ (lvl3_off, hdr.lvl2_hash_size, lvl2_off),
(lvl2_off, hdr.lvl1_hash_size, lvl1_off),
(lvl1_off, hdr.master_hash_size, 0x60) ]
with open(out, 'r+b') as f:
f.seek(lvl3_off + hdr.lvl3_size)
f.write(b'\x00' * (lvl2_off + hdr.lvl2_hash_size - f.tell()))
f.write(b'\x00' * align(f.tell(), block_size)) # padding after level 2
for off_read, size, off_write in hash_info:
for i in range(size // 0x20):
f.seek(off_read + i * block_size)
h = hashlib.sha256()
h.update(f.read(block_size))
f.seek(off_write + i * 0x20)
f.write(h.digest())
print(f'Wrote to {out}')

218
lib/ctr_tik.py Normal file
View File

@ -0,0 +1,218 @@
from .common import *
from .keys import *
class tikData(BigEndianStructure):
_pack_ = 1
_fields_ = [
('issuer', c_char * 0x40),
('ecc_pubkey', c_uint8 * 0x3C),
('format_ver', c_uint8),
('ca_crl_ver', c_uint8),
('signer_crl_ver', c_uint8),
('enc_titlekey', c_uint8 * 16),
('reserved1', c_uint8),
('ticketID', c_uint64),
('consoleID', c_uint32),
('titleID', c_uint8 * 8),
('reserved2', c_uint16),
('title_ver', c_uint16),
('reserved3', c_uint64),
('license_type', c_uint8),
('common_key_index', c_uint8),
('reserved4', c_uint8 * 0x2A),
('eshop_acc_id', c_uint8 * 4),
('reserved5', c_uint8),
('audit', c_uint8),
('reserved6', c_uint8 * 0x42),
('limits', c_uint8 * 0x40),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
signature_types = { # Each tuple is (signature size, size of padding after signature)
# RSA_4096 SHA1 (unused on 3DS)
0x00010000: (0x200, 0x3C),
# RSA_2048 SHA1 (unused on 3DS)
0x00010001: (0x100, 0x3C),
# Elliptic Curve with SHA1 (unused on 3DS)
0x00010002: (0x3C, 0x40),
# RSA_4096 SHA256
0x00010003: (0x200, 0x3C),
# RSA_2048 SHA256
0x00010004: (0x100, 0x3C),
# ECDSA with SHA256
0x00010005: (0x3C, 0x40),
}
class tikReader:
def __init__(self, file, dev=0):
self.file = file
self.dev = dev
with open(file, 'rb') as f:
sig_type = readbe(f.read(4))
self.sig = f.read(signature_types[sig_type][0])
padding = f.read(signature_types[sig_type][1])
self.data = tikData(f.read(0x164))
self.content_index_hdr = f.read(0x28)
self.content_index_offset = f.read(4)
self.content_index = f.read(0x80)
# Decrypt TitleKey
normal_key = CTR.key_scrambler(CTR.KeyX0x3D[dev], CTR.KeyY0x3D[self.data.common_key_index][dev])
cipher = AES.new(normal_key, AES.MODE_CBC, iv=bytes(self.data.titleID)+(b'\0'*8))
self.titlekey = cipher.decrypt(bytes(self.data.enc_titlekey))
def verify(self, no_print=0): # 'no_print' parameter to facilitate CIAReader.verify()
sig_check = []
sig_check.append(('Ticket', Crypto.verify_rsa_sha256(CTR.tik_mod[self.dev], bytes(self.data) + self.content_index_hdr + self.content_index_offset + self.content_index, self.sig)))
if no_print == 0:
print('Signatures:')
for i in sig_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
return sig_check
def __str__(self):
enabled_content_idxs = []
for i in range(0, 0x80 * 8):
if self.content_index[i // 8] & (1 << (i % 8)):
enabled_content_idxs.append(hex(i)[2:].zfill(4))
contents = ''
for i in enabled_content_idxs:
contents += f' > {i}\n'
contents = contents[:-1] # Remove last '\n'
if self.content_index == b'\xff' * 0x80: # If all content indexes are enabled, make printout shorter
contents = f' > 0000 \n ...\n > 03ff'
return (
f'TitleKey: {hex(readbe(self.data.enc_titlekey))[2:].zfill(32)} (decrypted: {hex(readbe(self.titlekey))[2:].zfill(32)})\n'
f'TicketID: {hex(self.data.ticketID)[2:].zfill(16)}\n'
f'ConsoleID: {hex(self.data.consoleID)[2:].zfill(8)}\n'
f'TitleID: {hex(readbe(bytes(self.data.titleID)))[2:].zfill(16)}\n'
f'Title version: {self.data.title_ver}\n'
f'Common KeyY index: {self.data.common_key_index}\n'
f'eShop account ID: {hex(readle(bytes(self.data.eshop_acc_id)))[2:].zfill(8)}\n' # ctrtool shows this as LE
f'Enabled contents:\n'
f'{contents}'
)
class tikBuilder:
def __init__(self, tik='', titleID='', title_ver=-1, ticketID='', consoleID='', eshop_acc_id='', titlekey='', common_key_index=-1, regen_sig='', out='tik_new'):
'''
tik: path to ticket (if available)
The following parameters are required if no ticket is provided; if both a ticket and a parameter are supplied, the parameter overrides the ticket
- titleID: titleID in hex, e.g. '000400000FF3FF00'
- title_ver: title version in decimal
- ticketID, consoleID, eshop_acc_id: in hex
- titlekey: decrypted title key in hex (if not provided, use titlekey generation algorithm)
- common_key_index: 0 or 1 or 2 or 3 or 4 or 5
regen_sig: '' or 'retail' (test keys) or 'dev'
out: path to output file
'''
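# Example usage (values are illustrative only):
# tikBuilder(titleID='000400000FF3FF00', title_ver=0, regen_sig='retail', out='tik_new')
# builds a fresh test-signed ticket; since no titlekey is given, one is derived via CTR.titlekey_gen()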
# Checks
if titleID != '':
if not all([i in string.hexdigits for i in titleID]) or len(titleID) != 16:
raise Exception('Invalid TitleID')
if titlekey != '':
if not all([i in string.hexdigits for i in titlekey]) or len(titlekey) != 32:
raise Exception('Invalid TitleKey')
# Defaults
if tik == '':
if regen_sig == '':
regen_sig = 'retail'
if ticketID == '':
ticketID = '0'
if consoleID == '':
consoleID = '0'
if eshop_acc_id == '':
eshop_acc_id = '0'
if common_key_index == -1:
common_key_index = 0
# Create (or modify) ticket data
if tik == '':
data = tikData(b'\x00' * 0x164)
data.format_ver = 1
data.audit = 1
if titlekey == '':
titlekey = CTR.titlekey_gen(titleID, 'mypass')
else:
with open(tik, 'rb') as f:
sig_type = readbe(f.read(4))
sig = f.read(signature_types[sig_type][0])
padding = f.read(signature_types[sig_type][1])
data = tikData(f.read(0x164))
content_index_hdr = f.read(0x28)
content_index_offset = f.read(4)
content_index = f.read(0x80)
if tik == '' or regen_sig != '':
data.issuer = b'Root-CA00000003-XS0000000c'
if regen_sig == 'dev':
data.issuer = b'Root-CA00000004-XS00000009'
if ticketID != '':
data.ticketID = int(ticketID, 16)
if consoleID != '':
data.consoleID = int(consoleID, 16)
if titleID != '':
titleID_bytes = int.to_bytes((int(titleID, 16)), 8, 'big')
data.titleID = (c_uint8 * sizeof(data.titleID))(*titleID_bytes)
if title_ver != -1:
data.title_ver = title_ver
if common_key_index != -1:
data.common_key_index = common_key_index
if eshop_acc_id != '':
eshop_acc_id_bytes = int32tobytes(int(eshop_acc_id, 16))
data.eshop_acc_id = (c_uint8 * sizeof(data.eshop_acc_id))(*eshop_acc_id_bytes)
if titlekey != '': # Encrypt TitleKey
if regen_sig == 'dev':
dev = 1
else:
dev = 0
normal_key = CTR.key_scrambler(CTR.KeyX0x3D[dev], CTR.KeyY0x3D[data.common_key_index][dev])
cipher = AES.new(normal_key, AES.MODE_CBC, iv=bytes(data.titleID)+(b'\0'*8))
enc_titlekey = cipher.encrypt(hextobytes(titlekey))
data.enc_titlekey = (c_uint8 * sizeof(data.enc_titlekey))(*enc_titlekey)
# Create content index
if tik == '':
content_index_hdr = hextobytes('00010014 000000AC 00000014 00010014 00000000 00000028 00000001 00000084 00000084 00030000'.strip())
content_index_offset = b'\x00' * 4
content_index = b'\xff' * 0x80 # Enable all content indexes
# Write ticket
if regen_sig == 'retail':
sig = Crypto.sign_rsa_sha256(CTR.test_mod, CTR.test_priv, bytes(data) + content_index_hdr + content_index_offset + content_index)
elif regen_sig == 'dev':
sig = Crypto.sign_rsa_sha256(CTR.tik_mod[1], CTR.tik_priv[1], bytes(data) + content_index_hdr + content_index_offset + content_index)
with open(f'{out}', 'wb') as f:
f.write(int.to_bytes(0x00010004, 4, 'big'))
f.write(sig)
f.write(b'\x00' * 0x3C)
f.write(bytes(data))
f.write(content_index_hdr)
f.write(content_index_offset)
f.write(content_index)
print(f'Wrote to {out}')

323
lib/ctr_tmd.py Normal file
View File

@ -0,0 +1,323 @@
from .common import *
from .keys import *
from .ctr_tik import signature_types
from .ctr_ncch import NCCHReader
class TMDHdr(BigEndianStructure):
_pack_ = 1
_fields_ = [
('issuer', c_char * 0x40),
('format_ver', c_uint8),
('ca_crl_ver', c_uint8),
('signer_crl_ver', c_uint8),
('reserved1', c_uint8),
('system_ver', c_uint64),
('titleID', c_uint8 * 8),
('title_type', c_uint32),
('groupID', c_uint16),
('save_data_size', c_uint32),
('priv_save_data_size', c_uint32),
('reserved2', c_uint32),
('twl_flag', c_uint8),
('reserved3', c_uint8 * 0x31),
('access_rights', c_uint32),
('title_ver', c_uint16),
('content_count', c_uint16),
('boot_content', c_uint16),
('reserved4', c_uint16),
('content_info_records_hash', c_uint8 * 32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class TMDContentInfoRecord(BigEndianStructure):
_pack_ = 1
_fields_ = [
('content_index_offset', c_uint16),
('content_command_count', c_uint16),
('content_chunk_record_hash', c_uint8 * 32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class TMDContentChunkRecord(BigEndianStructure):
_pack_ = 1
_fields_ = [
('contentID', c_uint32),
('content_index', c_uint16),
('content_type', c_uint16),
('content_size', c_uint64),
('content_hash', c_uint8 * 32),
]
def __new__(cls, buf):
return cls.from_buffer_copy(buf)
def __init__(self, data):
pass
class TMDReader:
def __init__(self, file, dev=0):
self.file = file
self.dev = dev
with open(file, 'rb') as f:
sig_type = readbe(f.read(4))
self.sig = f.read(signature_types[sig_type][0])
padding = f.read(signature_types[sig_type][1])
self.hdr = TMDHdr(f.read(0xC4))
self.titleID = hex(readbe(self.hdr.titleID))[2:].zfill(16)
content_infos_all = b''
content_infos = []
for _ in range(0x40):
content_info = f.read(0x24)
content_infos_all += content_info
if content_info != b'\0' * 0x24:
content_infos.append(TMDContentInfoRecord(content_info))
self.content_infos_all = content_infos_all
self.content_infos = content_infos
content_chunks = []
files = {}
for _ in range(self.hdr.content_count):
tmd_chunk = TMDContentChunkRecord(f.read(0x30))
content_chunks.append(tmd_chunk)
if tmd_chunk.content_index == 0 and self.titleID[3:5] == '48':
ext = 'nds'
else:
ext = 'ncch'
name = f'{hex(tmd_chunk.content_index)[2:].zfill(4)}.{hex(tmd_chunk.contentID)[2:].zfill(8)}.{ext}'
files[name] = {
'size': tmd_chunk.content_size,
'crypt': 'none',
'hash': bytes(tmd_chunk.content_hash),
}
if tmd_chunk.content_type & 1: # Encrypted flag set in content type
files[name]['crypt'] = 'normal'
files[name]['key'] = b''
files[name]['iv'] = int.to_bytes(tmd_chunk.content_index, 2, 'big') + (b'\0' * 14)
self.content_chunks = content_chunks
self.files = files
def verify(self, no_print=0): # 'no_print' parameter to facilitate CIAReader.verify()
hash_check = []
# Content info records hash in header
h = hashlib.sha256()
h.update(self.content_infos_all)
hash_check.append(('TMD CntInfo', h.digest() == bytes(self.hdr.content_info_records_hash)))
# Content chunk records hash in content info records
hashed = []
for i in self.content_infos:
to_hash = b''
for j in self.content_chunks[i.content_index_offset:i.content_index_offset + i.content_command_count]:
to_hash += (bytes(j))
h = hashlib.sha256()
h.update(to_hash)
hashed.append(h.digest() == bytes(i.content_chunk_record_hash))
hash_check.append(('TMD CntChunk', all(hashed)))
sig_check = []
sig_check.append(('TMD Header', Crypto.verify_rsa_sha256(CTR.tmd_mod[self.dev], bytes(self.hdr), self.sig)))
if no_print == 0:
print("Hashes:")
for i in hash_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
print('Signatures:')
for i in sig_check:
print(' > {0:15} {1:4}'.format(i[0] + ':', 'GOOD' if i[1] else 'FAIL'))
return (hash_check, sig_check)
def __str__(self):
contents = ''
for i in self.content_chunks:
contents += f' > {hex(i.content_index)[2:].zfill(4)}\n'
cid = f' Content ID: {hex(i.contentID)[2:].zfill(8)}'
if i.content_type & 1:
cid += f' [encrypted]'
if i.content_type & 0x4000:
cid += f' [optional]'
contents += f'{cid}\n'
contents += f' Content size: {i.content_size}\n'
contents += f' Content hash: {hex(readbe(bytes(i.content_hash)))[2:]}\n'
contents = contents[:-1] # Remove last '\n'
return (
f'TitleID: {self.titleID}\n'
f'Title version: {self.hdr.title_ver}\n'
f'Contents:\n'
f'{contents}'
)
class TMDBuilder:
def __init__(self, tmd='', content_files=[], content_files_dev=0, titleID='', title_ver=-1, save_data_size='', priv_save_data_size='', twl_flag='', crypt=1, regen_sig='', out='tmd_new'):
'''
tmd: path to TMD (if available)
The following parameters are required if no TMD is provided:
- content_files: list containing filenames of content files, which must each be named '[content index in hex, 4 chars].[contentID in hex, 8 chars].[ncch/nds]'
- content_files_dev: 0 or 1 (whether content files are dev-crypted)
The following parameters are required if no TMD is provided; if both a TMD and a parameter are supplied, the parameter overrides the TMD
- titleID: titleID in hex, e.g. '000400000FF3FF00'
- title_ver: title version in decimal
- save_data_size (leave blank for auto)
- priv_save_data_size (leave blank for auto)
- twl_flag (leave blank for auto)
- crypt: 0 or 1
regen_sig: '' or 'retail' (test keys) or 'dev'
out: path to output file
'''
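# Example usage (file name and TitleID are illustrative):
# TMDBuilder(content_files=['0000.00000000.ncch'], titleID='000400000FF3FF00',
#            title_ver=0, crypt=1, regen_sig='retail', out='tmd_new')
# builds a test-signed TMD for a single NCCH content and marks it as encrypted in its chunk record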
# Checks
if titleID != '':
if not all([i in string.hexdigits for i in titleID]) or len(titleID) != 16:
raise Exception('Invalid TitleID')
# Defaults
if tmd == '':
if regen_sig == '':
regen_sig = 'retail'
if content_files[0].endswith('.nds'): # Get public and private savedata size from TWL header
with open(content_files[0], 'rb') as f:
f.seek(0x238)
if save_data_size == '' or priv_save_data_size == '':
save_data_size = readbe(f.read(4))
priv_save_data_size = readbe(f.read(4))
f.seek(0x1BF)
if twl_flag == '':
twl_flag = (readbe(f.read(1)) & 6) >> 1
else:
if save_data_size == '':
ncch = NCCHReader(content_files[0], dev=content_files_dev)
if 'exheader.bin' in ncch.files.keys(): # If exheader exists, read savedata size from it. Otherwise, savedata size is set to 0
info = ncch.files['exheader.bin']
with open(content_files[0], 'rb') as f:
f.seek(info['offset'])
if ncch.is_decrypted:
exheader = f.read(info['size'])
else:
counter = Counter.new(128, initial_value=readbe(info['counter']))
cipher = AES.new(info['key'], AES.MODE_CTR, counter=counter)
exheader = cipher.decrypt(f.read(info['size']))
save_data_size = readbe(exheader[0x1C0:0x1C4])
# Create (or modify) TMD header
if tmd == '':
content_files.sort(key=lambda h: int(h.split('.')[0], 16)) # Sort list of content files by content index (since that is how content chunk records are ordered)
hdr = TMDHdr(b'\x00' * 0xC4)
hdr.format_ver = 1
hdr.title_type = 0x40
hdr.content_count = len(content_files)
else:
with open(tmd, 'rb') as f:
sig_type = readbe(f.read(4))
sig = f.read(signature_types[sig_type][0])
padding = f.read(signature_types[sig_type][1])
hdr = TMDHdr(f.read(0xC4))
content_infos_all = f.read(0x900)
content_chunks_all = f.read(0x30 * hdr.content_count)
if tmd == '' or regen_sig != '':
hdr.issuer = b'Root-CA00000003-CP0000000b'
if regen_sig == 'dev':
hdr.issuer = b'Root-CA00000004-CP0000000a'
if titleID != '':
titleID_bytes = int.to_bytes((int(titleID, 16)), 8, 'big')
hdr.titleID = (c_uint8 * sizeof(hdr.titleID))(*titleID_bytes)
if title_ver != -1:
hdr.title_ver = title_ver
if save_data_size != '':
hdr.save_data_size = save_data_size
if priv_save_data_size != '':
hdr.priv_save_data_size = priv_save_data_size
if twl_flag != '':
hdr.twl_flag = twl_flag
# Create (or modify) content chunk records
content_chunks = b''
if tmd == '':
for i in range(len(content_files)):
tmd_chunk = TMDContentChunkRecord(b'\x00' * 0x30)
# Get content index and contentID from file name
name = content_files[i].split('.')
tmd_chunk.content_index = int(name[0], 16)
tmd_chunk.contentID = int(name[1], 16)
if crypt:
tmd_chunk.content_type |= 1
if titleID[3:8].lower() == '4008c' and i >= 1:
tmd_chunk.content_type |= 0x4000 # Set Optional flag
# Calculate hashes
f = open(content_files[i], 'rb')
tmd_chunk.content_size = os.path.getsize(content_files[i])
hashed = Crypto.sha256(f, tmd_chunk.content_size)
tmd_chunk.content_hash = (c_uint8 * sizeof(tmd_chunk.content_hash))(*hashed)
f.close()
content_chunks += bytes(tmd_chunk)
else:
for i in range(hdr.content_count):
tmd_chunk = TMDContentChunkRecord(content_chunks_all[i * 0x30:(i + 1) * 0x30])
tmd_chunk.content_type &= ~1 # Reset flags
if crypt:
tmd_chunk.content_type |= 1
content_chunks += bytes(tmd_chunk)
# Create content info records
content_info = TMDContentInfoRecord(b'\x00' * 0x24)
content_info.content_command_count = hdr.content_count
h = hashlib.sha256()
h.update(content_chunks)
hashed = h.digest()
content_info.content_chunk_record_hash = (c_uint8 * sizeof(content_info.content_chunk_record_hash))(*hashed)
content_infos = bytes(content_info) + (b'\x00' * 0x24 * 0x3F) # Only fill first content info record
# Finalise header
h = hashlib.sha256()
h.update(content_infos)
hashed = h.digest()
hdr.content_info_records_hash = (c_uint8 * sizeof(hdr.content_info_records_hash))(*hashed)
if regen_sig == 'retail':
sig = Crypto.sign_rsa_sha256(CTR.test_mod, CTR.test_priv, bytes(hdr))
elif regen_sig == 'dev':
sig = Crypto.sign_rsa_sha256(CTR.tmd_mod[1], CTR.tmd_priv[1], bytes(hdr))
# Write TMD
with open(f'{out}', 'wb') as f:
f.write(int.to_bytes(0x00010004, 4, 'big'))
f.write(sig)
f.write(b'\x00' * 0x3C)
f.write(bytes(hdr))
f.write(content_infos)
f.write(content_chunks)
print(f'Wrote to {out}')

64
lib/keys.py Normal file
View File

@ -0,0 +1,64 @@
from .common import *
class CTR: # For tuples: index 0 for retail, index 1 for dev
fixed_system = 0x527CE630A9CA305F3696F3CDE954194B
# AES keyslots
KeyX0x2C = (0xb98e95ceca3e4d171f76a94de934c053, 0x510207515507cbb18e243dcb85e23a1d)
KeyX0x25 = (0xCEE7D8AB30C00DAE850EF5E382AC5AF3, 0x81907A4B6F1B47323A677974CE4AD71B)
KeyX0x18 = (0x82E9C9BEBFB8BDB875ECC0A07D474374, 0x304BF1468372EE64115EBD4093D84276)
KeyX0x1B = (0x45AD04953992C7C893724A9A7BCE6182, 0x6C8B2944A0726035F941DFC018524FB6)
KeyX0x3D = (0x617085719b7cfb316df4df2e8362c6e2, 0xBD4FE7E733C755FCE7540EABBD8AC30D)
KeyY0x3D = (
(0xD07B337F9CA4385932A2E25723232EB9, 0x85215E96CB95A9ECA4B4DE601CB562C7),
(0x0C767230F0998F1C46828202FAACBE4C, 0x0C767230F0998F1C46828202FAACBE4C),
(0xC475CB3AB8C788BB575E12A10907B8A4, 0xC475CB3AB8C788BB575E12A10907B8A4),
(0xE486EEE3D0C09C902F6686D4C06F649F, 0xE486EEE3D0C09C902F6686D4C06F649F),
(0xED31BA9C04B067506C4497A35B7804FC, 0xED31BA9C04B067506C4497A35B7804FC),
(0x5E66998AB4E8931606850FD7A16DD755, 0x5E66998AB4E8931606850FD7A16DD755)
)
KeyX0x3B = (0xB529221CDDB5DB5A1BF26EFF2041E875, 0x5C3D38AC1740994EFC8FD0BE8D8097B3)
# RSA signing keys
test_priv = bytes.fromhex('3E2BBEBA7F290252BF1BF1E4212FD9761E39234A6DFF99F633AA2B62030A0E15AC16B9856377F5742461B1016EEB72241E5DFA8FA85A101447BD05A07EE5FF60872A1831C1396CD545BB290504FB7AA268215FED4EFE646069BD96D0A7063D537B6892885086EE065D72739A39B6723B200139DF37281EF53963BC2AF25EAB1A99E45BEBE636306C40016160CC55896DCA7EE064787F7B26AE3EA3124516F6C8D0B94F91111211BBBB7FABC782DC4A619C14AE29FD3A601393192F5449B244345814D72F7025A048667655879B25776D0B75988BA639403C217F2A24C1A5C1DC5A5754F603F6AD5133406D5C265E299282E529137D7DFE0873BC5DC4E92BD671')
test_mod = bytes.fromhex('CAC588C7F12A092B7649C0A835751082C2B5E5B2E9C81888F39889BF9DE6E40B715DDD3F138271F2ED318699D947FEC57A7593E1F86DC63D9BE11599E1C2E05C384B35A24D3EE2CEFBB308A3DD0C2631849227C88A8EC883A86CA7A339719EF1349101DF114A9CF98BF92F46440A7238F38B6D233389BF6634A786E6ADF2DEF9AB16A140EED8F76CDC0092CB3149FC266424088FC660FF1EE3F0DDFB6D0D0F497CAD03EC9F6358FA46DFA2640ECC8557E72C617F59B8627D590EF684969942B0398380B5522E073F92E39EF547EBA7D7D415F1228232BE2AD08C01CC30A91196F6E92BEA0EF82D0DB191D51A9451B98539B0AF9F549E99E146E56FE25F4B4E23')
accessdesc_mod = (bytes.fromhex('B1E3E35F013980D156789DB706F71DBF3E2276EDF95DA236B630610596D300B9EDF1D7E01DA04FB7CF5A198775498840EDE36F7C904A644598D704B95A6B45AA7E94C0B3B7DB7B665920B708E2F383A37FE32021A0EBB7280FF32B15A4C9D0AB8939997E765F9E4D1E01228D74A6EB9AA39D45E510616E20FD2375C0C50503C54C024F544B5708B446C32CF1F9526CCD1455A855926DE24A4146EB08C5F3B48D0D5E21EAAF4D274DDE779397E2C76B661FDB2D6EA95F6114177B2B665AB50189F2237525259C869A89FF641D5BCED77E3F2DA8DAB55AC55F5920B0ED1C91FFA327B88ECF8215E549EFE458E15F8F53B9332A5624AAA1D36E471A634419B38EA5'),
bytes.fromhex('F43C4582FBF8905D07029F2A988B63B7D38F3CE2E0E093BFDF32434DBEF4D17A3A4E5431D773AE994CC41F3C3EF05705A38A455460D88FD91D680D0E2EEFC8E83DC919F3731E2DDA77883ECA5E25704BF77095835424E0C31A75DF613DD142EC351B38D6C1F67E182A8485DD57741F0A2EF6B294A23EE9A1D009F73A998005AF5755EF52FA243E7FD47C41447B067FB95B2E8E96AE46124D6421E50F85CCEB92E5F0F5A742273BECF8E781756F630A8B0D773851E66633BA79DC2F2C8FC32806BB039CDBD1640A66F0F8C12A491D0C6E35BBEAB35C0DE9957C67BE6577EC07C023050A724886E99EFC2515E7C82165E01BD5D50ED31154BB2978BF2A3C3BB6B1'))
accessdesc_priv = (None, bytes.fromhex('0cfbb8bec12e0a1f95e030eda2e1130e9a3d9cb11b5ef8dfbbe9d83bd56d3e8702bb37fcd3cd1ba09cdcf6a39d5d69a4dc381d1e228d549d4833c830e3341e40214bb3d8561bea38593f1a06683991860a4bf6f8bda3fdba45697373e005442b5a19ddbc45e0987ca8a2f59a49fc3b6baf4a303d584023ab954b7853ea45ccd776251d88ec4349a361e34878e5e658adb71b7b68ecf7171464d0d175f07cb769a929ef6a97cbef802f13ae3df772bbeb27807d8c6ab0ac2275905ca736c6a27cfb792f7becc36721e6fa9ceab04a24219dd99556a0df9c854ddf8753976a2edcddc8deec1f7f241a8897fcbbb823b7afc1a5e39d410bf291029bcf6c4ec1ebc1'))
cfa_mod = (bytes.fromhex('FBDEB82B40930FF6B19A08061B86FED0DF1079173D8CE27ACE8F2345B90A6DED300EC1A892C4BD1ACEA7AC77AA47E5204A4491DF1CFE8628122D66DFBEAD9661EDF2F7417B57886B241E7DECBE986565366599A9FE24678599EE2AAEEEB1811A22E36D756E21BCEF115C61AF0C3000B6A223EDFE7015DA52E1E62DCE34E8AA4CF1D6675657D3DBC090496F4573934E303070F5C98F3125F2C2E7337F4EB6F52ADF2000E579B2D0F917F77E1690400057914478EF1CE08509DAF4147E4BD735D687548F2AB5A76F50D0F7D1F119C9AC227E0511F5F26DEE9227575FE5150D2768BF52657473A6586D7918AC31DDDD808B7524E117E195251629AB6969C828EE5D'),
bytes.fromhex('B90CC4C678F86E300528C1CBD2CFA7805C574D169CAFA6CD01BB8333AD03BB0663D817F5E3DFDA0D3B860EA2804794446FD9977E786AC39393EF02FC229F80778C70921C43B1374C76E0573BAB89FFEFE5BB3EAB9139B8D9660B64289192E9D0B3DFD14BC173B53F56A04010FE152B1FA27ADE31B02640C357FD35CBF0FAFFFB6FDBCD341D512D2D8118FF0C0851D5B44B5616029F4E6ADF066ECB7285E92E43A208780C389C19BD7B747468C42DC1359E653BD899041C8B938E7E927CBBDD60ECE7FE0E9D4F3646E6F15C9470EE675F362B70448DCA09B95867D29FAD1F135474ADA6844428F3DE7E4C202BC5E912E95EFB8D77A9A4D20D3C3824BEF58AB5F5'))
cfa_priv = (None, bytes.fromhex('323643C2B31A7E13ABA2B68B4F05A7A6CDE7A6744749E651E47174157691F792B14EF699731ECFB51D7CAFC5EA5701E55C1047EA3A5486032A7605725316C2AE2DBE71F7176B23DD2CB88D1314E5DA3BC7337ABAE52A2B7D5A12273856DFED70030EED64C7F654ACFE1D77A4E4BCEBB9A6C5FE3AAF5881E43FA0E693132D987DB3E2C9C8D63191739DCAC944EFD039BF38FD1C91729340A98A0D3E32C4594B0CC7EA50419FF5E2B7507CE3C9EC4618ACB4912A32E0D8106FFC81B395F3FC78C0EFE57B8D14D436265FC632C019875C772637D8AE66D60B2826437C25DB6D5CE8948FA97707B2C085CD41BA48887334D5208A0FE39E99F0C8E8D92C2A2169E4C1'))
tik_mod = (bytes.fromhex('AD505BB6C67E2E5BDD6A3BEC43D910C772E9CC290DA58588B77DCC11680BB3E29F4EABBB26E98C2601985C041BB14378E689181AAD770568E928A2B98167EE3E10D072BEEF1FA22FA2AA3E13F11E1836A92A4281EF70AAF4E462998221C6FBB9BDD017E6AC590494E9CEA9859CEB2D2A4C1766F2C33912C58F14A803E36FCCDCCCDC13FD7AE77C7A78D997E6ACC35557E0D3E9EB64B43C92F4C50D67A602DEB391B06661CD32880BD64912AF1CBCB7162A06F02565D3B0ECE4FCECDDAE8A4934DB8EE67F3017986221155D131C6C3F09AB1945C206AC70C942B36F49A1183BCD78B6E4B47C6C5CAC0F8D62F897C6953DD12F28B70C5B7DF751819A9834652625'), bytes.fromhex('C0844CEB7EB0CFF0AEB777698593E4995A954E581738CED681B0BD7709E7F89ADFAD054883F6C3FDDF7B83E00C2681544329EA826C89F0A67442864D3260327DA77A13406659DA3E416B2794034FAA229DD55452DB270A6AA23D19B1661B197DABC70E881791A12AB43C6CCBF5AA7C3ADD36FB35717B20015900D6F69039354131F8C1C0573A35185890B1AD9A0EECE0F47A7DA52748C972AB0D087B62354091142BB11D1AFAF9CD5C1713535271CAE22A78B17F4ACD59D8BA1D7D705F781B9F9D37188ED7CD0D49577469883A6B8E4E1B85DDBE39450589561297599A09A4C82D2FF5CFB47370DB581EB24E776FA47E62DFB705E880425CB87887977F662C5F')) # XS0000000c, XS00000009
tik_priv = (None, bytes.fromhex('74CBCF1ED02DD4F9E005CE9C663DE36266624EB582E1241B5F732A7F1DB36E500783A0C0EDCEB7F93DAC61C57B99A0BCCE428FD3B0A5BF2A3D3E5EDC56C3A5DE35CD0A00F8176B2079EFD88323BF2128FF387D800715186CB920F88577BCD92A351CFEE3F1E8982EA04A48773503C97AACDABE6D1DFBE4DEEC7065FA1065A4B86ADF326B8E2879258772C07C5B81BC8192447DEA61BD3C48F30E18DC8D89A034C3AE9C5772A6D77C79F7E9146E15AC01FAFFC8A22A3AAB243C7E2EC5DA83D59D2410837AF4BBA36F88CEEC241BF4362E96C96D1902FEAA213E95A7FE83C8997FD1CB7C1F9130DBA4D3DDDA9B124E24D1A56F15FC2C72982C89C57D89DE2B4E01'))
tmd_mod = (bytes.fromhex('A689C590FD0B2F0D4F56B632FB934ED0739517B33A79DE040EE92DC31D37C7F73BF04BD3E44E20AB5A6FEAF5984CC1F6062E9A9FE56C3285DC6F25DDD5D0BF9FE2EFE835DF2634ED937FAB0214D104809CF74B860E6B0483F4CD2DAB2A9602BC56F0D6BD946AED6E0BE4F08F26686BD09EF7DB325F82B18F6AF2ED525BFD828B653FEE6ECE400D5A48FFE22D538BB5335B4153342D4335ACF590D0D30AE2043C7F5AD214FC9C0FE6FA40A5C86506CA6369BCEE44A32D9E695CF00B4FD79ADB568D149C2028A14C9D71B850CA365B37F70B657791FC5D728C4E18FD22557C4062D74771533C70179D3DAE8F92B117E45CB332F3B3C2A22E705CFEC66F6DA3772B'), bytes.fromhex('AA7F9380289BE89863107AE10C592C2F7CFFBDAADD74F4A2FBACD76F00934206347156D84049729F3E24FA5E19D15B635CD2EF09DE32EE6B6FC8FA328E2E96B99441047D076295DA0D91D80935D0DE8E6BC6AB1427019CFE4996FC9B54794DEBD7C66673A6DD3A77654794EC1C87AA46D978A97DDB11226ED412C2784B218392C710C77419FFAAF60B75D823DD33C3A15BA72D30A5A4D8F80FD673FD26CB29A6EF5039E25F5961846BDA2EC7CBE4384B28FB0DD58E7CAA7D4B373AD781DD73E30993BDBD7E08554A8CA5C9842D7101A22A01B015FB3078B913F4C73FB5A6F1A25E22B002B6E009547F0FBDF0FEA5501D9315F93D830F0F0E3DE23D96E709D977')) # CP0000000b, CP0000000a
tmd_priv = (None, bytes.fromhex('28CEDC39027F3E8EAAB75911E068BF80A64477DB1BA250A369E596B2C4CA7A350DFC4AB2FBC018A530B49D1044D1AD33FD15A78D0F17D5A4F55E7F33F68004276AEA9CEE68041AA5D435A225A231D9F2F0ACDE69B66456752E9BEADE2ABBD600AAE69BC2F69F60CD0EFAB1144A47D6639ACD9C93B90942DA8FFBE57BF14F9633F9455BCC84ABC2D8C40C85FA5128B99795238CB31D4EB61CCC6041FB26C7D6CB7718F7EACD103C5BA3C0774C11F37450EE2380C45DDD57F57D49574ABA62BF06D9D17F9110896F4909D7E9AF4C9F679D8982E4D5C19ADC5579E7E92D8142145561479BED76921D2FB57C284BFF7BC23B367399A621430EA11F82B8917111B2C1'))
root_mod = (bytes.fromhex('F8246C58BAE7500301FBB7C2EBE0010571DA922378F0514EC0031DD0D21ED3D07EFC852069B5DE9BB951A8BC90A244926D379295AE9436AAA6A302510C7B1DEDD5FB20869D7F3016F6BE65D383A16DB3321B95351890B17002937EE193F57E99A2474E9D3824C7AEE38541F567E7518C7A0E38E7EBAF41191BCFF17B42A6B4EDE6CE8DE7318F7F5204B3990E226745AFD485B24493008B08C7F6B7E56B02B3E8FE0C9D859CB8B68223B8AB27EE5F6538078B2DB91E2A153E85818072A23B6DD93281054F6FB0F6F5AD283ECA0B7AF35455E03DA7B68326F3EC834AF314048AC6DF20D28508673CAB62A2C7BC131A533E0B66806B1C30664B372331BDC4B0CAD8D11EE7BBD9285548AAEC1F66E821B3C8A0476900C5E688E80CCE3C61D69CBBA137C6604F7A72DD8C7B3E3D51290DAA6A597B081F9D3633A3467A356109ACA7DD7D2E2FB2C1AEB8E20F4892D8B9F8B46F4E3C11F4F47D8B757DFEFEA3899C33595C5EFDEBCBABE8413E3A9A803C69356EB2B2AD5CC4C858455EF5F7B30644B47C64068CDF809F76025A2DB446E03D7CF62F34E702457B02A4CF5D9DD53CA53A7CA629788C67CA08BFECCA43A957AD16C94E1CD875CA107DCE7E0118F0DF6BFEE51DDBD991C26E60CD4858AA592C820075F29F526C917C6FE5403EA7D4A50CEC3B7384DE886E82D2EB4D4E42B5F2B149A81EA7CE7144DC2994CFC44E1F91CBD495'), bytes.fromhex('D01FE100D43556B24B56DAE971B5A5D384B93003BE1BBF28A2305B060645467D5B0251D2561A274F9E9F9CEC646150AB3D2AE3366866ACA4BAE81AE3D79AA6B04A8BCBA7E6FB648945EBDFDB85BA091FD7D114B5A3A780E3A22E6ECD87B5A4C6F910E4032208814B0CEEA1A17DF739695F617EF63528DB949637A056037F7B32413895C0A8F1982E1565E38EEDC22E590EE2677B8609F48C2E303FBC405CAC18042F822084E4936803DA7F41349248562B8EE12F78F803246330BC7BE7EE724AF458A472E7AB46A1A7C10C2F18FA07C3DDD89806A11C9CC130B247A33C8D47DE67F29E5577B11C43493D5BBA7634A7E4E71531B7DF5981FE24A114554CBD8F005CE1DB35085CCFC77806B6DE254068A26CB5492D4580438FE1E5A9ED75C5ED451DCE789439CCC3BA28A2312A1B8719EF0F73B713950C02591A7462A607F37C0AA7A18FA943A36D752A5F4192F0136100AA9CB41BBE14BEB1F9FC692FDFA09446DE5A9DDE2CA5F68C1C0C21429287CB2DAAA3D263752F73E09FAF4479D2817429F69800AFDE6B592DC19882BDF581CCABF2CB91029EF35C4CFDBBFF49C1FA1B2FE31DE7A560ECB47EBCFE32425B956F81B69917487E3B789151DB2E78B1FD2EBE7E626B3EA165B4FB00CCB751AF507329C4A3939EA6DD9C50A0E7386B0145796B41AF61F78555944F3BC22DC3BD0D00F8798A42B1AAA08320659AC7395AB4F329'))
cci_mod = cfa_mod
cci_priv = cfa_priv
crr_mod = (bytes.fromhex('D00E6756858ABCF213D87FBB29D192349EE42743D1111DFE9593C4783F4A2D2AB1CA6FB0BA32A2242E2BE5726EF0B49768CCDA171514F074D435B4EE04D89CA9ECDBCB29E895F257BABD65473D6CE50DE5103EB5C37302AEC893777CC523220171CCC60BEACAA0FED4CDA949655A3B0D5BB7B2EA329A43856CCA045C900D9E34B9EF802B2E6D96BC31157E1401C000E0197A2E17E58BA37AD2C247779A8B832E9866A1A1020F0299E7D79FDCC852A22823081B87FFE4BA90025015C720A0AB76607D1FC1BF339D66246A803BF4A27F15B10C36EF44E8D036949A7484488789499A0B7E10F27355623D68AC9E5FAE1D54F8BA628E29BC49E7FDEEBD3D7BCCF8BF'), bytes.fromhex('c5f709805fdadcbd460752aa6dcd72b2500977478a4f092ae291b45f04975175c9196f95bb23147d34df777807e8d11011afce02b873a9fb64b76587e567dd673775fd0fe297bc79a8ccf9fa18b2624df7536b9c0e1aab90e65286c81c922c6153a901da4393d0422eddd54c8c4ce89156ecee12700b64f00ad6aff860c2a8267ac8ba559aa144fd094726a0c1999ef124dfa3c2bbff0745d9d6a4c0ff6c6c787b6d708c7444b095e6c6665e7ebe71c5913473a7d44c0dfca921499492a16a4d30a3d69f6c60400ceeebf89922e16ffc3c9623aa1134004efc2d604145e35d7806b1f1c307b9d347d0f18c26331f6b46ddf3e38464a7f15311e1534e99dbac53'))
crr_priv = (None, bytes.fromhex('3cde28a33a95ca323e126955f29ded4aab3645e10ec31f5ffabaef191a09c6a7af15ed8bbdb6db096654e1605c1a3711b8467452bcd0ff6e46f8e6b462924d6e7b8c812b3db206e6e74263080548efd5e61a55a76d64c0e4b414a2d16b8b7c2eceb0d3db7ed4b77f69f6962b4ba62809ec0ebc6299e5befad9724a37d5d4742b9a37efd8f68040f520ba28e2be7df98f04373534f13215f8fd9567bcad60867406c4a55aea403851026177e07bfee17b8cf9b341fdb76085d98ccbb02904028a6366baece494af4dd59a4c221e437575159d7cd8528a797ff196401e9ebc9a9eaa055a721be7a1f4fbe43ff499f344f5a54f36e73545201b339557d12562a781'))
crr_body_mod = bytes.fromhex('E2ADA6EACAA3E8CCA9701D2E234BC655CE13D9A758B4C773961DE8C3094D9BC3EB69A237835DD83704727A4FEA53989D0E0134709A8206E76AC9F80E495AA4E70EFAD4AB3BC5D7F1A4FC927FD0F3CDD5B92A1A4162B37B3E1E3546418EB2531A60F8C2D194B39D769F1D98ACF0CFE3A90585F2BF55761B891CC3192EEE94BE750FA3768B242497FAC05324F58502199DC5112E6BA326FEF755D4230A733F3753EAC2B7C1C9D8F32F78764AA0696091C27D1174EF96D97453B11CB0C43216823BAF61B2DE38873E374BA395888EE0279A1F7DB823C363E86851D98C4CC2598649F7469E3CD79F8923B473352F182376A39F400B769085C889DA65E76EEF2EF567')
crr_body_priv = bytes.fromhex('8D27296BC7A7EDCD942D365E86A826E7439E64C8AA9A582107F7B3FBCF8D3E53F002257B80182E0D847D6CE0DAC017A6A513E6FDBF98FC879A9E0E138766248DA56C588610809089EEFD4094CB1F26ABD1D3FFE97B76DC65C015D89BF629E149E9DCBE2417FF092CD6C46D5033DCA09D9DCCDD6E7BDF42224D80C7EBCBB1602F04EE040E4C76495592A5C113600A80153D1CC646572E7CB03D870687FD31F8E714972A5740AC9461CDCFDE8C404695A0D6F92C089B12BFF1889C5D40326D9D99A480A6C2455AD322FEFA175411EA41B4BD681EDD3FE592CB9EF8C00A8BF589A40368F8F8997CFEAD32DD5CB40629DA968BBACB15DE380DCAF70165F62D366E71')
def rol(val, r_bits, max_bits):
return (val << r_bits % max_bits) & (2 ** max_bits - 1) | ((val & (2 ** max_bits - 1)) >> (max_bits - (r_bits % max_bits)))
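# 3DS AES keyslot scrambler: normal key = ROL((ROL(keyX, 2, 128) XOR keyY) + 0x1FF9E9AAC5FE0408024591DC5D52768A, 87, 128), returned as 16 big-endian bytes.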
def key_scrambler(keyX, keyY):
return CTR.rol((CTR.rol(keyX, 2, 128) ^ keyY) + 0x1FF9E9AAC5FE0408024591DC5D52768A, 87, 128).to_bytes(0x10, 'big')
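# Password-based titlekey derivation: MD5(secret || titleID with leading zero bytes stripped) is used as the salt for PBKDF2-HMAC-SHA1 (20 iterations, 16-byte key); the result is returned as a hex string.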
def titlekey_gen(titleID: str, password: str):
secret = hextobytes('fd040105060b111c2d49')
tid = hextobytes(titleID).lstrip(b'\x00')
salt = hashlib.md5(secret + tid).digest()
titlekey = hashlib.pbkdf2_hmac('sha1', password.encode(), salt, 20, 16)
return hex(readbe(titlekey))[2:]

BIN
lib/resources/3dstool.exe Normal file

Binary file not shown.

BIN
lib/resources/3dstool_linux Executable file

Binary file not shown.

BIN
lib/resources/3dstool_macos Executable file

Binary file not shown.

BIN
lib/resources/seeddb.bin Normal file

Binary file not shown.

48
ntool.py Executable file
View File

@ -0,0 +1,48 @@
#!/usr/bin/python3
import sys
from utils import *
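# Lightweight CLI dispatch: sys.argv[1] names a function from utils.py and is only eval()'d after matching one of the whitelists below; the remaining arguments are scanned for flags.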
if sys.argv[1] in ['cia_dev2retail', 'cia_retail2dev', 'cci_dev2retail', 'cci_retail2dev', 'csu2retailcias']:
path = sys.argv[2]
out = ''
for i in range(2, len(sys.argv)):
if sys.argv[i] == '--out':
out = sys.argv[i + 1]
eval(sys.argv[1])(path, out)
elif sys.argv[1] in ['ncch_extractall', 'ncch_rebuildall', 'cci_extractall', 'cci_rebuildall', 'cia_extractall', 'cia_rebuildall']:
path = sys.argv[2]
dev = 0
for i in range(2, len(sys.argv)):
if sys.argv[i] == '--dev':
dev = 1
eval(sys.argv[1])(path, dev)
elif sys.argv[1] == 'cci2cia':
path = sys.argv[2]
out = ''
cci_dev = cia_dev = 0
for i in range(2, len(sys.argv)):
if sys.argv[i] == '--out':
out = sys.argv[i + 1]
elif sys.argv[i] == '--cci-dev':
cci_dev = 1
elif sys.argv[i] == '--cia-dev':
cia_dev = 1
cci2cia(path, out, cci_dev, cia_dev)
elif sys.argv[1] == 'cdn2cia':
path = sys.argv[2]
out = ''
title_ver = ''
cdn_dev = cia_dev = 0
for i in range(2, len(sys.argv)):
if sys.argv[i] == '--out':
out = sys.argv[i + 1]
elif sys.argv[i] == '--title-ver':
title_ver = sys.argv[i + 1]
elif sys.argv[i] == '--cdn-dev':
cdn_dev = 1
elif sys.argv[i] == '--cia-dev':
cia_dev = 1
cdn2cia(path, out, title_ver, cdn_dev, cia_dev)

593
utils.py Normal file
View File

@ -0,0 +1,593 @@
from lib.common import *
from lib.ctr_cia import CIAReader, CIABuilder
from lib.ctr_cci import CCIReader, CCIBuilder
from lib.ctr_ncch import NCCHReader, NCCHBuilder
from lib.ctr_exefs import ExeFSReader, ExeFSBuilder
from lib.ctr_romfs import RomFSReader, RomFSBuilder
from lib.ctr_crr import crrReader
from lib.ctr_tmd import TMDReader, TMDBuilder
from lib.ctr_tik import tikReader, tikBuilder
from lib.ctr_cdn import CDNReader
from lib.ctr_cnt import cntReader
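# cia_dev2retail: unpack a dev CIA, rebuild each NCCH content with Secure1 crypto and regen_sig='retail', rebuild the TMD/ticket (keeping the original titlekey) and reassemble a retail CIA.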
def cia_dev2retail(path, out=''):
name = os.path.splitext(os.path.basename(path))[0]
if out == '':
out = f'{name}_retail.cia'
cia = CIAReader(path, dev=1)
cia.extract()
cf = list(cia.files.keys())
cf.remove('cia_header.bin')
cf.remove('cert.bin')
cf.remove('tik')
cf.remove('tmd')
if 'meta.bin' in cf:
meta = 1
cf.remove('meta.bin')
else:
meta = 0
for i in cf:
if i.endswith('.ncch'):
ncch = NCCHReader(i, dev=1)
ncch.extract() # NOTE: no need to resign CRR since CRR body sig will pass (all that matters)
ncch_header = 'ncch_header.bin'
if os.path.isfile('exheader.bin'):
exheader = 'exheader.bin'
else:
exheader = ''
if os.path.isfile('logo.bin'):
logo = 'logo.bin'
else:
logo = ''
if os.path.isfile('plain.bin'):
plain = 'plain.bin'
else:
plain = ''
if os.path.isfile('exefs.bin'):
exefs = 'exefs.bin'
else:
exefs = ''
if os.path.isfile('romfs.bin'):
romfs = 'romfs.bin'
else:
romfs = ''
os.remove(i)
NCCHBuilder(ncch_header=ncch_header, exheader=exheader, logo=logo, plain=plain, exefs=exefs, romfs=romfs, crypto='Secure1', regen_sig='retail', dev=0, out=i)
for j in [ncch_header, exheader, logo, plain, exefs, romfs]:
if j != '':
os.remove(j)
tmd = TMDReader('tmd', dev=1)
TMDBuilder(content_files=cf, content_files_dev=0, titleID=tmd.titleID, title_ver=tmd.hdr.title_ver, save_data_size=tmd.hdr.save_data_size, priv_save_data_size=tmd.hdr.priv_save_data_size, twl_flag=tmd.hdr.twl_flag, crypt=0, regen_sig='retail')
os.remove('tmd')
tik = tikReader('tik', dev=1)
tikBuilder(tik='tik', titlekey=hex(readbe(tik.titlekey))[2:].zfill(32), regen_sig='retail') # Use original (decrypted) titlekey
os.remove('tik')
CIABuilder(content_files=cf, tik='tik_new', tmd='tmd_new', meta=meta, dev=0, out=out)
for i in cf + ['tmd_new', 'tik_new', 'cia_header.bin', 'cert.bin', 'meta.bin']:
if os.path.isfile(i):
os.remove(i)
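# cia_retail2dev: the reverse of cia_dev2retail; NCCHs are rebuilt with dev crypto/signatures, and any static.crr inside a RomFS is re-signed for dev before the RomFS is rebuilt.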
def cia_retail2dev(path, out=''):
name = os.path.splitext(os.path.basename(path))[0]
if out == '':
out = f'{name}_dev.cia'
cia = CIAReader(path, dev=0)
cia.extract()
cf = list(cia.files.keys())
cf.remove('cia_header.bin')
cf.remove('cert.bin')
cf.remove('tik')
cf.remove('tmd')
if 'meta.bin' in cf:
meta = 1
cf.remove('meta.bin')
else:
meta = 0
for i in cf:
if i.endswith('.ncch'):
ncch = NCCHReader(i, dev=0)
ncch.extract()
ncch_header = 'ncch_header.bin'
if os.path.isfile('exheader.bin'):
exheader = 'exheader.bin'
else:
exheader = ''
if os.path.isfile('logo.bin'):
logo = 'logo.bin'
else:
logo = ''
if os.path.isfile('plain.bin'):
plain = 'plain.bin'
else:
plain = ''
if os.path.isfile('exefs.bin'):
exefs = 'exefs.bin'
else:
exefs = ''
if os.path.isfile('romfs.bin'):
romfs = 'romfs.bin'
romfs_rdr = RomFSReader('romfs.bin')
if '.crr/static.crr' in romfs_rdr.files.keys() or '.crr\\static.crr' in romfs_rdr.files.keys():
romfs_rdr.extract()
crr = crrReader('romfs/.crr/static.crr')
crr.regen_sig(dev=1)
os.remove('romfs.bin')
RomFSBuilder(romfs_dir='romfs/', out='romfs.bin')
shutil.rmtree('romfs/')
else:
romfs = ''
os.remove(i)
NCCHBuilder(ncch_header=ncch_header, exheader=exheader, logo=logo, plain=plain, exefs=exefs, romfs=romfs, crypto='Secure1', regen_sig='dev', dev=1, out=i)
for j in [ncch_header, exheader, logo, plain, exefs, romfs]:
if j != '':
os.remove(j)
tmd = TMDReader('tmd', dev=0)
TMDBuilder(content_files=cf, content_files_dev=1, titleID=tmd.titleID, title_ver=tmd.hdr.title_ver, save_data_size=tmd.hdr.save_data_size, priv_save_data_size=tmd.hdr.priv_save_data_size, twl_flag=tmd.hdr.twl_flag, crypt=1, regen_sig='dev')
os.remove('tmd')
tik = tikReader('tik', dev=0)
tikBuilder(tik='tik', titlekey=hex(readbe(tik.titlekey))[2:].zfill(32), regen_sig='dev') # Use original (decrypted) titlekey
os.remove('tik')
CIABuilder(content_files=cf, tik='tik_new', tmd='tmd_new', meta=meta, dev=1, out=out)
for i in cf + ['tmd_new', 'tik_new', 'cia_header.bin', 'cert.bin', 'meta.bin']:
if os.path.isfile(i):
os.remove(i)
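# cci_dev2retail: unpack a dev CCI, rebuild each NCCH partition for retail, then rebuild the CCI with Secure0 cardbus crypto.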
def cci_dev2retail(path, out=''):
name = os.path.splitext(os.path.basename(path))[0]
if out == '':
out = f'{name}_retail.3ds'
cci = CCIReader(path, dev=1)
cci.extract()
parts = list(cci.files.keys())
parts.remove('cci_header.bin')
parts.remove('card_info.bin')
parts.remove('mastering_info.bin')
parts.remove('initialdata.bin')
if 'card_device_info.bin' in parts:
parts.remove('card_device_info.bin')
for i in parts:
if i.endswith('.ncch'):
ncch = NCCHReader(i, dev=1)
ncch.extract() # NOTE: no need to resign CRR since CRR body sig will pass (all that matters)
ncch_header = 'ncch_header.bin'
if os.path.isfile('exheader.bin'):
exheader = 'exheader.bin'
else:
exheader = ''
if os.path.isfile('logo.bin'):
logo = 'logo.bin'
else:
logo = ''
if os.path.isfile('plain.bin'):
plain = 'plain.bin'
else:
plain = ''
if os.path.isfile('exefs.bin'):
exefs = 'exefs.bin'
else:
exefs = ''
if os.path.isfile('romfs.bin'):
romfs = 'romfs.bin'
else:
romfs = ''
os.remove(i)
NCCHBuilder(ncch_header=ncch_header, exheader=exheader, logo=logo, plain=plain, exefs=exefs, romfs=romfs, crypto='Secure1', regen_sig='retail', dev=0, out=i)
for j in [ncch_header, exheader, logo, plain, exefs, romfs]:
if j != '':
os.remove(j)
CCIBuilder(cci_header='cci_header.bin', card_info='card_info.bin', mastering_info='mastering_info.bin', initialdata='', card_device_info='', ncchs=parts, cardbus_crypto='Secure0', regen_sig='retail', dev=0, gen_card_device_info=0, out=out)
for i in parts + ['cci_header.bin', 'card_info.bin', 'mastering_info.bin', 'initialdata.bin', 'card_device_info.bin']:
if os.path.isfile(i):
os.remove(i)
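# cci_retail2dev: unpack a retail CCI, rebuild each NCCH partition for dev (re-signing static.crr where present), then rebuild the CCI with fixed cardbus crypto and a generated card device info.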
def cci_retail2dev(path, out=''):
name = os.path.splitext(os.path.basename(path))[0]
if out == '':
out = f'{name}_dev.3ds'
cci = CCIReader(path, dev=0)
cci.extract()
parts = list(cci.files.keys())
parts.remove('cci_header.bin')
parts.remove('card_info.bin')
parts.remove('mastering_info.bin')
parts.remove('initialdata.bin')
if 'card_device_info.bin' in parts:
parts.remove('card_device_info.bin')
for i in parts:
if i.endswith('.ncch'):
ncch = NCCHReader(i, dev=0)
ncch.extract()
ncch_header = 'ncch_header.bin'
if os.path.isfile('exheader.bin'):
exheader = 'exheader.bin'
else:
exheader = ''
if os.path.isfile('logo.bin'):
logo = 'logo.bin'
else:
logo = ''
if os.path.isfile('plain.bin'):
plain = 'plain.bin'
else:
plain = ''
if os.path.isfile('exefs.bin'):
exefs = 'exefs.bin'
else:
exefs = ''
if os.path.isfile('romfs.bin'):
romfs = 'romfs.bin'
romfs_rdr = RomFSReader('romfs.bin')
if '.crr/static.crr' in romfs_rdr.files.keys() or '.crr\\static.crr' in romfs_rdr.files.keys():
romfs_rdr.extract()
crr = crrReader('romfs/.crr/static.crr')
crr.regen_sig(dev=1)
os.remove('romfs.bin')
RomFSBuilder(romfs_dir='romfs/', out='romfs.bin')
shutil.rmtree('romfs/')
else:
romfs = ''
os.remove(i)
NCCHBuilder(ncch_header=ncch_header, exheader=exheader, logo=logo, plain=plain, exefs=exefs, romfs=romfs, crypto='Secure1', regen_sig='dev', dev=1, out=i)
for j in [ncch_header, exheader, logo, plain, exefs, romfs]:
if j != '':
os.remove(j)
CCIBuilder(cci_header='cci_header.bin', card_info='card_info.bin', mastering_info='mastering_info.bin', initialdata='', card_device_info='', ncchs=parts, cardbus_crypto='fixed', regen_sig='dev', dev=1, gen_card_device_info=1, out=out)
for i in parts + ['cci_header.bin', 'card_info.bin', 'mastering_info.bin', 'initialdata.bin', 'card_device_info.bin']:
if os.path.isfile(i):
os.remove(i)
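# ncch_extractall: extract an NCCH into a folder named after it; the ExeFS and RomFS are unpacked too, and .code is decompressed when bit 0 of the exheader flag byte at offset 0xD is set.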
def ncch_extractall(path, dev=0):
name = os.path.splitext(os.path.basename(path))[0]
os.mkdir(name)
ncch = NCCHReader(path, dev)
ncch.extract()
exefs_code_compress = 0
for i in ['ncch_header.bin', 'exheader.bin', 'logo.bin', 'plain.bin', 'exefs.bin', 'romfs.bin']:
if os.path.isfile(i):
if i == 'exheader.bin':
with open(i, 'rb') as f:
f.seek(0xD)
flag = readle(f.read(1))
if flag & 1:
exefs_code_compress = 1
shutil.move(i, os.path.join(name, i))
os.chdir(name)
# Extract ExeFS
if os.path.isfile('exefs.bin'):
exefs = ExeFSReader('exefs.bin')
exefs.extract(code_compressed=exefs_code_compress)
os.mkdir('exefs')
for i in exefs.files.keys():
shutil.move(i, os.path.join('exefs', i))
if exefs_code_compress:
os.remove(os.path.join('exefs', '.code.bin'))
shutil.move('code-decompressed.bin', os.path.join('exefs', '.code.bin'))
# Extract RomFS
if os.path.isfile('romfs.bin'):
romfs = RomFSReader('romfs.bin')
romfs.extract()
os.chdir('..')
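# macos_clean: strip macOS metadata (AppleDouble ._ files via dot_clean, .DS_Store) so it does not get packed into rebuilt images.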
def macos_clean(path):
subprocess.call(['dot_clean', path], stdout=None, stderr=None)
subprocess.call(['find', path, '-type', 'f', '-name', '.DS_Store', '-exec', 'rm', '{}', ';'], stdout=None, stderr=None)
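# ncch_rebuildall: rebuild an NCCH from a folder produced by ncch_extractall; exefs/ and romfs/ are re-packed first and the output is moved up one directory.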
def ncch_rebuildall(path, dev=0):
os.chdir(path)
name = os.path.basename(os.getcwd())
out = f'{name}.ncch'
if os.path.isdir('exefs/'):
if platform.system() == 'Darwin':
macos_clean('exefs/')
if os.path.isfile('exefs.bin'):
os.remove('exefs.bin')
exefs_code_compress = 0
if os.path.isfile('exheader.bin'):
with open('exheader.bin', 'rb') as f:
f.seek(0xD)
flag = readle(f.read(1))
if flag & 1:
exefs_code_compress = 1
ExeFSBuilder(exefs_dir='exefs/', code_compress=exefs_code_compress)
if os.path.isdir('romfs/'):
if platform.system() == 'Darwin':
macos_clean('romfs/')
if os.path.isfile('romfs.bin'):
os.remove('romfs.bin')
RomFSBuilder(romfs_dir='romfs/')
ncch_header = 'ncch_header.bin'
if os.path.isfile('exheader.bin'):
exheader = 'exheader.bin'
else:
exheader = ''
if os.path.isfile('logo.bin'):
logo = 'logo.bin'
else:
logo = ''
if os.path.isfile('plain.bin'):
plain = 'plain.bin'
else:
plain = ''
if os.path.isfile('exefs.bin'):
exefs = 'exefs.bin'
else:
exefs = ''
if os.path.isfile('romfs.bin'):
romfs = 'romfs.bin'
else:
romfs = ''
NCCHBuilder(ncch_header=ncch_header, exheader=exheader, logo=logo, plain=plain, exefs=exefs, romfs=romfs, dev=dev, out=out)
if not os.path.isfile(f'../{out}'):
shutil.move(out, f'../{out}')
else:
shutil.move(out, f'../{name} (new).ncch')
os.chdir('..')
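# cci_extractall: extract a CCI into a folder; every NCCH partition is then unpacked with ncch_extractall.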
def cci_extractall(path, dev=0):
name = os.path.splitext(os.path.basename(path))[0]
os.mkdir(name)
cci = CCIReader(path, dev)
cci.extract()
for i in cci.files.keys():
shutil.move(i, os.path.join(name, i))
if i.endswith('.ncch'):
os.chdir(name)
ncch_extractall(i)
os.chdir('..')
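# cci_rebuildall: rebuild a CCI from an extracted folder; each subfolder is rebuilt into its NCCH partition first.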
def cci_rebuildall(path, dev=0):
os.chdir(path)
name = os.path.basename(os.getcwd())
out = f'{name}.3ds'
ncchs = []
card_device_info = ''
if os.path.isfile('card_device_info.bin'):
card_device_info = 'card_device_info.bin'
for i in os.listdir('.'):
if os.path.isdir(i):
ncchs.append(f'{i}.ncch')
if os.path.isfile(f'{i}.ncch'):
os.remove(f'{i}.ncch')
ncch_rebuildall(i, dev)
CCIBuilder(cci_header='cci_header.bin', card_info='card_info.bin', mastering_info='mastering_info.bin', initialdata='initialdata.bin', card_device_info=card_device_info, ncchs=ncchs, dev=dev, out=out)
if not os.path.isfile(f'../{out}'):
shutil.move(out, f'../{out}')
else:
shutil.move(out, f'../{name} (new).3ds')
os.chdir('..')
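# cia_extractall: extract a CIA into a folder; NCCH contents are then unpacked with ncch_extractall.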
def cia_extractall(path, dev=0):
name = os.path.splitext(os.path.basename(path))[0]
os.mkdir(name)
cia = CIAReader(path, dev)
cia.extract()
for i in cia.files.keys():
shutil.move(i, os.path.join(name, i))
if i.endswith('.ncch'):
os.chdir(name)
ncch_extractall(i)
os.chdir('..')
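# cia_rebuildall: rebuild a CIA from an extracted folder; subfolders are rebuilt into NCCHs and loose .nds contents are included as-is.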
def cia_rebuildall(path, dev=0):
os.chdir(path)
name = os.path.basename(os.getcwd())
out = f'{name}.cia'
cf = []
meta = 0
if os.path.isfile('meta.bin'):
meta = 1
for i in os.listdir('.'):
if os.path.isdir(i) or (os.path.isfile(i) and i.endswith('.nds')):
if os.path.isdir(i):
cf.append(f'{i}.ncch')
if os.path.isfile(f'{i}.ncch'):
os.remove(f'{i}.ncch')
ncch_rebuildall(i, dev)
else:
cf.append(i)
CIABuilder(certs='cert.bin', content_files=cf, tik='tik', tmd='tmd', meta=meta, dev=dev, out=out)
if not os.path.isfile(f'../{out}'):
shutil.move(out, f'../{out}')
else:
shutil.move(out, f'../{name} (new).cia')
os.chdir('..')
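# cci2cia: convert a CCI to a CIA; the update partitions (content6/content7) are dropped, the SDApplication bit is set in the game's exheader, partitions are renamed to CIA content indices and a fresh TMD/ticket is built.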
def cci2cia(path, out='', cci_dev=0, cia_dev=0):
name = os.path.splitext(os.path.basename(path))[0]
if out == '':
out = f'{name}_conv.cia'
cci = CCIReader(path, cci_dev)
cci.extract()
ncchs = [i for i in cci.files.keys() if i.endswith('.ncch')]
for i in ['content6.update_n3ds.ncch', 'content7.update_o3ds.ncch']:
if i in ncchs:
ncchs.remove(i)
os.remove(i)
if cia_dev == 0:
regen_sig = 'retail'
else:
regen_sig = 'dev'
for i in ncchs:
n = NCCHReader(i, dev=cci_dev)
n.extract()
os.remove(i)
if i.startswith('content0'):
with open('exheader.bin', 'r+b') as f:
f.seek(0xD)
flag = readle(f.read(1))
f.seek(0xD)
f.write(int8tobytes(flag | 2)) # Set SDApplication bit
ncch_header = 'ncch_header.bin'
if os.path.isfile('exheader.bin'):
exheader = 'exheader.bin'
else:
exheader = ''
if os.path.isfile('logo.bin'):
logo = 'logo.bin'
else:
logo = ''
if os.path.isfile('plain.bin'):
plain = 'plain.bin'
else:
plain = ''
if os.path.isfile('exefs.bin'):
exefs = 'exefs.bin'
else:
exefs = ''
if os.path.isfile('romfs.bin'):
romfs = 'romfs.bin'
else:
romfs = ''
NCCHBuilder(ncch_header=ncch_header, exheader=exheader, logo=logo, plain=plain, exefs=exefs, romfs=romfs, regen_sig=regen_sig, dev=cia_dev, out=i)
for j in [ncch_header, exheader, logo, plain, exefs, romfs]:
if j != '':
os.remove(j)
cf = []
d = {
'content0.game.ncch': '0000.00000000.ncch',
'content1.manual.ncch': '0001.00000001.ncch',
'content2.dlp.ncch': '0002.00000002.ncch'
}
for i in ncchs:
cf.append(d[i])
shutil.move(i, d[i])
TMDBuilder(content_files=cf, content_files_dev=cia_dev, titleID=hex(readle(cci.hdr.mediaID))[2:].zfill(16), title_ver=0, crypt=0, regen_sig=regen_sig, out='tmd')
tikBuilder(titleID=hex(readle(cci.hdr.mediaID))[2:].zfill(16), title_ver=0, regen_sig=regen_sig, out='tik')
CIABuilder(content_files=cf, tik='tik', tmd='tmd', meta=1, dev=cia_dev, out=out)
for i in ['cci_header.bin', 'card_info.bin', 'mastering_info.bin', 'initialdata.bin', 'card_device_info.bin', 'tmd', 'tik'] + cf:
if os.path.exists(i):
os.remove(i)
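# cdn2cia: build a CIA from CDN contents; the TMD is chosen by --title-ver (latest otherwise), the cetk is reused as the ticket or one is generated, and the decrypted contents are packed.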
def cdn2cia(path, out='', title_ver='', cdn_dev=0, cia_dev=0):
os.chdir(path)
name = os.path.basename(os.getcwd())
content_files = []
tmds = []
tmd = ''
tik = ''
for i in os.listdir('.'):
if i.startswith('tmd'):
tmds.append(i)
elif i == 'cetk':
tik = i
elif i.startswith('0'):
content_files.append(i)
if len(tmds) == 1: # If only one tmd in CDN dir, use it
tmd = tmds[0]
else:
tmds.sort(key=lambda h: int(h.split('.')[1]))
if title_ver == '': # If title version not provided, use latest one
tmd = tmds[-1]
else:
tmd = f'tmd.{title_ver}'
if cia_dev == 0:
regen_sig = 'retail'
else:
regen_sig = 'dev'
t = TMDReader(tmd)
if out == '':
out = f'{name}.{t.hdr.title_ver}.cia'
if tik == '':
tikBuilder(titleID=t.titleID, title_ver=t.hdr.title_ver, regen_sig=regen_sig, out='tik')
tik = 'tik'
cdn = CDNReader(content_files=content_files, tmd=tmd, tik=tik, dev=cdn_dev)
cdn.decrypt()
cf = [i for i in os.listdir('.') if i.endswith('.ncch') or i.endswith('.nds')]
CIABuilder(content_files=cf, tik=tik, tmd=tmd, meta=1, dev=cia_dev, out=out)
for i in cf:
os.remove(i)
if os.path.isfile('tik'):
os.remove('tik')
if not os.path.isfile(f'../{out}'):
shutil.move(out, f'../{out}')
else:
shutil.move(out, f'../{name}.{t.hdr.title_ver} (new).cia')
os.chdir('..')
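# csu2retailcias: unpack a dev .csu updater (CCI -> content0 NCCH -> RomFS -> Contents.cnt), which yields an updates/ folder of CIAs, then convert each one to retail with cia_dev2retail.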
def csu2retailcias(path, out=''):
if out == '':
out = 'updates_retail/'
cci = CCIReader(path, dev=1)
cci.extract()
n = NCCHReader('content0.game.ncch', dev=1)
n.extract()
romfs = RomFSReader('romfs.bin')
romfs.extract()
cnt = cntReader('romfs/contents/CupList', 'romfs/contents/Contents.cnt')
cnt.extract()
for i in ['cci_header.bin', 'card_info.bin', 'mastering_info.bin', 'initialdata.bin', 'card_device_info.bin', 'content0.game.ncch', 'ncch_header.bin', 'exheader.bin', 'logo.bin', 'plain.bin', 'exefs.bin', 'romfs.bin']:
if os.path.exists(i):
os.remove(i)
shutil.rmtree('romfs/')
if not os.path.isdir(out):
os.mkdir(out)
for i in os.listdir('updates/'):
cia_dev2retail(path=os.path.join('updates/', i), out=os.path.join(out, i))
shutil.rmtree('updates/')