mirror of
https://github.com/JJTech0130/pypush.git
synced 2025-01-22 11:18:29 +00:00
Rewrite: APNs from the ground up (#100)
Removes everything except APNs for now. Don't worry, it will be coming back!
This commit is contained in:
parent
902965a410
commit
b1c30a98ff
2
.gitignore
vendored
2
.gitignore
vendored
@ -1,6 +1,8 @@
|
||||
config.json
|
||||
IMDAppleServices
|
||||
|
||||
pypush/_version.py
|
||||
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
|
39
README.md
39
README.md
@ -1,5 +1,5 @@
|
||||
> [!WARNING]
|
||||
> Pypush is broken at the moment. We thank you for your continued support of the project! The Pypush demo will not work without significant modification to the code to remediate challenges posed as a response to third party iMessage clients growing in popularity.
|
||||
> `pypush` is undergoing a major rewrite. The current version is not stable and may not work as expected.
|
||||
|
||||
# pypush
|
||||
`pypush` is a POC demo of my recent iMessage reverse-engineering.
|
||||
@ -8,33 +8,18 @@ It can currently register as a new device on an Apple ID, set up encryption keys
|
||||
`pypush` is completely platform-independent, and does not require a Mac or other Apple device to use!
|
||||
|
||||
## Installation
|
||||
It's pretty self explanatory:
|
||||
1. `git clone https://github.com/JJTech0130/pypush`
|
||||
2. If on a Mac, ensure `cmake` is installed. Otherwise, run `brew install cmake`
|
||||
3. `pip3 install -r requirements.txt`
|
||||
4. `python3 ./demo.py`
|
||||
|
||||
## Troubleshooting
|
||||
If you have any issues, please join [the Discord](https://discord.gg/BVvNukmfTC) and ask for help.
|
||||
|
||||
## Operation
|
||||
`pypush` will generate a `config.json` in the repository when you run demo.py. DO NOT SHARE THIS FILE.
|
||||
It contains all the encryption keys necessary to log into your Apple ID and send iMessages as you.
|
||||
|
||||
Once it loads, it should prompt you with `>>`. Type `help` and press enter for a list of supported commands.
|
||||
|
||||
## Special Notes
|
||||
### Unicorn dependency
|
||||
`pypush` currently uses the Unicorn CPU emulator and a custom MachO loader to load a framework from an old version of macOS,
|
||||
in order to call some obfuscated functions.
|
||||
|
||||
This is only necessary during initial registration, so theoretically you can register on one device, and then copy the `config.json`
|
||||
to another device that doesn't support the Unicorn emulator. Or you could switch out the emulator for another x86 emulator if you really wanted to.
|
||||
|
||||
## "data.plist" and Mac serial numbers
|
||||
This repository contains a sample [`data.plist`](https://github.com/JJTech0130/pypush/blob/main/emulated/data.plist), which contains the serial number and several other identifiers from a real Mac device. If you run into issues related to rate-limiting or messages failing to deliver, you may regenerate this file by cloning [nacserver](https://github.com/JJTech0130/nacserver) and running `build.sh` on a non-M1 Mac. It should place the generated file in the current directory, which you can then copy to the emulated/ folder in pypush.
|
||||
Simple installation:
|
||||
```bash
|
||||
pip install git+https://github.com/JJTech0130/pypush
|
||||
```
|
||||
Editable installation (for development):
|
||||
```bash
|
||||
git clone https://github.com/JJTech0130/pypush
|
||||
cd pypush
|
||||
pip install -e .
|
||||
```
|
||||
|
||||
## Licensing
|
||||
This project is licensed under the terms of the [SSPL](https://www.mongodb.com/licensing/server-side-public-license). Portions of this project are based on [macholibre by Aaron Stephens](https://github.com/aaronst/macholibre/blob/master/LICENSE) under the Apache 2.0 license.
|
||||
This project is licensed under the terms of the [SSPL](https://www.mongodb.com/licensing/server-side-public-license)
|
||||
|
||||
This project has been purchased by [Beeper](https://github.com/beeper), please contact them with any questions about licensing.
|
||||
|
323
apns.py
323
apns.py
@ -1,323 +0,0 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import random
|
||||
import socket
|
||||
import threading
|
||||
import time
|
||||
from hashlib import sha1
|
||||
from base64 import b64encode, b64decode
|
||||
import logging
|
||||
logger = logging.getLogger("apns")

import tlslite
# pypush depends on ALPN behavior from this exact prerelease of tlslite-ng;
# other versions are known to break the APNs handshake.
if tlslite.__version__ != "0.8.0-alpha43":
    logger.warning("tlslite-ng is not the correct version!")
    logger.warning("Please install tlslite-ng==0.8.0a43 or you will experience issues!")

import albert
import bags

#COURIER_HOST = "windows.courier.push.apple.com" # TODO: Get this from config
# Pick a random courier server from 01 to APNSCourierHostcount
# (hostname pieces come from the APNs init bag fetched at import time).
COURIER_HOST = f"{random.randint(1, bags.apns_init_bag()['APNSCourierHostcount'])}-{bags.apns_init_bag()['APNSCourierHostname']}"
COURIER_PORT = 5223
# ALPN protocol identifier Apple expects for the v2 push protocol.
ALPN = [b"apns-security-v2"]
|
||||
|
||||
|
||||
# Open a mutually-authenticated TLS connection to an APNs courier host.
def _connect(private_key: str, cert: str) -> tlslite.TLSConnection:
    """Dial the courier server and complete a client-certificate handshake.

    `private_key` and `cert` are PEM strings (the Albert-issued push pair).
    Returns the established tlslite.TLSConnection.
    """
    raw_sock = socket.create_connection((COURIER_HOST, COURIER_PORT))
    tls_sock = tlslite.TLSConnection(raw_sock)

    # Load the push certificate chain and key from their PEM forms.
    cert_chain = tlslite.X509CertChain([tlslite.X509().parse(cert)])
    parsed_key = tlslite.parsePEMKey(private_key, private=True)

    tls_sock.handshakeClientCert(cert_chain, parsed_key, alpn=ALPN)

    logger.info(f"Connected to APNs ({COURIER_HOST})")

    return tls_sock
|
||||
|
||||
|
||||
class IncomingQueue:
    """Thread-safe list of payloads received from APNs.

    The background reader thread appends; consumers search, pop, and prune.
    Every operation holds a single mutex for its full duration.
    """

    def __init__(self):
        self.queue = []
        self.lock = threading.Lock()

    def append(self, item):
        with self.lock:
            self.queue.append(item)

    def pop(self, index=-1):
        with self.lock:
            return self.queue.pop(index)

    def __getitem__(self, index):
        with self.lock:
            return self.queue[index]

    def __len__(self):
        with self.lock:
            return len(self.queue)

    def find(self, finder):
        # First item matching the predicate, or None; item stays queued.
        with self.lock:
            for candidate in self.queue:
                if finder(candidate):
                    return candidate
            return None

    def pop_find(self, finder):
        # Atomically locate and remove the first match (None if no match).
        with self.lock:
            for candidate in self.queue:
                if finder(candidate):
                    self.queue.remove(candidate)
                    return candidate
            return None

    def remove_all(self, id):
        # Drop every payload whose command byte equals `id`.
        with self.lock:
            self.queue = [item for item in self.queue if item[0] != id]

    def wait_pop_find(self, finder, delay=0.1):
        # Poll (busy-wait with sleep) until a matching payload arrives.
        while True:
            match = self.pop_find(finder)
            if match is not None:
                return match
            time.sleep(delay)
|
||||
|
||||
|
||||
class APNSConnection:
    """Persistent connection to an APNs courier implementing the low-level
    binary protocol: connect (7/8), topic filter (9), send (0x0A), ack (0x0B),
    keep-alive (0x0C/0x0D), and state (0x14)."""

    # NOTE(review): this is a CLASS attribute, so the initial queue object is
    # shared by every APNSConnection instance until sink() rebinds it on one
    # instance — confirm this is intentional before using multiple connections.
    incoming_queue = IncomingQueue()

    # Sink everything in the queue
    def sink(self):
        # Discards all buffered payloads by swapping in a fresh (instance) queue.
        self.incoming_queue = IncomingQueue()

    def _queue_filler(self):
        # Background thread: read framed payloads off the socket until closed.
        while True and not self.sock.closed:
            payload = _deserialize_payload(self.sock)

            if payload is not None:
                # Automatically ACK incoming notifications to prevent APNs from getting mad at us
                if payload[0] == 0x0A:
                    logger.debug("Sending automatic ACK")
                    self._send_ack(_get_field(payload[1], 4))
                logger.debug(f"Received payload: {payload}")
                self.incoming_queue.append(payload)
                logger.debug(f"Queue length: {len(self.incoming_queue)}")

    def _keep_alive_loop(self):
        # Background thread: ping the courier every 5 minutes while connected.
        while True and not self.sock.closed:
            time.sleep(300)
            self._keep_alive()

    def __init__(self, private_key=None, cert=None):
        # Generate the private key and certificate if they're not provided
        if private_key is None or cert is None:
            logger.debug("APNs needs a new push certificate")
            self.private_key, self.cert = albert.generate_push_cert()
        else:
            self.private_key, self.cert = private_key, cert

        self.sock = _connect(self.private_key, self.cert)

        # Daemon threads so they die with the main program.
        self.queue_filler_thread = threading.Thread(
            target=self._queue_filler, daemon=True
        )
        self.queue_filler_thread.start()

        self.keep_alive_thread = threading.Thread(
            target=self._keep_alive_loop, daemon=True
        )
        self.keep_alive_thread.start()

    def connect(self, root: bool = True, token: bytes | None = None):
        """Send the connect message (command 7), wait for the response
        (command 8), store and return the push token.

        Raises on a non-zero status byte or when no token can be determined.
        """
        if token is None:
            logger.debug(f"Sending connect message without token (root={root})")
        else:
            logger.debug(f"Sending connect message with token {b64encode(token).decode()} (root={root})")
        flags = 0b01000001
        if root:
            flags |= 0b0100

        if token is None:
            payload = _serialize_payload(
                7, [(2, 0x01.to_bytes(1, "big")), (5, flags.to_bytes(4, "big"))]
            )
        else:
            payload = _serialize_payload(
                7,
                [
                    (1, token),
                    (2, 0x01.to_bytes(1, "big")),
                    (5, flags.to_bytes(4, "big")),
                ],
            )

        self.sock.write(payload)

        # Block until the reader thread queues the connect response.
        payload = self.incoming_queue.wait_pop_find(lambda i: i[0] == 8)

        # NOTE(review): `== None` should be `is None` per PEP 8.
        if (
            payload == None
            or payload[0] != 8
            or _get_field(payload[1], 1) != 0x00.to_bytes(1, "big")
        ):
            raise Exception("Failed to connect")

        # Prefer the server-issued token; fall back to the one we supplied.
        new_token = _get_field(payload[1], 3)
        if new_token is not None:
            self.token = new_token
        elif token is not None:
            self.token = token
        else:
            raise Exception("No token")

        logger.debug(f"Recieved connect response with token {b64encode(self.token).decode()}")

        return self.token

    def filter(self, topics: list[str]):
        # Subscribe to topics (command 9); topics go over as SHA-1 digests.
        logger.debug(f"Sending filter message with topics {topics}")
        fields = [(1, self.token)]

        for topic in topics:
            fields.append((2, sha1(topic.encode()).digest()))

        payload = _serialize_payload(9, fields)

        self.sock.write(payload)

    def send_message(self, topic: str, payload: str, id=None):
        """Send a notification (command 0x0A) and block until it is ACKed.

        A random 4-byte message id is generated when none is given.
        Raises if the ACK carries a non-zero status.
        """
        logger.debug(f"Sending message to topic {topic} with payload {payload}")
        if id is None:
            id = random.randbytes(4)

        payload = _serialize_payload(
            0x0A,
            [
                (4, id),
                (1, sha1(topic.encode()).digest()),
                (2, self.token),
                (3, payload),
            ],
        )

        self.sock.write(payload)

        # Wait for ACK
        payload = self.incoming_queue.wait_pop_find(lambda i: i[0] == 0x0B)

        # First field's value is the status byte; 0x00 means delivered.
        if payload[1][0][1] != 0x00.to_bytes(1, "big"):
            raise Exception("Failed to send message")

    def set_state(self, state: int):
        # Report client state (command 0x14).
        logger.debug(f"Sending state message with state {state}")
        self.sock.write(
            _serialize_payload(
                0x14,
                [(1, state.to_bytes(1, "big")), (2, 0x7FFFFFFF.to_bytes(4, "big"))],
            )
        )

    def _keep_alive(self):
        # Ping (command 0x0C); responses (0x0D) are pruned, never consumed.
        logger.debug("Sending keep alive message")
        self.sock.write(_serialize_payload(0x0C, []))
        # Remove any keep alive responses we have or missed
        self.incoming_queue.remove_all(0x0D)

    def _send_ack(self, id: bytes):
        # Acknowledge a received notification (command 0x0B, status 0x00).
        logger.debug(f"Sending ACK for message {id}")
        payload = _serialize_payload(0x0B, [(1, self.token), (4, id), (8, b"\x00")])
        self.sock.write(payload)
        # #self.sock.write(_serialize_payload(0x0B, [(4, id)])
        # #pass

    # def recieve_message(self):
    #     payload = self.incoming_queue.wait_pop_find(lambda i: i[0] == 0x0A)
    #     # Send ACK
    #     self._send_ack(_get_field(payload[1], 4))
    #     return _get_field(payload[1], 3)

    # TODO: Find a way to make this non-blocking
    # def expect_message(self) -> tuple[int, list[tuple[int, bytes]]] | None:
    #    return _deserialize_payload(self.sock)
|
||||
|
||||
|
||||
def _serialize_field(id: int, value: bytes) -> bytes:
|
||||
return id.to_bytes(1, "big") + len(value).to_bytes(2, "big") + value
|
||||
|
||||
|
||||
def _serialize_payload(id: int, fields: list[(int, bytes)]) -> bytes:
|
||||
payload = b""
|
||||
|
||||
for fid, value in fields:
|
||||
if fid is not None:
|
||||
payload += _serialize_field(fid, value)
|
||||
|
||||
return id.to_bytes(1, "big") + len(payload).to_bytes(4, "big") + payload
|
||||
|
||||
|
||||
def _deserialize_field(stream: bytes) -> tuple[int, bytes]:
|
||||
id = int.from_bytes(stream[:1], "big")
|
||||
length = int.from_bytes(stream[1:3], "big")
|
||||
value = stream[3 : 3 + length]
|
||||
return id, value
|
||||
|
||||
|
||||
# Note: Takes a stream, not a buffer, as we do not know the length of the payload
# WILL BLOCK IF THE STREAM IS EMPTY
def _deserialize_payload(stream) -> tuple[int, list[tuple[int, bytes]]] | None:
    """Read one framed APNs payload from a blocking stream.

    Returns (command_id, fields), or None when the command byte is 0
    (which is also what a closed stream's empty read decodes to).
    """
    command = int.from_bytes(stream.read(1), "big")

    if command == 0x0:
        return None

    body_len = int.from_bytes(stream.read(4), "big")
    body = stream.read(body_len)

    # Walk the TLV fields packed back-to-back in the body.
    fields = []
    while body:
        fid, value = _deserialize_field(body)
        fields.append((fid, value))
        body = body[3 + len(value) :]

    return command, fields
|
||||
|
||||
|
||||
def _deserialize_payload_from_buffer(
    buffer: bytes,
) -> tuple[int, list[tuple[int, bytes]]] | None:
    """Like _deserialize_payload, but parses an in-memory buffer.

    Returns (command_id, fields); None for command byte 0; raises when the
    buffer holds fewer bytes than the declared payload length.
    """
    command = int.from_bytes(buffer[:1], "big")

    if command == 0x0:
        return None

    declared_len = int.from_bytes(buffer[1:5], "big")
    body = buffer[5:]

    if len(body) < declared_len:
        raise Exception("Buffer is too short")

    # Parse every remaining TLV field (the whole tail, as before).
    fields = []
    while body:
        fid, value = _deserialize_field(body)
        fields.append((fid, value))
        body = body[3 + len(value) :]

    return command, fields
|
||||
|
||||
|
||||
# Returns the value of the first field with the given id
|
||||
def _get_field(fields: list[tuple[int, bytes]], id: int) -> bytes:
|
||||
for field_id, value in fields:
|
||||
if field_id == id:
|
||||
return value
|
||||
return None
|
86
bags.py
86
bags.py
@ -1,86 +0,0 @@
|
||||
import plistlib
|
||||
|
||||
import requests
|
||||
import logging
|
||||
logger = logging.getLogger("bags")
|
||||
|
||||
OLD_APNS_BAG = None
def apns_init_bag_old():
    """Fetch (and memoize for the process lifetime) the old-style APNs init bag."""
    global OLD_APNS_BAG

    # Serve the cached copy after the first successful fetch.
    if OLD_APNS_BAG is not None:
        return OLD_APNS_BAG

    # NOTE(review): verify=False disables TLS certificate validation —
    # presumably needed for Apple's certificate chain, but confirm.
    response = requests.get("https://init.push.apple.com/bag", verify=False)
    if response.status_code != 200:
        raise Exception("Failed to get APNs init bag")

    # The response body is a plist.
    bag = plistlib.loads(response.content)

    logger.debug("Received APNs old-style init bag")

    OLD_APNS_BAG = bag
    return bag
|
||||
|
||||
|
||||
# This is the same as the above, but the response has a signature which we unwrap
APNS_BAG = None
def apns_init_bag():
    """Fetch (and memoize) the new-style APNs init bag, unwrapping its envelope."""
    global APNS_BAG

    if APNS_BAG is not None:
        # Already fetched during this run.
        return APNS_BAG

    # NOTE(review): plain-http endpoint with verify=False; the payload carries
    # a signature but it is not checked here — confirm acceptable.
    response = requests.get("http://init-p01st.push.apple.com/bag", verify=False)
    if response.status_code != 200:
        raise Exception("Failed to get APNs init bag 2")

    # Outer plist wraps the real bag under its "bag" key.
    envelope = plistlib.loads(response.content)
    bag = plistlib.loads(envelope["bag"])

    logger.debug("Received APNs new init bag")

    APNS_BAG = bag
    return bag
|
||||
|
||||
|
||||
IDS_BAG = None
def ids_bag():
    """Fetch (and memoize) the IDS service bag from Apple's ESS endpoint."""
    global IDS_BAG

    if IDS_BAG is not None:
        return IDS_BAG

    # NOTE(review): verify=False disables TLS certificate validation — confirm
    # this is actually required for this endpoint.
    response = requests.get(
        "https://init.ess.apple.com/WebObjects/VCInit.woa/wa/getBag?ix=3", verify=False
    )
    if response.status_code != 200:
        raise Exception("Failed to get IDS bag")

    # Parse the outer plist, then load the inner bag it wraps.
    envelope = plistlib.loads(response.content)
    bag = plistlib.loads(envelope["bag"])

    logger.debug("Recieved IDS bag")

    IDS_BAG = bag
    return bag
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Ad-hoc exploration helper: print every string value in the IDS bag that
    # looks like a URL.
    # config = get_config()
    # print(config)
    # print(apns_init_bag_2())
    # print(apns_init_bag_2() == apns_init_bag())
    bag = ids_bag()
    for key in bag:
        # print(key)
        # print(bag[key])
        if type(bag[key]) == str:
            if "http" in bag[key]:
                print(key, bag[key])
|
248
demo.py
248
demo.py
@ -1,248 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
from base64 import b64decode, b64encode
|
||||
from getpass import getpass
|
||||
|
||||
from rich.logging import RichHandler
|
||||
|
||||
import apns
|
||||
import ids
|
||||
import imessage
|
||||
|
||||
# Log everything to the rich handler; per-module levels are tightened below.
logging.basicConfig(
    level=logging.NOTSET, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()]
)

# Set sane log levels
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("py.warnings").setLevel(logging.ERROR)  # Ignore warnings from urllib3
logging.getLogger("asyncio").setLevel(logging.WARNING)
logging.getLogger("jelly").setLevel(logging.INFO)
logging.getLogger("nac").setLevel(logging.INFO)
logging.getLogger("apns").setLevel(logging.INFO)
logging.getLogger("albert").setLevel(logging.INFO)
logging.getLogger("ids").setLevel(logging.DEBUG)
logging.getLogger("bags").setLevel(logging.INFO)
logging.getLogger("imessage").setLevel(logging.DEBUG)

# Route warnings.warn() through logging so the levels above apply.
logging.captureWarnings(True)

# Try and load config.json
try:
    with open("config.json", "r") as f:
        CONFIG = json.load(f)
except FileNotFoundError:
    # First run: start empty and register everything from scratch.
    CONFIG = {}

# Reuse the saved push key/cert when present; with None values
# APNSConnection generates a fresh pair via Albert.
conn = apns.APNSConnection(
    CONFIG.get("push", {}).get("key"), CONFIG.get("push", {}).get("cert")
)
|
||||
|
||||
|
||||
def safe_b64decode(s):
    """Decode base64 `s`, returning None when `s` is None or not valid base64.

    The original used a bare `except:`, which also swallowed KeyboardInterrupt
    and SystemExit. b64decode can only raise binascii.Error (a ValueError
    subclass) for malformed input, or TypeError for None/non-bytes-like input,
    so catch exactly those.
    """
    try:
        return b64decode(s)
    except (TypeError, ValueError):
        return None
|
||||
|
||||
|
||||
# Connect with the saved push token when we have one (None -> fresh connect).
conn.connect(token=safe_b64decode(CONFIG.get("push", {}).get("token")))
conn.set_state(1)
# Only listen for iMessage ("Madrid") traffic.
conn.filter(["com.apple.madrid"])

user = ids.IDSUser(conn)

# Restore saved IDS authentication when present; otherwise prompt and log in.
if CONFIG.get("auth", {}).get("cert") is not None:
    auth_keypair = ids._helpers.KeyPair(CONFIG["auth"]["key"], CONFIG["auth"]["cert"])
    user_id = CONFIG["auth"]["user_id"]
    handles = CONFIG["auth"]["handles"]
    user.restore_authentication(auth_keypair, user_id, handles)
else:
    username = input("Username: ")
    password = getpass("Password: ")

    user.authenticate(username, password)
|
||||
|
||||
# Reuse saved encryption keys when present (None values -> new keys generated).
user.encryption_identity = ids.identity.IDSIdentity(
    encryption_key=CONFIG.get("encryption", {}).get("rsa_key"),
    signing_key=CONFIG.get("encryption", {}).get("ec_key"),
)

if (
    CONFIG.get("id", {}).get("cert") is not None
    and user.encryption_identity is not None
):
    id_keypair = ids._helpers.KeyPair(CONFIG["id"]["key"], CONFIG["id"]["cert"])
    user.restore_identity(id_keypair)
else:
    logging.info("Registering new identity...")
    # Validation data needs the emulated Apple framework; import lazily since
    # it is only required for first-time registration.
    import emulated.nac

    vd = emulated.nac.generate_validation_data()
    vd = b64encode(vd).decode()

    user.register(vd)

logging.info("Waiting for incoming messages...")

# Write config.json
# Persist every credential so subsequent runs skip registration entirely.
CONFIG["encryption"] = {
    "rsa_key": user.encryption_identity.encryption_key,
    "ec_key": user.encryption_identity.signing_key,
}
CONFIG["id"] = {
    "key": user._id_keypair.key,
    "cert": user._id_keypair.cert,
}
CONFIG["auth"] = {
    "key": user._auth_keypair.key,
    "cert": user._auth_keypair.cert,
    "user_id": user.user_id,
    "handles": user.handles,
}
CONFIG["push"] = {
    "token": b64encode(user.push_connection.token).decode(),
    "key": user.push_connection.private_key,
    "cert": user.push_connection.cert,
}

with open("config.json", "w") as f:
    json.dump(CONFIG, f, indent=4)

im = imessage.iMessageUser(conn, user)

# Commands typed by the user are handed to the main loop via this queue.
INPUT_QUEUE = apns.IncomingQueue()
|
||||
|
||||
def input_thread():
    """Read user commands forever and push each one onto INPUT_QUEUE."""
    from prompt_toolkit import prompt

    while True:
        try:
            line = prompt('>> ')
        except:
            # Deliberately broad: EOF/Ctrl-C/terminal errors all become 'quit'.
            line = 'quit'
        INPUT_QUEUE.append(line)
|
||||
|
||||
# Read stdin on a background thread so the main loop can keep polling APNs.
threading.Thread(target=input_thread, daemon=True).start()

print("Type 'help' for help")
|
||||
|
||||
def fixup_handle(handle):
    """Normalize a user-typed handle to tel:+<number> or mailto:<address>.

    Mirrors the original heuristics exactly — including the implicit None
    result for digit-only handles that are not 10 or 11 characters long.
    """
    if handle.startswith('tel:+') or handle.startswith('mailto:'):
        # Already fully qualified.
        return handle
    if handle.startswith('tel:'):
        return 'tel:+' + handle[4:]
    if handle.startswith('+'):
        return 'tel:' + handle
    if handle[0].isdigit():
        # Bare numbers are assumed US: 10 digits -> prepend +1,
        # 11 digits -> assume a leading country code, prepend +.
        if len(handle) == 10:
            return 'tel:+1' + handle
        if len(handle) == 11:
            return 'tel:+' + handle
        return None
    # Anything else is assumed to be an email address.
    return 'mailto:' + handle
|
||||
|
||||
# REPL state: recipients of the next message, and a one-shot message effect.
current_participants = []
current_effect = None
while True:
    # Poll for an incoming iMessage (non-blocking; None when nothing arrived).
    msg = im.receive()
    if msg is not None:
        # print(f'[{msg.sender}] {msg.text}')
        print(msg.to_string())

        # Save any attachments under attachments/<message id>/.
        attachments = msg.attachments()
        if len(attachments) > 0:
            attachments_path = f"attachments/{msg.id}/"
            os.makedirs(attachments_path, exist_ok=True)

            for attachment in attachments:
                with open(attachments_path + attachment.name, "wb") as attachment_file:
                    attachment_file.write(attachment.versions[0].data())

            print(f"({len(attachments)} attachment{'s have' if len(attachments) != 1 else ' has'} been downloaded and put "
                f"in {attachments_path})")

    # Handle one queued user command per iteration.
    if len(INPUT_QUEUE) > 0:
        msg = INPUT_QUEUE.pop()
        if msg == '': continue
        if msg == 'help' or msg == 'h':
            print('help (h): show this message')
            print('quit (q): quit')
            #print('send (s) [recipient] [message]: send a message')
            print('filter (f) [recipient]: set the current chat')
            print('effect (e): adds an iMessage effect to the next sent message')
            print('note: recipient must start with tel: or mailto: and include the country code')
            print('handle <handle>: set the current handle (for sending messages)')
            print('\\: escape commands (will be removed from message)')
        elif msg == 'quit' or msg == 'q':
            break
        elif msg == 'effect' or msg == 'e' or msg.startswith("effect ") or msg.startswith("e "):
            msg = msg.split(" ")
            if len(msg) < 2 or msg[1] == "":
                print("effect [effect namespace]")
            else:
                print(f"next message will be sent with [{msg[1]}]")
                current_effect = msg[1]
        elif msg == 'filter' or msg == 'f' or msg.startswith('filter ') or msg.startswith('f '):
            # Set the current chat
            msg = msg.split(' ')
            if len(msg) < 2 or msg[1] == '':
                print('filter [recipients]')
            else:
                print(f'Filtering to {[fixup_handle(h) for h in msg[1:]]}')
                current_participants = [fixup_handle(h) for h in msg[1:]]
        elif msg == 'handle' or msg.startswith('handle '):
            msg = msg.split(' ')
            if len(msg) < 2 or msg[1] == '':
                # No argument: list all handles, marking the active one.
                print('handle [handle]')
                print('Available handles:')
                for h in user.handles:
                    if h == user.current_handle:
                        print(f'\t{h} (current)')
                    else:
                        print(f'\t{h}')
            else:
                h = msg[1]
                h = fixup_handle(h)
                if h in user.handles:
                    print(f'Using {h} as handle')
                    user.current_handle = h
                else:
                    print(f'Handle {h} not found')

        elif current_participants != []:
            # Anything else typed while a chat is selected is sent as a message;
            # a leading backslash escapes command words.
            if msg.startswith('\\'):
                msg = msg[1:]
            im.send(imessage.iMessage(
                text=msg,
                participants=current_participants,
                sender=user.current_handle,
                effect=current_effect
            ))
            # Effects apply to exactly one message.
            current_effect = None
        else:
            print('No chat selected, use help for help')

    time.sleep(0.1)

    # elif msg.startswith('send') or msg.startswith('s'):
    #     msg = msg.split(' ')
    #     if len(msg) < 3:
    #         print('send [recipient] [message]')
    #     else:
    #         im.send(imessage.iMessage(
    #             text=' '.join(msg[2:]),
    #             participants=[msg[1], user.handles[0]],
    #             #sender=user.handles[0]
    #         ))
|
||||
|
@ -1,17 +0,0 @@
|
||||
# Smoke test: open an APNs connection and subscribe to the iMessage topic.
import apns
import ids

conn1 = apns.APNSConnection()
conn1.connect()

# Uncomment these for greater parity with apsd
# conn1.keep_alive()
# conn1.set_state(0x01)
# conn1.filter([])
# conn1.connect(False)

conn1.filter(["com.apple.madrid"])

# print(ids.lookup(conn1, ["mailto:jjtech@jjtech.dev"]))

#print(ids.register(conn1, "user_test2@icloud.com", "wowSecure1"))
|
@ -1,296 +0,0 @@
|
||||
import plistlib
|
||||
import zlib
|
||||
from base64 import b64decode, b64encode
|
||||
from hashlib import sha1
|
||||
|
||||
# Taken from debug logs of apsd
|
||||
enabled_topics = "(com.apple.icloud-container.com.apple.avatarsd, com.icloud.askpermission, com.apple.icloud-container.com.apple.Safari, com.apple.itunesstored, com.apple.icloud-container.clouddocs.com.apple.CloudDocs.health, com.apple.passd.usernotifications, com.apple.icloud-container.com.apple.donotdisturbd, com.apple.icloud-container.clouddocs.iCloud.com.reddit.reddit, com.apple.mobileme.fmf3, com.apple.icloud-container.com.apple.cloudpaird, com.apple.icloud-container.clouddocs.com.apple.Pages, com.apple.appstored-testflight, com.apple.askpermissiond, com.apple.icloud-container.com.apple.willowd, com.me.cal, com.apple.icloud-container.com.apple.suggestd, com.apple.icloud-container.clouddocs.F3LWYJ7GM7.com.apple.garageband10, com.apple.icloud-container.clouddocs.com.apple.CloudDocs.container-metadata, com.apple.icloud-container.com.apple.callhistory.sync-helper, com.apple.icloud-container.com.apple.syncdefaultsd, com.apple.icloud-container.com.apple.SafariShared.Settings, com.apple.pay.services.products.prod, com.apple.icloud-container.com.apple.StatusKitAgent, com.apple.icloud-container.com.apple.siriknowledged, com.me.contacts, com.apple.icloud-container.com.apple.TrustedPeersHelper, com.apple.icloud-container.clouddocs.iCloud.com.apple.iBooks, com.apple.icloud-container.clouddocs.iCloud.dk.simonbs.Scriptable, com.apple.icloud-container.clouddocs.com.apple.ScriptEditor2, com.icloud.family, com.apple.idmsauth, com.apple.watchList, com.apple.icloud-container.clouddocs.com.apple.TextEdit, com.apple.icloud-container.com.apple.VoiceMemos, com.apple.sharedstreams, com.apple.pay.services.apply.prod, com.apple.icloud-container.com.apple.SafariShared.CloudTabs, com.apple.wallet.sharing.qa, com.apple.appstored, com.apple.icloud-container.clouddocs.3L68KQB4HG.com.readdle.CommonDocuments, com.apple.icloud-container.clouddocs.com.apple.CloudDocs.pp-metadata, com.me.setupservice, com.apple.icloud-container.com.apple.amsengagementd, 
com.apple.icloud-container.com.apple.appleaccount.beneficiary.private, com.apple.icloud-container.com.apple.appleaccount.beneficiary, com.apple.icloud-container.clouddocs.com.apple.mail, com.apple.icloud-container.com.apple.appleaccount.custodian, com.apple.icloud-container.com.apple.securityd, com.apple.icloud-container.com.apple.iBooksX, com.apple.icloud-container.clouddocs.com.apple.QuickTimePlayerX, com.apple.icloud-container.clouddocs.com.apple.TextInput, com.apple.icloud-container.com.apple.icloud.fmfd, com.apple.tv.favoriteTeams, com.apple.pay.services.ownershipTokens.prod, com.apple.icloud-container.com.apple.passd, com.apple.amsaccountsd, com.apple.pay.services.devicecheckin.prod.us, com.apple.storekit, com.apple.icloud-container.com.apple.keyboardservicesd, paymentpass.com.apple, com.apple.aa.setupservice, com.apple.icloud-container.clouddocs.com.apple.shoebox, com.apple.icloud-container.clouddocs.F3LWYJ7GM7.com.apple.mobilegarageband, com.apple.icloud-container.com.apple.icloud.searchpartyuseragent, com.apple.icloud-container.clouddocs.iCloud.com.apple.configurator.ui, com.apple.icloud-container.com.apple.gamed, com.apple.icloud-container.clouddocs.com.apple.Keynote, com.apple.icloud-container.com.apple.willowd.homekit, com.apple.amsengagementd.notifications, com.apple.icloud.presence.mode.status, com.apple.aa.idms, com.apple.icloud-container.clouddocs.iCloud.com.apple.MobileSMS, com.apple.gamed, com.apple.icloud-container.clouddocs.iCloud.is.workflow.my.workflows, com.apple.icloud-container.clouddocs.iCloud.md.obsidian, com.apple.icloud-container.clouddocs.com.apple.CloudDocs, com.apple.wallet.sharing, com.apple.icloud-container.clouddocs.iCloud.com.apple.iBooks.iTunesU, com.apple.icloud.presence.shared.experience, com.apple.icloud-container.com.apple.imagent, com.apple.icloud-container.com.apple.financed, com.apple.pay.services.account.prod, com.apple.icloud-container.com.apple.assistant.assistantd, com.apple.pay.services.ck.zone.prod, 
com.apple.icloud-container.com.apple.security.cuttlefish, com.apple.icloud-container.clouddocs.com.apple.iBooks.cloudData, com.apple.peerpayment, com.icloud.quota, com.apple.pay.provision, com.apple.icloud-container.com.apple.upload-request-proxy.com.apple.photos.cloud, com.apple.icloud-container.com.apple.appleaccount.custodian.private, com.apple.icloud-container.clouddocs.com.apple.Preview, com.apple.maps.icloud, com.apple.icloud-container.com.apple.reminders, com.apple.icloud-container.com.apple.SafariShared.WBSCloudBookmarksStore, com.apple.idmsauthagent, com.apple.icloud-container.clouddocs.com.apple.Numbers, com.apple.bookassetd, com.apple.pay.auxiliary.registration.requirement.prod, com.apple.icloud.fmip.voiceassistantsync)"
|
||||
opportunistic_topics = "(com.apple.madrid, com.apple.icloud-container.com.apple.photos.cloud, com.apple.private.alloy.ded, com.apple.private.ac, com.apple.private.alloy.coreduet.sync, com.apple.private.alloy.sms, com.apple.private.alloy.ids.cloudmessaging, com.apple.private.alloy.maps, com.apple.private.alloy.facetime.mw, com.apple.private.alloy.facetime.sync, com.apple.private.alloy.maps.eta, com.apple.private.alloy.thumper.keys, com.apple.private.alloy.phonecontinuity, com.apple.private.alloy.continuity.tethering, com.apple.private.alloy.biz, com.apple.private.alloy.tips, com.apple.private.alloy.keytransparency.accountkey.pinning, com.apple.private.alloy.nearby, com.apple.private.alloy.itunes, com.apple.private.alloy.status.keysharing, com.apple.private.alloy.facetime.video, com.apple.private.alloy.screentime.invite, com.apple.private.alloy.proxiedcrashcopier.icloud, com.apple.private.alloy.classroom, com.apple.private.alloy.carmelsync, com.apple.ess, com.apple.private.alloy.facetime.lp, com.apple.private.alloy.icloudpairing, com.apple.private.alloy.accounts.representative, com.apple.private.alloy.gamecenter.imessage, com.apple.private.alloy.photostream, com.apple.private.alloy.electrictouch, com.apple.private.alloy.messagenotification, com.apple.private.alloy.avconference.icloud, com.apple.private.alloy.fmd, com.apple.private.alloy.usagetracking, com.apple.private.alloy.fmf, com.apple.private.alloy.home.invite, com.apple.private.alloy.phone.auth, com.apple.private.alloy.quickrelay, com.apple.private.alloy.copresence, com.apple.private.alloy.home, com.apple.private.alloy.digitalhealth, com.apple.private.alloy.multiplex1, com.apple.private.alloy.screensharing.qr, com.apple.private.alloy.contextsync, com.apple.private.alloy.facetime.audio, com.apple.private.alloy.willow.stream, com.apple.private.ids, com.apple.private.alloy.continuity.activity, com.apple.private.alloy.gamecenter, com.apple.private.alloy.clockface.sharing, com.apple.private.alloy.safeview, 
com.apple.private.alloy.continuity.unlock, com.apple.private.alloy.continuity.encryption, com.apple.private.alloy.facetime.multi, com.apple.private.alloy.notes, com.apple.private.alloy.screentime, com.apple.private.alloy.willow, com.apple.private.alloy.accessibility.switchcontrol, com.apple.private.alloy.status.personal, com.apple.triald, com.apple.private.alloy.screensharing, com.apple.private.alloy.gelato, com.apple.private.alloy.safari.groupactivities, com.apple.private.alloy.applepay, com.apple.private.alloy.amp.potluck, com.apple.private.alloy.sleep.icloud, com.apple.icloud-container.com.apple.knowledge-agent)"
|
||||
paused_topics = "(com.apple.icloud-container.company.thebrowser.Browser, com.apple.icloud-container.com.apple.sociallayerd, com.apple.icloud.fmip.app.push, com.apple.iBooksX, company.thebrowser.Browser, com.apple.icloud-container.com.apple.Maps, com.apple.mobileme.fmf2, com.apple.findmy, com.apple.iWork.Numbers, com.apple.jalisco, com.apple.iWork.Pages, com.apple.Notes, com.apple.Maps, com.apple.icloud-container.com.apple.Notes, com.apple.dt.Xcode, com.apple.sagad, com.apple.icloud.presence.channel.management, com.apple.icloud-container.com.apple.protectedcloudstorage.protectedcloudkeysyncing, com.apple.TestFlight, com.apple.icloud-container.com.apple.news, com.apple.music.social, com.apple.icloud-container.com.apple.iWork.Numbers, com.apple.news, com.apple.tilt, com.apple.icloud-container.com.apple.findmy, com.apple.icloud-container.com.apple.iWork.Pages)"
|
||||
|
||||
# Parse the topics into a list
|
||||
enabled_topics = enabled_topics[1:-1].split(", ")
|
||||
opportunistic_topics = opportunistic_topics[1:-1].split(", ")
|
||||
paused_topics = paused_topics[1:-1].split(", ")
|
||||
|
||||
topics = enabled_topics + opportunistic_topics + paused_topics
|
||||
|
||||
# Calculate the SHA1 hash of each topic
|
||||
topics_lookup = [(topic, sha1(topic.encode()).digest()) for topic in topics]
|
||||
|
||||
|
||||
class bcolors:
|
||||
HEADER = "\033[95m"
|
||||
OKBLUE = "\033[94m"
|
||||
OKCYAN = "\033[96m"
|
||||
OKGREEN = "\033[92m"
|
||||
WARNING = "\033[93m"
|
||||
FAIL = "\033[91m"
|
||||
ENDC = "\033[0m"
|
||||
BOLD = "\033[1m"
|
||||
UNDERLINE = "\033[4m"
|
||||
|
||||
|
||||
def _lookup_topic(hash: bytes):
|
||||
for topic_lookup in topics_lookup:
|
||||
if topic_lookup[1] == hash:
|
||||
return topic_lookup[0]
|
||||
return None
|
||||
|
||||
|
||||
# Returns the value of the first field with the given id
|
||||
def _get_field(fields: list[tuple[int, bytes]], id: int) -> bytes:
|
||||
for field_id, value in fields:
|
||||
if field_id == id:
|
||||
return value
|
||||
return None
|
||||
|
||||
|
||||
def _p_filter(prefix, fields: list[tuple[int, bytes]]):
|
||||
enabled = []
|
||||
ignored = []
|
||||
oppertunistic = []
|
||||
paused = []
|
||||
|
||||
token = ""
|
||||
|
||||
for field in fields:
|
||||
if field[0] == 1:
|
||||
token = b64encode(field[1])
|
||||
# print(f"Push Token: {b64encode(field[1])}")
|
||||
elif field[0] == 2:
|
||||
enabled.append(_lookup_topic(field[1]))
|
||||
elif field[0] == 3:
|
||||
ignored.append(_lookup_topic(field[1]))
|
||||
elif field[0] == 4:
|
||||
oppertunistic.append(_lookup_topic(field[1]))
|
||||
elif field[0] == 5:
|
||||
paused.append(_lookup_topic(field[1]))
|
||||
else:
|
||||
pass # whatever, there's a 6 but it's not documented
|
||||
# print(f"Unknown field ID: {field[0]}")
|
||||
|
||||
# Remove None values
|
||||
enabled = [topic.strip() for topic in enabled if topic is not None]
|
||||
ignored = [topic.strip() for topic in ignored if topic is not None]
|
||||
oppertunistic = [topic.strip() for topic in oppertunistic if topic is not None]
|
||||
paused = [topic.strip() for topic in paused if topic is not None]
|
||||
|
||||
enabled = ", ".join(enabled)
|
||||
ignored = ", ".join(ignored)
|
||||
oppertunistic = ", ".join(oppertunistic)
|
||||
paused = ", ".join(paused)
|
||||
|
||||
if not enabled:
|
||||
enabled = "None"
|
||||
if not ignored:
|
||||
ignored = "None"
|
||||
if not oppertunistic:
|
||||
oppertunistic = "None"
|
||||
if not paused:
|
||||
paused = "None"
|
||||
|
||||
# Trim the list of topics
|
||||
if len(enabled) > 100:
|
||||
enabled = enabled[:100] + "..."
|
||||
if len(ignored) > 100:
|
||||
ignored = ignored[:100] + "..."
|
||||
if len(oppertunistic) > 100:
|
||||
oppertunistic = oppertunistic[:100] + "..."
|
||||
if len(paused) > 100:
|
||||
paused = paused[:100] + "..."
|
||||
# (Token: {token.decode()})
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.OKCYAN}Filter{bcolors.ENDC} {bcolors.WARNING}Enabled{bcolors.ENDC}: {enabled} {bcolors.FAIL}Ignored{bcolors.ENDC}: {ignored} {bcolors.OKBLUE}Oppertunistic{bcolors.ENDC}: {oppertunistic} {bcolors.OKGREEN}Paused{bcolors.ENDC}: {paused}"
|
||||
)
|
||||
|
||||
|
||||
import apns
|
||||
|
||||
|
||||
def pretty_print_payload(
|
||||
prefix, payload: tuple[int, list[tuple[int, bytes]]]
|
||||
) -> bytes | None:
|
||||
id = payload[0]
|
||||
|
||||
if id == 9:
|
||||
_p_filter(prefix, payload[1])
|
||||
elif id == 8:
|
||||
token_str = ""
|
||||
if _get_field(payload[1], 3):
|
||||
token_str = f"{bcolors.WARNING}Token{bcolors.ENDC}: {b64encode(_get_field(payload[1], 3)).decode()}"
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.OKCYAN}Connected{bcolors.ENDC} {token_str} {bcolors.OKBLUE}{_get_field(payload[1], 1).hex()}{bcolors.ENDC}"
|
||||
)
|
||||
elif id == 7:
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.OKCYAN}Connect Request{bcolors.ENDC}",
|
||||
end="",
|
||||
)
|
||||
if _get_field(payload[1], 1):
|
||||
print(
|
||||
f" {bcolors.WARNING}Token{bcolors.ENDC}: {b64encode(_get_field(payload[1], 1)).decode()}",
|
||||
end="",
|
||||
)
|
||||
if _get_field(payload[1], 0x0C):
|
||||
print(f" {bcolors.OKBLUE}SIGNED{bcolors.ENDC}", end="")
|
||||
if (
|
||||
_get_field(payload[1], 0x5)
|
||||
and int.from_bytes(_get_field(payload[1], 0x5)) & 0x4
|
||||
):
|
||||
print(f" {bcolors.FAIL}ROOT{bcolors.ENDC}", end="")
|
||||
print()
|
||||
|
||||
# for field in payload[1]:
|
||||
# print(f"Field ID: {field[0]}")
|
||||
# print(f"Field Value: {field[1]}")
|
||||
|
||||
# 65 (user) or 69 (root)
|
||||
|
||||
for i in range(len(payload[1])):
|
||||
# if payload[1][i][0] == 5:
|
||||
# if payload[1][i][1] == b'\x00\x00\x00A': # user
|
||||
# payload[1][i][1] = b'\x00\x00\x00E'
|
||||
# elif payload[1][i][1] == b'\x00\x00\x00E': # root
|
||||
# payload[1][i][1] = b'\x00\x00\x00A'
|
||||
# else:
|
||||
# print("Unknown field value: ", payload[1][i][1])
|
||||
if payload[1][i][0] == 1:
|
||||
pass
|
||||
# payload[1][i] = (None, None)
|
||||
# payload[1][i] = (1, b64decode("D3MtN3e18QE8rve3n92wp+CwK7u/bWk/5WjQUOBN640="))
|
||||
|
||||
out = apns._serialize_payload(payload[0], payload[1])
|
||||
# return out
|
||||
elif id == 0xC:
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.OKCYAN}Keep Alive{bcolors.ENDC}"
|
||||
)
|
||||
elif id == 0xD:
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.OKCYAN}Keep Alive Ack{bcolors.ENDC}"
|
||||
)
|
||||
elif id == 0x14:
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.OKCYAN}Set State{bcolors.ENDC}: {_get_field(payload[1], 1).hex()}"
|
||||
)
|
||||
elif id == 0x1D or id == 0x20:
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.WARNING}PubSub ??{bcolors.ENDC}"
|
||||
)
|
||||
elif id == 0xE:
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.WARNING}Token Confirmation{bcolors.ENDC}"
|
||||
)
|
||||
elif id == 0xA:
|
||||
topic = ""
|
||||
# topic = _lookup_topic(_get_field(payload[1], 1))
|
||||
# if it has apsd -> APNs in the prefix, it's an outgoing notification
|
||||
if "apsd -> APNs" in prefix:
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.OKBLUE}OUTGOING Notification{bcolors.ENDC}",
|
||||
end="",
|
||||
)
|
||||
topic = _lookup_topic(_get_field(payload[1], 1))
|
||||
# topic = _lookup_topic(_get_field(payload[1], 1))
|
||||
# if b"bplist" in _get_field(payload[1], 3):
|
||||
# print(f" {bcolors.OKCYAN}Binary{bcolors.ENDC}", end="")
|
||||
# if topic == "com.apple.madrid":
|
||||
# print(f" {bcolors.FAIL}Madrid{bcolors.ENDC}", end="")
|
||||
# import plistlib
|
||||
# plist = plistlib.loads(_get_field(payload[1], 3))
|
||||
# #payload = plist["P"]
|
||||
# #print(f" {bcolors.WARNING}Payload{bcolors.ENDC}: {payload}", end="")
|
||||
|
||||
# for key in plist:
|
||||
# print(f" {bcolors.OKBLUE}{key}{bcolors.ENDC}: {plist[key]}", end="")
|
||||
|
||||
else:
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.OKCYAN}Notification{bcolors.ENDC}",
|
||||
end="",
|
||||
)
|
||||
topic = _lookup_topic(_get_field(payload[1], 2))
|
||||
# if b"bplist" in _get_field(payload[1], 3):
|
||||
# print(f" {bcolors.OKBLUE}Binary{bcolors.ENDC}", end="")
|
||||
# print(f" {bcolors.WARNING}Topic{bcolors.ENDC}: {_lookup_topic(_get_field(payload[1], 2))}")
|
||||
|
||||
print(f" {bcolors.WARNING}Topic{bcolors.ENDC}: {topic}", end="")
|
||||
|
||||
if topic == "com.apple.madrid":
|
||||
print(f" {bcolors.FAIL}Madrid{bcolors.ENDC}", end="")
|
||||
orig_payload = payload
|
||||
payload = plistlib.loads(_get_field(payload[1], 3))
|
||||
|
||||
# print(payload)
|
||||
if "cT" in payload and False:
|
||||
# It's HTTP over APNs
|
||||
if "hs" in payload:
|
||||
print(
|
||||
f" {bcolors.WARNING}HTTP Response{bcolors.ENDC}: {payload['hs']}",
|
||||
end="",
|
||||
)
|
||||
else:
|
||||
print(f" {bcolors.WARNING}HTTP Request{bcolors.ENDC}", end="")
|
||||
# print(f" {bcolors.WARNING}HTTP{bcolors.ENDC} {payload['hs']}", end="")
|
||||
if "u" in payload:
|
||||
print(f" {bcolors.OKCYAN}URL{bcolors.ENDC}: {payload['u']}", end="")
|
||||
print(
|
||||
f" {bcolors.FAIL}Content Type{bcolors.ENDC}: {payload['cT']}",
|
||||
end="",
|
||||
)
|
||||
if "h" in payload:
|
||||
print(
|
||||
f" {bcolors.FAIL}Headers{bcolors.ENDC}: {payload['h']}", end=""
|
||||
)
|
||||
if "b" in payload:
|
||||
# What am I really supposed to put in WBITS? Got this from a random SO answer
|
||||
# print(payload["b"])
|
||||
body = zlib.decompress(payload["b"], 16 + zlib.MAX_WBITS)
|
||||
if b"plist" in body:
|
||||
body = plistlib.loads(body)
|
||||
print(f" {bcolors.FAIL}Body{bcolors.ENDC}: {body}", end="")
|
||||
#if not "cT" in payload:
|
||||
for key in payload:
|
||||
print(f" {bcolors.OKBLUE}{key}{bcolors.ENDC}: {payload[key]}")
|
||||
|
||||
if 'dtl' in payload:
|
||||
print("OVERRIDE DTL")
|
||||
payload['dtl'][0].update({'sT': b64decode("jJ86jTYbv1mGVwO44PyfuZ9lh3o56QjOE39Jk8Z99N8=")})
|
||||
|
||||
# Re-serialize the payload
|
||||
payload = plistlib.dumps(payload, fmt=plistlib.FMT_BINARY)
|
||||
# Construct APNS message
|
||||
# Get the original fields except 3
|
||||
fields = orig_payload[1]
|
||||
fields = [field for field in fields if field[0] != 3]
|
||||
# Add the new field
|
||||
fields.append((3, payload))
|
||||
payload = apns._serialize_payload(0xA, fields)
|
||||
|
||||
# Use the override payload
|
||||
|
||||
#print(payload, orig_payload)
|
||||
#print(payload == orig_payload)
|
||||
return payload
|
||||
|
||||
print()
|
||||
|
||||
# print(f" {bcolors.WARNING}{bcolors.ENDC}: {payload['cT']}")
|
||||
|
||||
# for field in payload[1]:
|
||||
# print(f"Field ID: {field[0]}")
|
||||
# print(f"Field Value: {field[1]}")
|
||||
elif id == 0xB:
|
||||
print(
|
||||
f"{bcolors.OKGREEN}{prefix}{bcolors.ENDC}: {bcolors.OKCYAN}Notification Ack{bcolors.ENDC} {bcolors.OKBLUE}{_get_field(payload[1], 8).hex()}{bcolors.ENDC}"
|
||||
)
|
||||
else:
|
||||
print(prefix, f"Payload ID: {hex(payload[0])}")
|
||||
for field in payload[1]:
|
||||
print(f"Field ID: {field[0]}")
|
||||
print(f"Field Value: {field[1]}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print(f"{bcolors.OKGREEN}Enabled:{bcolors.ENDC}")
|
@ -1,32 +0,0 @@
|
||||
set -e
|
||||
# Use brew's openssl
|
||||
export PATH="/opt/homebrew/opt/openssl@3/bin:$PATH"
|
||||
|
||||
openssl req -newkey rsa:2048 -nodes -keyout root_key.pem -x509 -days 3650 -out root_certificate.pem \
|
||||
-subj "/C=US/O=Apple Inc./OU=Apple Certification Authority/CN=Apple Root CA" \
|
||||
-addext "basicConstraints=critical, CA:true" -addext "keyUsage=critical, digitalSignature, keyCertSign, cRLSign"
|
||||
|
||||
openssl req -newkey rsa:2048 -nodes -keyout intermediate_key.pem -out intermediate_certificate.csr \
|
||||
-subj "/CN=Apple Server Authentication CA/OU=Certification Authority/O=Apple Inc./C=US" \
|
||||
-addext "basicConstraints=critical, CA:true" -addext "keyUsage=critical, keyCertSign, cRLSign"
|
||||
# Need 1.2.840.113635.100.6.2.12?
|
||||
|
||||
openssl x509 -req -CAkey root_key.pem -CA root_certificate.pem -days 3650 \
|
||||
-in intermediate_certificate.csr -out intermediate_certificate.pem -CAcreateserial -copy_extensions copyall
|
||||
|
||||
openssl req -newkey rsa:2048 -nodes -keyout push_key.pem -out push_certificate.csr \
|
||||
-subj "/CN=courier.push.apple.com/O=Apple Inc./ST=California/C=US" \
|
||||
-addext "basicConstraints=critical, CA:false" \
|
||||
-addext "subjectAltName = DNS:courier.push.apple.com, DNS:courier2.push.apple.com" \
|
||||
-addext "keyUsage = critical, digitalSignature, keyEncipherment" \
|
||||
-addext "extendedKeyUsage = serverAuth"
|
||||
|
||||
openssl x509 -req -CAkey intermediate_key.pem -CA intermediate_certificate.pem -days 365 \
|
||||
-in push_certificate.csr -out push_certificate.pem -CAcreateserial -copy_extensions copyall
|
||||
|
||||
cat push_certificate.pem intermediate_certificate.pem root_certificate.pem > push_certificate_chain.pem
|
||||
|
||||
# Remove the leftover files
|
||||
rm intermediate_certificate.csr intermediate_certificate.pem intermediate_key.pem intermediate_certificate.srl
|
||||
rm push_certificate.csr push_certificate.pem
|
||||
rm root_certificate.pem root_key.pem root_certificate.srl
|
@ -1,5 +0,0 @@
|
||||
# Print out the additions to /etc/hosts for the proxy
|
||||
|
||||
for i in range(1, 50):
|
||||
print(f"127.0.0.1 {i}-courier.push.apple.com")
|
||||
print(f"127.0.0.1 {i}.courier.push.apple.com")
|
@ -1,197 +0,0 @@
|
||||
# TLS server to proxy APNs traffic
|
||||
|
||||
import socket
|
||||
import sys
|
||||
import threading
|
||||
|
||||
import tlslite
|
||||
|
||||
# setting path
|
||||
sys.path.append("../")
|
||||
sys.path.append("../../")
|
||||
|
||||
# APNs server to proxy traffic to
|
||||
APNS_HOST = "windows.courier.push.apple.com"
|
||||
APNS_PORT = 5223
|
||||
ALPN = b"apns-security-v3"
|
||||
# ALPN = b"apns-security-v2"
|
||||
# ALPN = b"apns-pack-v1"
|
||||
|
||||
global_cnt = 0
|
||||
|
||||
|
||||
# Connect to the APNs server
|
||||
def connect() -> tlslite.TLSConnection:
|
||||
# Connect to the APNs server
|
||||
sock = socket.create_connection((APNS_HOST, APNS_PORT))
|
||||
# Wrap the socket in TLS
|
||||
ssock = tlslite.TLSConnection(sock)
|
||||
# print("Handshaking with APNs")
|
||||
# Handshake with the server
|
||||
if ALPN == b"apns-security-v3":
|
||||
print("Using v3")
|
||||
ssock.handshakeClientCert(alpn=[ALPN])
|
||||
else:
|
||||
import albert
|
||||
|
||||
private_key, cert = albert.generate_push_cert()
|
||||
cert = tlslite.X509CertChain([tlslite.X509().parse(cert)])
|
||||
private_key = tlslite.parsePEMKey(private_key, private=True)
|
||||
# Handshake with the server
|
||||
ssock.handshakeClientCert(cert, private_key, alpn=[ALPN])
|
||||
|
||||
return ssock
|
||||
|
||||
|
||||
cert: str = None
|
||||
key: str = None
|
||||
|
||||
|
||||
import printer
|
||||
|
||||
import apns
|
||||
|
||||
outgoing_list = []
|
||||
incoming_list = []
|
||||
# last_outgoing = b""
|
||||
|
||||
|
||||
def proxy(conn1: tlslite.TLSConnection, conn2: tlslite.TLSConnection, prefix: str = ""):
|
||||
try:
|
||||
while True:
|
||||
# Read data from the first connection
|
||||
data = conn1.read()
|
||||
# print(prefix, "data: ", data)
|
||||
# If there is no data, the connection has closed
|
||||
if not data:
|
||||
print(prefix, "Connection closed due to no data")
|
||||
break
|
||||
|
||||
try:
|
||||
override = printer.pretty_print_payload(
|
||||
prefix, apns._deserialize_payload_from_buffer(data)
|
||||
)
|
||||
except Exception as e:
|
||||
print(e) # Can't crash the proxy over parsing errors
|
||||
if override is not None:
|
||||
data = override
|
||||
print("OVERRIDE: ", end="")
|
||||
printer.pretty_print_payload(
|
||||
prefix, apns._deserialize_payload_from_buffer(data)
|
||||
)
|
||||
|
||||
if "apsd -> APNs" in prefix:
|
||||
global outgoing_list
|
||||
outgoing_list.insert(0, data)
|
||||
if len(outgoing_list) > 100:
|
||||
outgoing_list.pop()
|
||||
elif "APNs -> apsd" in prefix:
|
||||
global incoming_list
|
||||
incoming_list.insert(0, data)
|
||||
if len(incoming_list) > 100:
|
||||
incoming_list.pop()
|
||||
|
||||
# print(prefix, data)
|
||||
# Write the data to the second connection
|
||||
conn2.write(data)
|
||||
except OSError as e:
|
||||
if e.errno == 9:
|
||||
print(prefix, "Connection closed due to OSError 9")
|
||||
pass # Probably a connection closed error
|
||||
else:
|
||||
raise e
|
||||
except tlslite.TLSAbruptCloseError as e:
|
||||
print(prefix, "Connection closed abruptly: ", e)
|
||||
print("Connection closed")
|
||||
# Close the connections
|
||||
conn1.close()
|
||||
conn2.close()
|
||||
|
||||
|
||||
repl_lock = False
|
||||
|
||||
|
||||
def repl(conn1: tlslite.TLSConnection, conn2: tlslite.TLSConnection):
|
||||
global repl_lock
|
||||
if repl_lock:
|
||||
print("REPL already running")
|
||||
return
|
||||
repl_lock = True
|
||||
import IPython
|
||||
|
||||
IPython.embed()
|
||||
|
||||
|
||||
def handle(conn: socket.socket):
|
||||
# Wrap the socket in TLS
|
||||
s_conn = tlslite.TLSConnection(conn)
|
||||
global cert, key
|
||||
chain = tlslite.X509CertChain()
|
||||
chain.parsePemList(cert)
|
||||
# print(chain)
|
||||
# cert = tlslite.X509CertChain([tlslite.X509().parse(cert)])
|
||||
key_parsed = tlslite.parsePEMKey(key, private=True)
|
||||
# print(key_parsed)
|
||||
s_conn.handshakeServer(
|
||||
certChain=chain, privateKey=key_parsed, reqCert=False, alpn=[ALPN]
|
||||
)
|
||||
|
||||
print("Handling connection")
|
||||
# Connect to the APNs server
|
||||
apns = connect()
|
||||
print("Connected to APNs")
|
||||
|
||||
threading.Thread(target=repl, args=(s_conn, apns)).start()
|
||||
|
||||
global global_cnt
|
||||
global_cnt += 1
|
||||
# Proxy data between the connections
|
||||
# Create a thread to proxy data from the APNs server to the client
|
||||
threading.Thread(
|
||||
target=proxy, args=(s_conn, apns, f"{global_cnt} apsd -> APNs")
|
||||
).start()
|
||||
# Just proxy data from the client to the APNs server in this thread
|
||||
proxy(apns, s_conn, f"{global_cnt} APNs -> apsd")
|
||||
|
||||
|
||||
def serve():
|
||||
# Create a socket to listen for connections
|
||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
# Allow the socket to be reused
|
||||
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
sock.bind(("localhost", 5223))
|
||||
sock.listen()
|
||||
|
||||
print("Listening for connections...")
|
||||
|
||||
# Handshake with the client
|
||||
# Read the certificate and private key from the config
|
||||
with open("push_certificate_chain.pem", "r") as f:
|
||||
global cert
|
||||
cert = f.read()
|
||||
|
||||
# NEED TO USE OPENSSL, SEE CORETRUST CMD, MIMIC ENTRUST? OR AT LEAST SEE PUSHPROXY FOR EXTRACTION & REPLACEMENT
|
||||
with open("push_key.pem", "r") as f:
|
||||
global key
|
||||
key = f.read()
|
||||
|
||||
conns = []
|
||||
# Accept connections
|
||||
try:
|
||||
while True:
|
||||
# Accept a connection
|
||||
conn, addr = sock.accept()
|
||||
conns.append(conn)
|
||||
# Create a thread to handle the connection
|
||||
# handle(conn)
|
||||
thread = threading.Thread(target=handle, args=(conn,))
|
||||
thread.start()
|
||||
except KeyboardInterrupt:
|
||||
print("Keyboard interrupt, closing sockets")
|
||||
for conn in conns:
|
||||
conn.close()
|
||||
sock.close()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
serve()
|
@ -1,69 +0,0 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEBjCCAu6gAwIBAgIULoswBSnhsTUwq4AYoAdnf46gKa0wDQYJKoZIhvcNAQEL
|
||||
BQAwbTEnMCUGA1UEAwweQXBwbGUgU2VydmVyIEF1dGhlbnRpY2F0aW9uIENBMSAw
|
||||
HgYDVQQLDBdDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTETMBEGA1UECgwKQXBwbGUg
|
||||
SW5jLjELMAkGA1UEBhMCVVMwHhcNMjMwNDA3MDU0NjUwWhcNMjQwNDA2MDU0NjUw
|
||||
WjBYMR8wHQYDVQQDDBZjb3VyaWVyLnB1c2guYXBwbGUuY29tMRMwEQYDVQQKDApB
|
||||
cHBsZSBJbmMuMRMwEQYDVQQIDApDYWxpZm9ybmlhMQswCQYDVQQGEwJVUzCCASIw
|
||||
DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALMnRj3/NBqeFDE+ZYALffJRcNtn
|
||||
Z0vMZLbFwAv5k9ybXKUY5OMHM74urjHgbvSz1VU73dqI0x1yTwDp+5v4bySb7TCY
|
||||
jhXznMNfw6mpbSTHfucheNgtYQcFZlcgzMeDjrjFBphiuV8IekvzhwUhye5XtCC4
|
||||
9n0cNU3xPqYTwBSjmYD9Dabfn175RSJWnSiS+NzbBEopZQNB5exoWG/G+LvdIwXv
|
||||
6p3xWYZsTEqYKi1F/AXkwmzKOPiCeP6Nxa7GkzWE0BjfB2YLqOxPojm6ldI1a2Je
|
||||
Hi/OkzElsNVV4OmJlVa5c5B9ahfDREPsjUug9zskjIs5mowBVGUoxwt+Kn8CAwEA
|
||||
AaOBsjCBrzAMBgNVHRMBAf8EAjAAMDoGA1UdEQQzMDGCFmNvdXJpZXIucHVzaC5h
|
||||
cHBsZS5jb22CF2NvdXJpZXIyLnB1c2guYXBwbGUuY29tMA4GA1UdDwEB/wQEAwIF
|
||||
oDATBgNVHSUEDDAKBggrBgEFBQcDATAdBgNVHQ4EFgQUscg6bogx1eW0qxjpHvXd
|
||||
QtCE+HgwHwYDVR0jBBgwFoAULCD7ZnF0OUO5w+TLs+dO1H+LnOwwDQYJKoZIhvcN
|
||||
AQELBQADggEBAH3R+ErzeiRqp57hMzIw4FUntvt4JbN4PBnLekbaD582QJb6rccn
|
||||
21ZaTvVRw0rlwfWXxEZXG4iSqVe5ZUI55DqqnI5P5WU9q1h58nwJv1vBVp0a5/AQ
|
||||
FczbMfG+d3Fdh88Oic47WRHSg6gghrY2Vi326FOeKPyy+DnoY1Qx1zSyF9Zh7/Gz
|
||||
EA/V7hczn8io8Qyr5xapFuCtH+z5W0IvX3ConBfogbozHM/z1ZF0R4vmCl/YbWPY
|
||||
qF87w6w2Ov0tfjipOYLZhssRaf4U7Ppq2K8KRCmJ7ha73otsqJDZ2on9dMnVq6b+
|
||||
I5JFOB5GYNhivmmH+pkpP7WjAhyAZu1byuo=
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDwDCCAqigAwIBAgIUZZzOEefzPPJIshhkspF84IGOf60wDQYJKoZIhvcNAQEL
|
||||
BQAwYjELMAkGA1UEBhMCVVMxEzARBgNVBAoMCkFwcGxlIEluYy4xJjAkBgNVBAsM
|
||||
HUFwcGxlIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRYwFAYDVQQDDA1BcHBsZSBS
|
||||
b290IENBMB4XDTIzMDQwNzA1NDY1MFoXDTMzMDQwNDA1NDY1MFowbTEnMCUGA1UE
|
||||
AwweQXBwbGUgU2VydmVyIEF1dGhlbnRpY2F0aW9uIENBMSAwHgYDVQQLDBdDZXJ0
|
||||
aWZpY2F0aW9uIEF1dGhvcml0eTETMBEGA1UECgwKQXBwbGUgSW5jLjELMAkGA1UE
|
||||
BhMCVVMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC34BqN3F/RUi2x
|
||||
XFKBTbYj+6z1mqifV+j4+PpXUawAb8/jiVomDKACOz+dtoUmKtZrGfKQT/8jmA90
|
||||
atZwG/l7UD+BV4R5CTFODgJ5w1qisf+fNU1XAEiakLVRTUGO2IEGCUx3BrX+OLbV
|
||||
8H1t9jqp/+bCE9mpkgiAfJHhs3/90JdKwx8Rw9rR0mvR8gpygicBw4yaKSSjgQaz
|
||||
rVW4Li++Mk3dLipIAkSNf/3zne3Rjurxz87GsQ17M4zZ8AkzyvYptUoQhNkzhlo6
|
||||
iBWw0yrYIce1XLKo+2ykvT56WG0hqcupNskHXD0/KfHfoGiJPSrZrU67eTBfIYFr
|
||||
X98+K/JXAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEG
|
||||
MB0GA1UdDgQWBBQsIPtmcXQ5Q7nD5Muz507Uf4uc7DAfBgNVHSMEGDAWgBSlIMsP
|
||||
LlzUQiFAbd4WdR9DQmaSYjANBgkqhkiG9w0BAQsFAAOCAQEAduyTbo18KSykgxH0
|
||||
433x/UKxp88AvQpweG6VNrXhFkCAKwrxRPPY9/m5OasHpJuL6/hZF5LMMzKXPNlX
|
||||
sFitEAoFmCkSDGCK/mItu43E2XPyfyVBg7vIi74lypmmVJEExqk4W0SSi6KSz6k4
|
||||
8H28ezyS7K2x0ESnm8H9raE4mNdwq3wLERO4dQCv+qBXdRVBf2gmjLB1t1zIOwzF
|
||||
GNGl+zkZ0cXN9XOpJwoQQBlsP8J6fyMFGXCcWtTRTJQKsTtebx2rY1ydDhWPmnhX
|
||||
tkSV1N1ktfbjqv8KuUaw15seHXkWjKqszXl6BbqFsdPqB13fBmigB+51PPSWhCPF
|
||||
Ocu/tg==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDtTCCAp2gAwIBAgIUM/+kpUeTCAkr5w84cRYALQ7XXt0wDQYJKoZIhvcNAQEL
|
||||
BQAwYjELMAkGA1UEBhMCVVMxEzARBgNVBAoMCkFwcGxlIEluYy4xJjAkBgNVBAsM
|
||||
HUFwcGxlIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRYwFAYDVQQDDA1BcHBsZSBS
|
||||
b290IENBMB4XDTIzMDQwNzA1NDY1MFoXDTMzMDQwNDA1NDY1MFowYjELMAkGA1UE
|
||||
BhMCVVMxEzARBgNVBAoMCkFwcGxlIEluYy4xJjAkBgNVBAsMHUFwcGxlIENlcnRp
|
||||
ZmljYXRpb24gQXV0aG9yaXR5MRYwFAYDVQQDDA1BcHBsZSBSb290IENBMIIBIjAN
|
||||
BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAlCcyUv6XI39lXHzAkI03l7QDzVFl
|
||||
Jhw7FrG6eyjh1YXUEQcDzp0v5m5BamM0gazXYfYRIpzXbg5vV9bdZrTUF2DOMh0N
|
||||
xCCMuEUYy3PWOPILj3SvWiL+v3ReXgX1RT97UzOrZwSthacNGTbEUHnWAoSMbOrG
|
||||
r8nX/mfpYqMNcNjwzKrooLetSOD6RobM5BdRHqFu7H/UKLDhPK/qftSsgesroT1N
|
||||
l+rzgxkrNjiiYgB8KLTxYTQHMmFKDpjD0YteTtSZfZvkn03p5pnLAr33sewFuOiQ
|
||||
+pCDJh0J4zJhmpH3m67LzN6MSUpX6Bgqr2TOa+7vnFocU6slg9aKN0PkhwIDAQAB
|
||||
o2MwYTAdBgNVHQ4EFgQUpSDLDy5c1EIhQG3eFnUfQ0JmkmIwHwYDVR0jBBgwFoAU
|
||||
pSDLDy5c1EIhQG3eFnUfQ0JmkmIwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
|
||||
BAMCAYYwDQYJKoZIhvcNAQELBQADggEBAFXtllENQx2qu3LvYkJzzKyTA+DKzjOk
|
||||
j6vvqTnEPcjJRd01Iluy+EIvj5jFdJYTIeEoERRa0e0/kXY+SKMrhCe64w6U3SnO
|
||||
xIagSfTXR5eBmQqL05dDOcx7Us7JBPG+QHxN8bIqVYD5lqJDUcjYCGE6svGD02Tv
|
||||
VViG+ATDqPRuJcviuIjBLPlySW8L6uuU398aLZs9s/TuvRslPUASul8kxHdjjQk+
|
||||
ruwRE4vi1zGaORl9Mk7jeAEaYUHoF4v4aP9lQU/H1L0R/JIRFVBNJpoT+NY58Wah
|
||||
tpfvVem4xGMi0gsi71mvLFJF+M44iAeN2NqTidVJvuvUPha/tJCNd3s=
|
||||
-----END CERTIFICATE-----
|
@ -1,28 +0,0 @@
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCzJ0Y9/zQanhQx
|
||||
PmWAC33yUXDbZ2dLzGS2xcAL+ZPcm1ylGOTjBzO+Lq4x4G70s9VVO93aiNMdck8A
|
||||
6fub+G8km+0wmI4V85zDX8OpqW0kx37nIXjYLWEHBWZXIMzHg464xQaYYrlfCHpL
|
||||
84cFIcnuV7QguPZ9HDVN8T6mE8AUo5mA/Q2m359e+UUiVp0okvjc2wRKKWUDQeXs
|
||||
aFhvxvi73SMF7+qd8VmGbExKmCotRfwF5MJsyjj4gnj+jcWuxpM1hNAY3wdmC6js
|
||||
T6I5upXSNWtiXh4vzpMxJbDVVeDpiZVWuXOQfWoXw0RD7I1LoPc7JIyLOZqMAVRl
|
||||
KMcLfip/AgMBAAECggEAHygN5btLzvM99MByu32EJk+2jlmh08NUopqqhwqdBek5
|
||||
B/dX2wnGHGWW4tKyyTi7OasDLr3L5VubVL/cg6gFgDrj2ac2Uqf+09WEPC3cbuCI
|
||||
995Q21us+6EbRrzMEEiXWhfdyUOmFFpvlmTgTuqs6Rf0mhpAt8pflNIHQK+/oHb1
|
||||
tHxdHTkfaRhuePZkkXSXhXQBL7CBnbTzH1y55v6k78lyuY0mppHApPnZMk3r2R8z
|
||||
Q7ujt40XNAPwJwtkgIpaqoO5ltyNGY7V8RB//o3mrWCHHDG4barzEnhWF5wBqOVx
|
||||
Rn/P2NYfxZ06IY6IuAxYTZj9/iCYb7oWu+ia9UJ5lQKBgQC1PqIgOXT+GahmS/EJ
|
||||
WZsAroLfPVw9KgIrtNg8xtqy7lY5+WqBsgV2LIWYCLagz+rOrQgGdej0rv3EVjAH
|
||||
CC9f74e301QS/CCL32wVVNrTFnm9o+W8YcT7DYGhlShkrvH1yV4hdRRFmWE0OQn/
|
||||
uDHR3w1ezkVgFEeVbpul8uXYWwKBgQD9C9RTx9EmhwG/9LYFYAHi9E3pG7jx7/HB
|
||||
9YfhoHFeBHVHNKlu0AnHamvk9fLfg1v0YEmr6WKaY6jufs0Mq7cIwe5nJyk6PiSr
|
||||
gwvvHXSx3rvtEWlaU+teFySxVLoyoyswSAdXnjDgCcmlyLjhrfWRSVQ0Dua3dM4Y
|
||||
t7KWCurvrQKBgBREwdq6sjOsaOGvwm/aPnRQD8CpWwRYC1AZ9EivsEtsTeZD1z5T
|
||||
yzqQ9guvhcTsG4UrYodebkPWwfhP7yUKLEodPl83WeKZoYYi4TJtf981Vz328z0i
|
||||
7eVpeEK6rH699b8X2tgBANhbs7wkWNLpJWEkAZnFlYqOGgdDDFL9m+ibAoGBANIJ
|
||||
hZ5dFy/BlCQ+/O8NN8OoQRsilarMHYF/SeBwCmmdjbMi9Rgzuko5/YC43XXFVQhM
|
||||
/BmjyOKZolkECzNfPLgaMMaSbNgZSNrMFZXOf1Q8qwQmFaqSSaV51E0VJNBvO6QB
|
||||
xYZmme78cNIY/8zjVhfYI/pp+/t/Cqrdn3CHc2apAoGANPGDJNbmux4EQCszW8fB
|
||||
Zjv1K1wm0XRXo7UGTe56sTDyW+d6TUj2q5Jmk2+VegstpofoQKGBJJeM9+ssn6Xw
|
||||
f284Ug1e+90QG9luiRIirFYGagTEHCmxZZknio4EcduPbqxl+G+5xHJLSgvZReYp
|
||||
aeio1XrlmyIdivwT7bj6QqM=
|
||||
-----END PRIVATE KEY-----
|
@ -1,85 +0,0 @@
|
||||
# Scratch/debugging script: re-creates the SHA-1 digest that should be covered
# by an IDS "id-register" signature, and recovers the digest actually embedded
# in a captured raw RSA signature (sig^e mod n) so the two can be compared.

import hashlib
import plistlib  # kept: BODY is a plist, handy for interactive inspection
import zlib
from base64 import b64decode

import ids

# Captured request body (XML plist) to re-hash.
with open("body.txt", "r") as f:
    BODY = f.read()

# Values captured from a real registration exchange.
CERT = "MIIIKTCCBxGgAwIBAgIRAMRS4zTbARHt//////////8wDQYJKoZIhvcNAQEFBQAwbjELMAkGA1UEBhMCVVMxEzARBgNVBAoMCkFwcGxlIEluYy4xEjAQBgNVBAsMCUFwcGxlIElEUzEOMAwGA1UEDwwFZHMtaWQxJjAkBgNVBAMMHUFwcGxlIElEUyBEUy1JRCBSZWFsbSBDQSAtIFIxMB4XDTIzMDQxNDIwMjAxNVoXDTMzMDQxMTIwMjAxNVowXDELMAkGA1UEBhMCVVMxEzARBgNVBAoMCkFwcGxlIEluYy4xCTAHBgNVBAsMADEOMAwGA1UEDwwFZHMtaWQxHTAbBgoJkiaJk/IsZAEBDA1EOjEwMTA0MjI0OTc0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAteCyewpP4kvYA3Yb8T5Pt2S1Y8T1BRStnM4pZelzN+61sQvgFgbnO+5cs0swDKxexRpbHQ4Lo7FrVQhHry0AhxI4FAw7L4dilRH9XAvWvt+VrOiDY6V2ha+DQwpLjZpgLJ0Zgofh35CxGg5A/uUmeNhldGfo8DxdnR6t8FvE/qkkePNYZDMtk9X9xa3XcQypH89iG7AqIDnueTGReZ0IOPwOWb6AQ2HUQz2Ihz3PwfHxknQcYMMnm9iRFsDGeit/hByYTKvGzpcsd+2A5jRg5jeiPYi7olNOi2qaDEGaOa4vsJV3Z9aJpFPGTxXFDzSM+5sSP9XZtrfQ9WxExeW1FwIDAQABo4IE0jCCBM4wggRKBgNVHREEggRBMIIEPaAgBgoqhkiG92NkBgQEAxIAAwAAAAEAAAAAAAAGXgAAAACgggQXBgoqhkiG92NkBgQHA4IEBwBGVVNQAPM8GcTGrDQ7T/92lgt2SSgzrnmJhCZ8Ix6ahDnaNY+VMvm1sfFUziTt6fS18G9QDdNTHKpBuB4Ond4gCgIWBroGzmCvjFpRR8/dGkY+Ho80Q0wGLX/Au9ITmeWdk8xtkdaQ65n+ICVyfQXaMCI0J+kpC33hrytrMz/LPZ6c2tfKcykBR4Gp9RuwwUc1V+PsNSFeNqiLszBR1c95n4LLqoc4j2IC3vX+3QCfIJPc/zQqPaw6CWlKS/DJM2vGVhwlahGJyVZsc6bIKVftHoG4Jmzq25Itqg0V8PlJiqHAMhYdgCCy7s++L2NhkVecpKzyDW3CP0RPE2oJeinkxNxEA+V++4myoYBi4xsejhOLYMIOS21msqgmHKhi98xtkMUXD3tsLfymqwlL+EluurfzetV/baRqL88stFHakmlEspGuwaoTSsMisJ0B4HADw5digUH/tpUhFeaB2dfD+PRzqzp055V8JcpXoBN548oBA7IMbDjMH9TSdB0ZkexaB3v0TWpsTagxy0oNnSM3MdhoyGGFUB81vulo5YrO5kz/t3EC1BDuoVFBFIcLY2V5549UNyYksg7YxSzgVUHDclSpA/+TUWrT7SQS5dcvXXVktO0s05hxc7Itpb+FiGqhY+9zcrOGUKy1hKWEk4XAXYSIVORJdu7cXBgyGJ1RSubNl8+1dgZLhA45vSKyhhQWVPY0R8HBMb7Rg7NGjO29xsjD9jA3/03bxvM+X4vXpfO5sJtolyMxvlM4X3vIyEsHGtPLrwDjB0yuYJmqlTdQQZLWL8fi93XqKdt+xaCN8M+ATOUlBIhwr7SNNLIlZ38LsX5hwHUkGONuxiaU57kY9GhvRr7Tw0m8Hu2xjD1KkE0iAQEOcOkN6UcO9QbfCi1JQIV6vDpzuIuiNasQXOmnHYrkXYNf/JFZt4BAIFa1qoxHHLQ8aljz9vAyc7dIwEg6AIPOhcBHsb23GLFKVZ0Q2tQf9ci+r23iKFWhDP9RFEm/B6E7FcW5DIFifR9cYEBnRTtI2BlO49k3jGbHVj5L16VN8eY5HRSYXYpgpTmmDgIbD191nhtpMhKpKMrk8k8wJdL1YAYSVA2alC374y5hlm3p6F9ciYBoZBYUiP5npnt79HpmnQt2tiN41obyQ2SUShhjdm+Nhbr4qvYwafsBUHPDwxniArCs7Orek3gAjpP8Jq7QFMG/nlvN55STKKG01+4eTdggZkSeSbEAySY/b35/Ip98jhyICEDrsIPcv8UAnq1fgzDnRvvIJqEqZC9J0f+aylhNsWytLHECPIMBMM9lRNU2HWAI+pFI+J2QEWl8AkM8RAIniACVRbW0BbfWg3ZTb/NQgKWlkUQqT3xYHSsSgruxMB8GA1UdIwQYMBaAFITTbIZYMHdiREysh4kURPIcsTtjMB0GA1UdDgQWBBQea+P2ao26hYm1WZ9AcyBfo4VdlzAMBgNVHRMBAf8EAjAAMA4GA1UdDwEB/wQEAwID+DAgBgNVHSUBAf8EFjAUBggrBgEFBQcDAQYIKwYBBQUHAwIwDQYJKoZIhvcNAQEFBQADggEBADK7x9QZfg2vtK0IUYiI0OiTHgXYAlLuYjos6qLgSAtARoEzzRuA8sGlJ5JRYsWZqkUj2ERoOzq4S88heXlD+Dlj07RAMXsB0guxiwpsIzxZ7M/S2zOmRtlvCKxxdfKtg8ohNfbQfC/SmfhL+I9X7rm4hJOj+NkpgmhRfgPOWIbHHguaDhPIXmhgqLwAODpvYBBKjuMLSlkZZsOrpxfS79f5NcObnBKlTkmiKTb2NXeEZ8n6+qnaNJdN3moRN2Mp1IB5gEXD//ZT+9K1O4ge/r9p+TRInjyBuCwGo7y8bXVhShwjXvpqtAWmElwpQ9MMDt1BxAxGBk7Otc8f5G7ewkA="
SIG = "AQEZs/u9Ptb8AmFpCv5XgzUsskvcleZDBxYTe5JOoshFCxpnByTwFA0mxplklHqT2rTEeF+Bu0Bo0vEPlh9KslmIoQLo6ej25bbtFN07dnHNwd84xzQzWBa4VHLQE1gNjSpcorppxpAUon/eFRu5yRxmwQVmqo+XmmxSuFCzxUaAZAPFPDna+8tvRwd0q3kuK9b0w/kuT16X1SL166fFNzmsQGcBqob9C9xX0VlYGqSd4K975gWdYsPo/kiY0ni4Q130oc6oAANr8ATN0bEeAO6/AfVM2aqHJTGlYlekBFWf8Tp8AJLUc4cm676346IEBST+l4rYGxYYStV2PEmp9cZ+"
TOKEN = "5V7AY+ikHr4DiSfq1W2UBa71G3FLGkpUSKTrOLg81yk="
NONCE = "AQAAAYeBb0XwKDMBW5PfAPM="


def extract_hash(sig: bytes, cert: str) -> int:
    """Recover the RSA message representative from a raw signature.

    Computes ``sig^e mod n`` using the public key extracted from *cert*
    (a bare base64 DER certificate without PEM armor). For a valid
    PKCS#1 v1.5 signature the result is the padded digest, so the signed
    hash is visible in its hex representation.

    Args:
        sig: raw signature bytes, interpreted as a big-endian integer.
        cert: base64-encoded certificate body without PEM headers.

    Returns:
        The integer ``sig ** e mod n``.
    """
    # Explicit byteorder: required before Python 3.11 (the parameter had
    # no default) and matches RSA's big-endian I2OSP convention.
    sig_int = int.from_bytes(sig, "big")

    # Imported lazily so the rest of the script works without cryptography.
    from cryptography.hazmat.backends import default_backend
    from cryptography.x509 import load_pem_x509_certificate

    pem = "-----BEGIN CERTIFICATE-----\n" + cert + "\n-----END CERTIFICATE-----"
    certificate = load_pem_x509_certificate(pem.encode(), default_backend())
    public = certificate.public_key().public_numbers()
    # "Decrypt" with the public key: sig^e mod n.
    return pow(sig_int, public.e, public.n)


# Compress the body exactly as the client does (gzip container via zlib).
body = zlib.compress(BODY.encode(), wbits=16 + zlib.MAX_WBITS)

# Digest that the signature *should* cover.
p = ids._create_payload("id-register", "", TOKEN, body, b64decode(NONCE))[0]
s = hashlib.sha1(p).digest()
print(s.hex())

# Digest actually embedded in the captured signature. The first two bytes of
# the blob are a header, not part of the RSA signature, hence the [2:].
print(hex(extract_hash(b64decode(SIG)[2:], CERT)))
|
@ -1,55 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>iokit</key>
|
||||
<dict>
|
||||
<key>4D1EDE05-38C7-4A6A-9CC6-4BCCA8B38C14:MLB</key>
|
||||
<data>
|
||||
QzA3NjQ5NzAwN0ZHVjZYRFc=
|
||||
</data>
|
||||
<key>4D1EDE05-38C7-4A6A-9CC6-4BCCA8B38C14:ROM</key>
|
||||
<data>
|
||||
qGC26soK
|
||||
</data>
|
||||
<key>Fyp98tpgj</key>
|
||||
<data>
|
||||
N7rgxPQOa8kAoaUFzrgOoJI=
|
||||
</data>
|
||||
<key>Gq3489ugfi</key>
|
||||
<data>
|
||||
zxsZESjSTtPw9SyBsELYoEM=
|
||||
</data>
|
||||
<key>IOMACAddress</key>
|
||||
<data>
|
||||
qGC2H1Am
|
||||
</data>
|
||||
<key>IOPlatformSerialNumber</key>
|
||||
<string>C07SW06DG1J1</string>
|
||||
<key>IOPlatformUUID</key>
|
||||
<string>9CA28140-E736-59AB-9AC6-792E507D2184</string>
|
||||
<key>abKPld1EcMni</key>
|
||||
<data>
|
||||
mqJmiIGO40uj6irTvezB4K4=
|
||||
</data>
|
||||
<key>board-id</key>
|
||||
<data>
|
||||
TWFjLTM1QzVFMDgxMjBDN0VFQUYA
|
||||
</data>
|
||||
<key>kbjfrfpoJU</key>
|
||||
<data>
|
||||
RMD8Rrsnoq/nRCd5qG5pxvc=
|
||||
</data>
|
||||
<key>oycqAZloTNDm</key>
|
||||
<data>
|
||||
oygSjpinM48Z7vkz7Ttqaek=
|
||||
</data>
|
||||
<key>product-name</key>
|
||||
<data>
|
||||
TWFjbWluaTcsMQA=
|
||||
</data>
|
||||
</dict>
|
||||
<key>root_disk_uuid</key>
|
||||
<string>2CB26D1C-D73C-4F92-970B-F87341D4144E</string>
|
||||
</dict>
|
||||
</plist>
|
@ -1,356 +0,0 @@
|
||||
from io import BytesIO
|
||||
import unicorn
|
||||
from . import mparser as macholibre
|
||||
import logging
|
||||
logger = logging.getLogger("jelly")

STOP_ADDRESS = 0x00900000 # Used as a return address when calling functions

# Integer argument registers in call order (System V AMD64 ABI: the first
# six integer/pointer arguments go in RDI, RSI, RDX, RCX, R8, R9).
ARG_REGISTERS = [
    unicorn.x86_const.UC_X86_REG_RDI,
    unicorn.x86_const.UC_X86_REG_RSI,
    unicorn.x86_const.UC_X86_REG_RDX,
    unicorn.x86_const.UC_X86_REG_RCX,
    unicorn.x86_const.UC_X86_REG_R8,
    unicorn.x86_const.UC_X86_REG_R9
]
|
||||
|
||||
class VirtualInstructions:
    """Small helpers that mimic x86-64 push/pop/call on a Unicorn instance."""

    def __init__(self, uc: unicorn.Uc):
        self.uc = uc

    def push(self, value: int):
        """Push a 64-bit little-endian value onto the emulated stack."""
        # Grow the stack down by 8 bytes, then store the value at the new top.
        new_sp = self.uc.reg_read(unicorn.x86_const.UC_X86_REG_ESP) - 8
        self.uc.reg_write(unicorn.x86_const.UC_X86_REG_ESP, new_sp)
        self.uc.mem_write(new_sp, value.to_bytes(8, byteorder='little'))

    def pop(self) -> int:
        """Pop and return a 64-bit little-endian value from the emulated stack."""
        sp = self.uc.reg_read(unicorn.x86_const.UC_X86_REG_ESP)
        raw = self.uc.mem_read(sp, 8)
        self.uc.reg_write(unicorn.x86_const.UC_X86_REG_ESP, sp + 8)
        return int.from_bytes(raw, byteorder='little')

    def _set_args(self, args: list[int]):
        """Place call arguments: first six in registers, the rest on the stack."""
        for position, value in enumerate(args):
            if position < 6:
                self.uc.reg_write(ARG_REGISTERS[position], value)
            else:
                self.push(value)

    def call(self, address: int, args: list[int] = []):
        """Call the emulated function at *address* and return its RAX value.

        Pushes STOP_ADDRESS as the return address so emulation halts when
        the function returns.
        """
        logger.debug(f"Calling {hex(address)} with args {args}")
        self.push(STOP_ADDRESS)
        self._set_args(args)
        self.uc.emu_start(address, STOP_ADDRESS)
        return self.uc.reg_read(unicorn.x86_const.UC_X86_REG_RAX)
|
||||
|
||||
|
||||
class Jelly:
    """Loads a Mach-O x86-64 slice into a Unicorn VM and lets Python call
    into it, with selected imported symbols redirected to Python hooks.
    """

    # Constants
    UC_ARCH = unicorn.UC_ARCH_X86
    UC_MODE = unicorn.UC_MODE_64

    # Address the binary image is mapped at.
    BINARY_BASE = 0x0

    # Region whose addresses stand in for hooked imported symbols.
    HOOK_BASE = 0xD00000
    HOOK_SIZE = 0x1000

    STACK_BASE = 0x00300000
    STACK_SIZE = 0x00100000

    HEAP_BASE = 0x00400000
    HEAP_SIZE = 0x00100000

    # Sentinel return address: a page of 'ret' instructions (see _setup_stop).
    STOP_ADDRESS = 0x00900000

    # Public variables
    _hooks: dict[str, callable] = {}
    """Symbol name to hook function mapping"""

    instr: VirtualInstructions = None  # push/pop/call helper, set by setup()

    uc: unicorn.Uc = None  # the underlying Unicorn instance, set by setup()

    # Private variables
    _binary: bytes = b""  # raw Mach-O slice passed to __init__

    _heap_use: int = 0  # bump-allocator watermark for malloc()

    def __init__(self, binary: bytes):
        self._binary = binary

    def setup(self, hooks: dict[str, callable] = {}):
        """Create the VM, install hooks, map the binary, and set up memory.

        Order matters: _setup_hooks must run before _map_binary, because
        _map_binary resolves binds against self._resolved_hooks.
        """
        self._hooks = hooks
        self._setup_unicorn()
        self.instr = VirtualInstructions(self.uc)
        self._setup_hooks()
        self._map_binary()
        self._setup_stack()
        self._setup_heap()
        self._setup_stop()

    def _setup_unicorn(self):
        # Fresh 64-bit x86 emulator instance.
        self.uc = unicorn.Uc(self.UC_ARCH, self.UC_MODE)

    def _setup_stack(self):
        # Map and zero the stack, then point ESP/EBP at its top (grows down).
        self.uc.mem_map(self.STACK_BASE, self.STACK_SIZE)
        self.uc.mem_write(self.STACK_BASE, b"\x00" * self.STACK_SIZE)

        self.uc.reg_write(unicorn.x86_const.UC_X86_REG_ESP, self.STACK_BASE + self.STACK_SIZE)
        self.uc.reg_write(unicorn.x86_const.UC_X86_REG_EBP, self.STACK_BASE + self.STACK_SIZE)

    def _setup_heap(self):
        # Backing store for the naive malloc() below.
        self.uc.mem_map(self.HEAP_BASE, self.HEAP_SIZE)
        self.uc.mem_write(self.HEAP_BASE, b"\x00" * self.HEAP_SIZE)

    def debug_registers(self):
        """Log the current general-purpose register values (debug aid)."""
        logger.debug(f"""
        RAX: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_RAX))}
        RBX: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_RBX))}
        RCX: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_RCX))}
        RDX: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_RDX))}
        RSI: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_RSI))}
        RDI: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_RDI))}
        RSP: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_RSP))}
        RBP: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_RBP))}
        RIP: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_RIP))}
        R8: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_R8))}
        R9: {hex(self.uc.reg_read(unicorn.x86_const.UC_X86_REG_R9))}
        """)

    def wrap_hook(self, func: callable) -> callable:
        """Wrap a Python hook so it can be driven from emulated code.

        The wrapper reads the hook's arguments out of the ABI registers /
        stack, invokes the hook, and stores a non-None result in RAX.
        """
        # Get the number of arguments the function takes
        arg_count = func.__code__.co_argcount

        # Create a wrapper function that reads the arguments from registers and the stack
        def wrapper(self: 'Jelly'):
            args = []
            # Argument slot 0 is the Jelly instance itself, so start at 1.
            # NOTE(review): with i starting at 1, `i < 6` pulls only 5 values
            # from ARG_REGISTERS while _set_args places 6 in registers —
            # looks like an off-by-one for hooks taking 6+ args; confirm.
            for i in range(1, arg_count):
                if i < 6:
                    args.append(self.uc.reg_read(ARG_REGISTERS[i-1]))
                else:
                    args.append(self.instr.pop())
            logger.debug(f"calling {func.__name__}")
            if args != []:
                logger.debug(f" with args: {args}")
            ret = func(self, *args)
            if ret is not None:
                # Non-None results become the emulated function's return value.
                self.uc.reg_write(unicorn.x86_const.UC_X86_REG_RAX, ret)
            return
        return wrapper

    def malloc(self, size: int) -> int:
        # Very naive malloc implementation: bump allocator, never frees.
        addr = self.HEAP_BASE + self._heap_use
        self._heap_use += size
        return addr

    def _setup_stop(self):
        # Fill the stop page with 'ret' so execution reaching it returns
        # immediately and emu_start(..., STOP_ADDRESS) terminates.
        self.uc.mem_map(self.STOP_ADDRESS, 0x1000)
        self.uc.mem_write(self.STOP_ADDRESS, b"\xc3" * 0x1000)

    def _resolve_hook(uc: unicorn.Uc, address: int, size: int, self: 'Jelly'):
        # Unicorn code-hook callback: deliberately NOT a bound method —
        # the Jelly instance arrives via user_data (see _setup_hooks).
        for name, addr in self._resolved_hooks.items():
            if addr == address:
                logger.debug(f"{name}: ")
                self._hooks[name](self)

    def _setup_hooks(self):
        """Assign each named hook a unique address in the hook region."""
        # Wrap all hooks
        for name, func in self._hooks.items():
            self._hooks[name] = self.wrap_hook(func)

        self.uc.mem_map(self.HOOK_BASE, self.HOOK_SIZE)
        # Write 'ret' instruction to all hook addresses
        self.uc.mem_write(self.HOOK_BASE, b"\xc3" * self.HOOK_SIZE)
        # Assign address in hook space to each hook (one byte apiece: each
        # address is just a distinct 'ret' that the code hook intercepts)
        current_address = self.HOOK_BASE
        self._resolved_hooks = {}
        for hook in self._hooks:
            self._resolved_hooks[hook] = current_address
            current_address += 1
        # Add unicorn instruction hook to entire hook space
        self.uc.hook_add(unicorn.UC_HOOK_CODE, Jelly._resolve_hook, begin=self.HOOK_BASE, end=self.HOOK_BASE + self.HOOK_SIZE, user_data=self)

    def _map_binary(self):
        """Map the Mach-O image at BINARY_BASE and resolve its binds."""
        self.uc.mem_map(self.BINARY_BASE, round_to_page_size(len(self._binary), self.uc.ctl_get_page_size()))
        self.uc.mem_write(self.BINARY_BASE, self._binary)

        # Unmap the first page so we can catch NULL derefs
        self.uc.mem_unmap(0x0, self.uc.ctl_get_page_size())

        # Parse the binary so we can process binds
        p = macholibre.Parser(self._binary)
        p.parse()

        for seg in p.segments:
            for section in seg['sects']:
                if section['type'] == 'LAZY_SYMBOL_POINTERS' or section['type'] == 'NON_LAZY_SYMBOL_POINTERS':
                    self._parse_lazy_binds(self.uc, section['r1'], section, self._binary[p.dysymtab['indirectsymoff']:], self._binary[p.symtab['stroff']:], self._binary[p.symtab['symoff']:])

        self._parse_binds(self.uc, self._binary[p.dyld_info['bind_off']:p.dyld_info['bind_off']+p.dyld_info['bind_size']], p.segments)

    def _do_bind(self, mu: unicorn.Uc, type, location, name):
        """Patch one bound pointer: hooked symbols point into hook space,
        unknown symbols are left untouched."""
        if type == 1: # BIND_TYPE_POINTER
            if name in self._hooks:
                mu.mem_write(location, self._resolved_hooks[name].to_bytes(8, byteorder='little'))
            else:
                # Symbol we don't emulate; leave the original pointer bytes.
                pass
        else:
            raise NotImplementedError(f"Unknown bind type {type}")

    def _parse_lazy_binds(self, mu: unicorn.Uc, indirect_offset, section, dysimtab, strtab, symtab):
        """Resolve a (non-)lazy symbol pointer section via the indirect
        symbol table: slot i -> indirect entry -> nlist -> symbol name."""
        logger.debug(f"Doing binds for {section['name']}")
        for i in range(0, int(section['size']/8)):
            # Each 8-byte pointer slot has a 4-byte indirect-symbol entry.
            dysym = dysimtab[(indirect_offset + i)*4:(indirect_offset + i)*4+4]
            dysym = int.from_bytes(dysym, 'little')
            # Low 30 bits are the symbol-table index (high bits are flags).
            index = dysym & 0x3fffffff

            # nlist_64 entries are 16 bytes; first 4 bytes are the
            # string-table offset of the symbol name.
            symbol = symtab[index * 16:(index * 16) + 4]
            strx = int.from_bytes(symbol, 'little')

            name = c_string(strtab, strx) # Remove _ at beginning
            self._do_bind(mu, 1, section['offset'] + (i * 8), name)

    def _parse_binds(self, mu: unicorn.Uc, binds: bytes, segments):
        """Interpret dyld's bind opcode stream and apply each bind."""
        blen = len(binds)
        binds: BytesIO = BytesIO(binds)

        # Bind state machine registers (see dyld's bind opcode format).
        ordinal = 0
        symbolName = ''
        type = BIND_TYPE_POINTER
        addend = 0
        segIndex = 0
        segOffset = 0

        while binds.tell() < blen:
            # Each opcode byte packs the opcode (high nibble) and an
            # immediate operand (low nibble).
            current = binds.read(1)[0]
            opcode = current & BIND_OPCODE_MASK
            immediate = current & BIND_IMMEDIATE_MASK

            if opcode == BIND_OPCODE_DONE:
                logger.debug("BIND_OPCODE_DONE")
                break
            elif opcode == BIND_OPCODE_SET_DYLIB_ORDINAL_IMM:
                ordinal = immediate
            elif opcode == BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB:
                ordinal = decodeULEB128(binds)
            elif opcode == BIND_OPCODE_SET_DYLIB_SPECIAL_IMM:
                # NOTE(review): dyld sign-extends the special ordinal;
                # OR-ing with the opcode mask approximates that — confirm.
                if (immediate == 0):
                    ordinal = 0
                else:
                    ordinal = BIND_OPCODE_MASK | immediate
            elif opcode == BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM:
                # Parse string until null terminator
                symbolName = ''
                while True:
                    b = binds.read(1)[0]
                    if b == 0:
                        break
                    symbolName += chr(b)
            elif opcode == BIND_OPCODE_SET_TYPE_IMM:
                type = immediate
            elif opcode == BIND_OPCODE_SET_ADDEND_SLEB:
                # Signed LEB128 decoding not implemented.
                raise NotImplementedError("BIND_OPCODE_SET_ADDEND_SLEB")
            elif opcode == BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB:
                segIndex = immediate
                segOffset = decodeULEB128(binds)
            elif opcode == BIND_OPCODE_ADD_ADDR_ULEB:
                segOffset += decodeULEB128(binds)
            elif opcode == BIND_OPCODE_DO_BIND:
                self._do_bind(mu, type, segments[segIndex]['offset'] + segOffset, symbolName)
                segOffset += 8
            elif opcode == BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB:
                self._do_bind(mu, type, segments[segIndex]['offset'] + segOffset, symbolName)
                segOffset += decodeULEB128(binds) + 8
            elif opcode == BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED:
                self._do_bind(mu, type, segments[segIndex]['offset'] + segOffset, symbolName)
                segOffset += immediate * 8 + 8
            elif opcode == BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB:
                count = decodeULEB128(binds)
                skip = decodeULEB128(binds)
                for i in range(count):
                    self._do_bind(mu, type, segments[segIndex]['offset'] + segOffset, symbolName)
                    segOffset += skip + 8
            else:
                logger.error(f"Unknown bind opcode {opcode}")
|
||||
|
||||
# Mach-O defines: dyld bind opcode stream constants (mach-o/loader.h).
# Each opcode byte is (opcode & 0xF0) | (immediate & 0x0F).
BIND_OPCODE_DONE = 0x00
BIND_OPCODE_SET_DYLIB_ORDINAL_IMM = 0x10
BIND_OPCODE_SET_DYLIB_ORDINAL_ULEB = 0x20
BIND_OPCODE_SET_DYLIB_SPECIAL_IMM = 0x30
BIND_OPCODE_SET_SYMBOL_TRAILING_FLAGS_IMM = 0x40
BIND_OPCODE_SET_TYPE_IMM = 0x50
BIND_OPCODE_SET_ADDEND_SLEB = 0x60
BIND_OPCODE_SET_SEGMENT_AND_OFFSET_ULEB = 0x70
BIND_OPCODE_ADD_ADDR_ULEB = 0x80
BIND_OPCODE_DO_BIND = 0x90
BIND_OPCODE_DO_BIND_ADD_ADDR_ULEB = 0xA0
BIND_OPCODE_DO_BIND_ADD_ADDR_IMM_SCALED = 0xB0
BIND_OPCODE_DO_BIND_ULEB_TIMES_SKIPPING_ULEB = 0xC0
BIND_OPCODE_THREADED = 0xD0

BIND_TYPE_POINTER = 1

# Masks splitting an opcode byte into opcode / immediate nibbles.
BIND_OPCODE_MASK = 0xF0
BIND_IMMEDIATE_MASK = 0x0F
|
||||
|
||||
# Helper functions
|
||||
def round_to_page_size(size: int, page_size: int) -> int:
    """Round *size* up to the next *page_size* boundary.

    Uses the bitmask trick, so *page_size* is assumed to be a power of two.
    """
    mask = page_size - 1
    return (size + mask) & ~mask
|
||||
|
||||
def decodeULEB128(stream: BytesIO) -> int:
    """Decode one unsigned LEB128 integer from *stream*.

    Reads bytes until one with the continuation bit (0x80) clear is found;
    each byte contributes its low 7 bits, least-significant group first.

    Args:
        stream: a binary stream positioned at the start of a ULEB128 value.

    Returns:
        The decoded non-negative integer; the stream is left positioned
        just past the value.

    Raises:
        EOFError: if the stream ends mid-value (previously a bare IndexError).
    """
    result = 0
    shift = 0
    while True:
        chunk = stream.read(1)
        if not chunk:
            raise EOFError("truncated ULEB128 value")
        byte = chunk[0]
        result |= (byte & 0x7F) << shift
        if (byte & 0x80) == 0:
            return result
        shift += 7
|
||||
|
||||
def c_string(data, start: int = 0) -> str:
    """Read a NUL-terminated C string out of *data* starting at *start*.

    Args:
        data: a bytes-like object (indexable, yielding ints 0-255).
        start: offset of the first character.

    Returns:
        The characters up to (not including) the first NUL byte, decoded
        one byte per character (latin-1). If no NUL is present, everything
        from *start* to the end of *data* is returned — the original
        bounds check (``i > len``) let ``i == len`` through and raised
        IndexError in that case.
    """
    end = start
    while end < len(data) and data[end] != 0:
        end += 1
    # One byte per character, same as building the string with chr().
    return bytes(data[start:end]).decode("latin-1")
|
2301
emulated/mparser.py
2301
emulated/mparser.py
File diff suppressed because it is too large
Load Diff
422
emulated/nac.py
422
emulated/nac.py
@ -1,422 +0,0 @@
|
||||
import hashlib
|
||||
from . import mparser as macholibre
|
||||
from .jelly import Jelly
|
||||
import plistlib
|
||||
import logging
|
||||
logger = logging.getLogger("nac")

# Known-good IMDAppleServices binary: pinned by SHA-1, fetched on demand
# from BINARY_URL and cached at BINARY_PATH (see load_binary below).
BINARY_HASH = "e1181ccad82e6629d52c6a006645ad87ee59bd13"
BINARY_PATH = "emulated/IMDAppleServices"
BINARY_URL = "https://github.com/JJTech0130/nacserver/raw/main/IMDAppleServices"

# Spoofed device identifiers (IOKit keys etc.) served to the emulated code.
FAKE_DATA = plistlib.load(open("emulated/data.plist", "rb"))
|
||||
|
||||
def load_binary() -> bytes:
    """Return the IMDAppleServices binary, downloading it on first use.

    The binary is cached at BINARY_PATH; whichever path supplied the bytes,
    their SHA-1 must match BINARY_HASH.

    Returns:
        The raw Mach-O binary bytes.

    Raises:
        Exception: if the SHA-1 of the binary does not match BINARY_HASH.
    """
    import os, requests
    if not os.path.exists(BINARY_PATH):
        logger.info("Downloading IMDAppleServices")
        resp = requests.get(BINARY_URL)
        b = resp.content
        # Save the binary (fix: close the handle instead of leaking it)
        with open(BINARY_PATH, "wb") as f:
            f.write(b)
    else:
        logger.debug("Using already downloaded IMDAppleServices")
        with open(BINARY_PATH, "rb") as f:
            b = f.read()
    if hashlib.sha1(b).hexdigest() != BINARY_HASH:
        raise Exception("Hashes don't match")
    return b
|
||||
|
||||
|
||||
def get_x64_slice(binary: bytes) -> bytes:
    """Return the x86-64 slice of a (possibly fat) Mach-O binary.

    Parses the binary with macholibre to locate the X86_64 architecture
    and slices it out of the raw bytes.
    """
    parser = macholibre.Parser(binary)
    offset, size = parser.u_get_offset(cpu_type="X86_64")
    return binary[offset : offset + size]
|
||||
|
||||
|
||||
def nac_init(j: "Jelly", cert: bytes):
    """Run the emulated ``nac_init`` entry point at 0xB1DB0.

    Copies *cert* into emulated memory, calls the function, and reads back
    its outputs.

    Returns:
        A ``(validation_ctx_addr, request)`` tuple: the opaque context
        pointer (an address inside the emulator) and the request bytes.

    Raises:
        Exception: if the emulated call returns a non-zero status.
    """
    # Copy the certificate into emulated memory.
    cert_addr = j.malloc(len(cert))
    j.uc.mem_write(cert_addr, cert)

    # Three 8-byte output slots, filled in by the emulated function.
    out_validation_ctx_addr = j.malloc(8)
    out_request_bytes_addr = j.malloc(8)
    out_request_len_addr = j.malloc(8)

    ret = j.instr.call(
        0xB1DB0,
        [
            cert_addr,
            len(cert),
            out_validation_ctx_addr,
            out_request_bytes_addr,
            out_request_len_addr,
        ],
    )

    if ret != 0:
        # Reinterpret the low 32 bits as a signed status code.
        status = ((ret & 0xffffffff) ^ 0x80000000) - 0x80000000
        raise Exception(f"Error calling nac_init: {status}")

    # Read the three outputs back out of emulated memory (little-endian).
    validation_ctx_addr = int.from_bytes(j.uc.mem_read(out_validation_ctx_addr, 8), 'little')
    request_bytes_addr = int.from_bytes(j.uc.mem_read(out_request_bytes_addr, 8), 'little')
    request_len = int.from_bytes(j.uc.mem_read(out_request_len_addr, 8), 'little')

    logger.debug(f"Request @ {hex(request_bytes_addr)} : {hex(request_len)}")

    request = j.uc.mem_read(request_bytes_addr, request_len)
    return validation_ctx_addr, request
|
||||
|
||||
def nac_key_establishment(j: "Jelly", validation_ctx: int, response: bytes):
    """Feed the server's *response* into the emulated key-establishment
    routine at 0xB1DD0.

    Raises:
        Exception: if the emulated call returns a non-zero status.
    """
    # Copy the response into emulated memory.
    response_addr = j.malloc(len(response))
    j.uc.mem_write(response_addr, response)

    ret = j.instr.call(
        0xB1DD0,
        [
            validation_ctx,
            response_addr,
            len(response),
        ],
    )

    if ret != 0:
        # Reinterpret the low 32 bits as a signed status code.
        status = ((ret & 0xffffffff) ^ 0x80000000) - 0x80000000
        raise Exception(f"Error calling nac_submit: {status}")
|
||||
|
||||
def nac_sign(j: "Jelly", validation_ctx: int):
    """Run the emulated signing routine at 0xB1DF0 and return the
    validation data bytes.

    Emulated signature: (validation_ctx, unk_bytes, unk_len,
    out_validation_data, out_validation_data_len) — the two "unk"
    arguments are passed as 0.

    Raises:
        Exception: if the emulated call returns a non-zero status.
    """
    # Two 8-byte output slots, filled in by the emulated function.
    out_validation_data_addr = j.malloc(8)
    out_validation_data_len_addr = j.malloc(8)

    ret = j.instr.call(
        0xB1DF0,
        [
            validation_ctx,
            0,
            0,
            out_validation_data_addr,
            out_validation_data_len_addr,
        ],
    )

    if ret != 0:
        # Reinterpret the low 32 bits as a signed status code.
        status = ((ret & 0xffffffff) ^ 0x80000000) - 0x80000000
        raise Exception(f"Error calling nac_generate: {status}")

    # Dereference the output pointer/length and read the data itself.
    data_addr = int.from_bytes(j.uc.mem_read(out_validation_data_addr, 8), 'little')
    data_len = int.from_bytes(j.uc.mem_read(out_validation_data_len_addr, 8), 'little')
    return j.uc.mem_read(data_addr, data_len)
|
||||
|
||||
|
||||
def hook_code(uc, address: int, size: int, user_data):
    """Unicorn per-instruction trace hook; logs each executed instruction."""
    message = ">>> Tracing instruction at 0x%x, instruction size = 0x%x" % (address, size)
    logger.debug(message)
|
||||
|
||||
|
||||
def malloc(j: "Jelly", size: int) -> int:
    """Emulation hook for libc ``malloc``.

    Delegates to the Jelly instance's bump allocator. (Parameter renamed
    from ``len``, which shadowed the builtin.)

    Args:
        j: the Jelly emulator instance.
        size: number of bytes requested by the emulated code.

    Returns:
        The emulated address of the allocated memory.
    """
    return j.malloc(size)
|
||||
|
||||
|
||||
def memset_chk(j: Jelly, dest: int, c: int, len: int, destlen: int):
    """Hook for ___memset_chk: fill `len` bytes at `dest` with byte `c`.

    The `destlen` overflow-check argument is logged but not enforced.
    """
    logger.debug(
        "memset_chk called with dest = 0x%x, c = 0x%x, len = 0x%x, destlen = 0x%x"
        % (dest, c, len, destlen)
    )
    fill = bytes([c]) * len
    j.uc.mem_write(dest, fill)
    return 0
||||
def sysctlbyname(j: Jelly):
    """Hook for _sysctlbyname: pretend success."""
    return 0  # The output is not checked
||||
def memcpy(j: Jelly, dest: int, src: int, len: int):
    """Hook for _memcpy: copy `len` bytes from `src` to `dest` in emulated memory."""
    logger.debug("memcpy called with dest = 0x%x, src = 0x%x, len = 0x%x" % (dest, src, len))
    src_bytes = j.uc.mem_read(src, len)
    j.uc.mem_write(dest, bytes(src_bytes))
    return 0
||||
import struct

# Registry of emulated CoreFoundation objects. A "CFTypeRef" handed to the
# emulated binary is (index into this list) + 1, since 0 must mean NULL.
CF_OBJECTS = []

# struct __builtin_CFString {
#     int *isa; // point to __CFConstantStringClassReference
#     int flags;
#     const char *str;
#     long length;
# }


def _parse_cfstr_ptr(j: Jelly, ptr: int) -> str:
    """Decode a constant CFString struct at `ptr` in emulated memory."""
    header = j.uc.mem_read(ptr, struct.calcsize("<QQQQ"))
    _isa, _flags, str_ptr, length = struct.unpack("<QQQQ", header)
    return j.uc.mem_read(str_ptr, length).decode("utf-8")
||||
def _parse_cstr_ptr(j: Jelly, ptr: int) -> str:
    """Read a NUL-terminated C string at `ptr` (fixed 256-byte window)."""
    raw = j.uc.mem_read(ptr, 256)  # Lazy way to do it
    return raw.split(b"\x00")[0].decode("utf-8")
||||
def IORegistryEntryCreateCFProperty(j: Jelly, entry: int, key: int, allocator: int, options: int):
    """Hook: look up a faked IOKit registry property by CFString key.

    Returns a CF handle (index+1 into CF_OBJECTS) or 0 (NULL) when there
    is no fake value for the key.
    """
    key_str = _parse_cfstr_ptr(j, key)
    if key_str not in FAKE_DATA["iokit"]:
        logger.debug(f"IOKit Entry: {key_str} -> None")
        return 0
    fake = FAKE_DATA["iokit"][key_str]
    logger.debug(f"IOKit Entry: {key_str} -> {fake}")
    CF_OBJECTS.append(fake)
    return len(CF_OBJECTS)  # NOTE: We will have to subtract 1 from this later, can't return 0 here since that means NULL
||||
def CFGetTypeID(j: Jelly, obj: int):
    """Hook: report a fake type id for a CF handle (1 = CFData, 2 = CFString)."""
    value = CF_OBJECTS[obj - 1]
    if isinstance(value, bytes):
        return 1
    if isinstance(value, str):
        return 2
    raise Exception("Unknown CF object type")
||||
def CFDataGetLength(j: Jelly, obj: int):
    """Hook: length of the CFData behind the given handle."""
    value = CF_OBJECTS[obj - 1]
    if not isinstance(value, bytes):
        raise Exception("Unknown CF object type")
    return len(value)
||||
def CFDataGetBytes(j: Jelly, obj: int, range_start: int, range_end: int, buf: int):
    """Hook: copy a slice of the CFData behind `obj` into emulated memory at `buf`."""
    value = CF_OBJECTS[obj - 1]
    if not isinstance(value, bytes):
        raise Exception("Unknown CF object type")
    chunk = value[range_start:range_end]
    j.uc.mem_write(buf, chunk)
    logger.debug(f"CFDataGetBytes: {hex(range_start)}-{hex(range_end)} -> {hex(buf)}")
    return len(chunk)
||||
def CFDictionaryCreateMutable(j: Jelly) -> int:
    """Hook: allocate a fresh mutable dictionary and return its CF handle."""
    CF_OBJECTS.append({})
    return len(CF_OBJECTS)
||||
def maybe_object_maybe_string(j: Jelly, obj: int):
    """Resolve a value that may be a host str, a CF handle, or a raw pointer.

    Host strings pass through; handles small enough to index CF_OBJECTS are
    dereferenced; anything larger is returned as-is (probably a raw pointer
    into emulated memory).
    """
    # If it's already a str
    if isinstance(obj, str):
        return obj
    if obj > len(CF_OBJECTS):
        #raise Exception(f"WTF: {hex(obj)}")
        # This is probably a CFString
        # return _parse_cfstr_ptr(j, obj)
        return obj
    return CF_OBJECTS[obj - 1]
||||
def CFDictionaryGetValue(j: Jelly, d: int, key: int) -> int:
    """Hook: look up `key` in the dictionary behind handle `d`.

    Registers the value as a new CF object and returns its handle.
    Raises if `d` is not a dictionary or the key is missing.

    Fix: the original had an unreachable `return 0` after the
    "Key not found" raise — dead code removed.
    """
    logger.debug(f"CFDictionaryGetValue: {d} {hex(key)}")
    d = CF_OBJECTS[d - 1]
    if key == 0xc3c3c3c3c3c3c3c3:
        key = "DADiskDescriptionVolumeUUIDKey"  # Weirdness, this is a hack
    key = maybe_object_maybe_string(j, key)
    if not isinstance(d, dict):
        raise Exception("Unknown CF object type")
    if key not in d:
        raise Exception("Key not found")
    val = d[key]
    logger.debug(f"CFDictionaryGetValue: {key} -> {val}")
    CF_OBJECTS.append(val)
    return len(CF_OBJECTS)
||||
def CFDictionarySetValue(j: Jelly, d: int, key: int, val: int):
    """Hook: set `key` -> `val` in the dictionary behind handle `d`."""
    target = CF_OBJECTS[d - 1]
    resolved_key = maybe_object_maybe_string(j, key)
    resolved_val = maybe_object_maybe_string(j, val)
    if not isinstance(target, dict):
        raise Exception("Unknown CF object type")
    target[resolved_key] = resolved_val
||||
def DADiskCopyDescription(j: Jelly) -> int:
    """Hook: return a disk-description dict carrying the faked root volume UUID."""
    desc = CFDictionaryCreateMutable(j)
    CFDictionarySetValue(j, desc, "DADiskDescriptionVolumeUUIDKey", FAKE_DATA["root_disk_uuid"])
    return desc
||||
def CFStringCreate(j: Jelly, string: str) -> int:
    """Register a host string as a CF object and return its handle."""
    CF_OBJECTS.append(string)
    return len(CF_OBJECTS)
||||
def CFStringGetLength(j: Jelly, string: int) -> int:
    """Hook: length of the CFString behind the given handle."""
    value = CF_OBJECTS[string - 1]
    if not isinstance(value, str):
        raise Exception("Unknown CF object type")
    return len(value)
||||
def CFStringGetCString(j: Jelly, string: int, buf: int, buf_len: int, encoding: int) -> int:
    """Hook: write the CFString behind `string` into emulated memory at `buf`.

    NOTE(review): `buf_len` is not enforced and no NUL terminator is written;
    presumably the destination is pre-zeroed — confirm against callers.
    """
    value = CF_OBJECTS[string - 1]
    if not isinstance(value, str):
        raise Exception("Unknown CF object type")
    encoded = value.encode("utf-8")
    j.uc.mem_write(buf, encoded)
    logger.debug(f"CFStringGetCString: {value} -> {hex(buf)}")
    return len(encoded)
||||
def IOServiceMatching(j: Jelly, name: int) -> int:
    """Hook: build a matching dictionary {"IOProviderClass": <service name>}."""
    # Read the raw c string pointed to by name
    service_name = _parse_cstr_ptr(j, name)
    logger.debug(f"IOServiceMatching: {service_name}")
    # Wrap the name as a CF string, then stash it in a fresh dictionary
    name_handle = CFStringCreate(j, service_name)
    matching = CFDictionaryCreateMutable(j)
    CFDictionarySetValue(j, matching, "IOProviderClass", name_handle)
    return matching
||||
def IOServiceGetMatchingService(j: Jelly) -> int:
    """Hook: hand back a fixed dummy service handle."""
    return 92
||||
# One-shot flag so the fake service iterator yields exactly one entry.
ETH_ITERATOR_HACK = False

def IOServiceGetMatchingServices(j: Jelly, port, match, existing) -> int:
    """Hook: fake a service iterator by writing handle 93 into *existing."""
    global ETH_ITERATOR_HACK
    ETH_ITERATOR_HACK = True
    j.uc.mem_write(existing, bytes([93]))
    return 0
||||
def IOIteratorNext(j: Jelly, iterator: int) -> int:
    """Hook: yield dummy entry 94 exactly once, then report exhaustion (0)."""
    global ETH_ITERATOR_HACK
    if not ETH_ITERATOR_HACK:
        return 0
    ETH_ITERATOR_HACK = False
    return 94
||||
def bzero(j: Jelly, ptr: int, len: int):
    """Hook for ___bzero: zero `len` bytes at `ptr` in emulated memory."""
    j.uc.mem_write(ptr, b"\x00" * len)
    return 0
||||
def IORegistryEntryGetParentEntry(j: Jelly, entry: int, _, parent: int) -> int:
    """Hook: fake a registry entry's parent as entry + 100.

    NOTE(review): only one byte is written, so this would raise if
    entry + 100 > 255 — presumably entry handles stay small; confirm.
    """
    j.uc.mem_write(parent, bytes([entry + 100]))
    return 0
||||
import plistlib

import requests


def get_cert():
    """Fetch Apple's NAC validation certificate plist and return its "cert" blob."""
    resp = requests.get("http://static.ess.apple.com/identity/validation/cert-1.0.plist")
    return plistlib.loads(resp.content)["cert"]
||||
def get_session_info(req: bytes) -> bytes:
    """Exchange a session-info request for session info via Apple's identity service."""
    payload = plistlib.dumps({'session-info-request': req})
    # NOTE(review): TLS verification is disabled (verify=False) — presumably
    # the endpoint uses an Apple-internal CA; confirm before relying on this.
    resp = requests.post(
        "https://identity.ess.apple.com/WebObjects/TDIdentityService.woa/wa/initializeValidation",
        data=payload,
        verify=False,
    )
    return plistlib.loads(resp.content)["session-info"]
||||
def arc4random(j: Jelly) -> int:
    """Hook for _arc4random: return a uniformly random unsigned 32-bit value."""
    import random
    return random.randint(0, 0xFFFFFFFF)
    #return 0
||||
def load_nac() -> Jelly:
    """Load the x86-64 NAC slice into a fresh Jelly emulator and install hooks.

    Each hook either fakes its answer outright or services the request from
    FAKE_DATA / CF_OBJECTS so the binary believes it runs on real macOS.
    """
    binary = get_x64_slice(load_binary())
    # Create a Jelly object from the binary
    j = Jelly(binary)

    hooks = {
        "_malloc": malloc,
        "___stack_chk_guard": lambda: 0,
        "___memset_chk": memset_chk,
        "_sysctlbyname": lambda _: 0,
        "_memcpy": memcpy,
        "_kIOMasterPortDefault": lambda: 0,
        "_IORegistryEntryFromPath": lambda _: 1,
        "_kCFAllocatorDefault": lambda: 0,
        "_IORegistryEntryCreateCFProperty": IORegistryEntryCreateCFProperty,
        "_CFGetTypeID": CFGetTypeID,
        "_CFStringGetTypeID": lambda _: 2,
        "_CFDataGetTypeID": lambda _: 1,
        "_CFDataGetLength": CFDataGetLength,
        "_CFDataGetBytes": CFDataGetBytes,
        "_CFRelease": lambda _: 0,
        "_IOObjectRelease": lambda _: 0,
        "_statfs$INODE64": lambda _: 0,
        "_DASessionCreate": lambda _: 201,
        "_DADiskCreateFromBSDName": lambda _: 202,
        "_kDADiskDescriptionVolumeUUIDKey": lambda: 0,
        "_DADiskCopyDescription": DADiskCopyDescription,
        "_CFDictionaryGetValue": CFDictionaryGetValue,
        "_CFUUIDCreateString": lambda _, __, uuid: uuid,
        "_CFStringGetLength": CFStringGetLength,
        "_CFStringGetMaximumSizeForEncoding": lambda _, length, __: length,
        "_CFStringGetCString": CFStringGetCString,
        "_free": lambda _: 0,
        "_IOServiceMatching": IOServiceMatching,
        "_IOServiceGetMatchingService": IOServiceGetMatchingService,
        "_CFDictionaryCreateMutable": CFDictionaryCreateMutable,
        "_kCFBooleanTrue": lambda: 0,
        "_CFDictionarySetValue": CFDictionarySetValue,
        "_IOServiceGetMatchingServices": IOServiceGetMatchingServices,
        "_IOIteratorNext": IOIteratorNext,
        "___bzero": bzero,
        "_IORegistryEntryGetParentEntry": IORegistryEntryGetParentEntry,
        "_arc4random": arc4random
    }
    j.setup(hooks)

    return j
||||
def generate_validation_data() -> bytes:
    """Run the full NAC flow (init -> session-info exchange -> sign).

    Returns the raw validation-data blob used for IDS registration.
    """
    j = load_nac()
    logger.debug("Loaded NAC library")

    val_ctx, req = nac_init(j, get_cert())
    logger.debug("Initialized NAC")

    session_info = get_session_info(req)
    logger.debug("Got session info")

    nac_key_establishment(j, val_ctx, session_info)
    logger.debug("Submitted session info")

    val_data = nac_sign(j, val_ctx)
    logger.info("Generated validation data")
    return bytes(val_data)
||||
if __name__ == "__main__":
    # Standalone mode: generate validation data and print it base64-encoded.
    from base64 import b64encode

    val_data = generate_validation_data()
    logger.info(f"Validation Data: {b64encode(val_data).decode()}")
    #main()
|
@ -1,6 +0,0 @@
|
||||
from base64 import b64encode

import emulated.nac

# Generate fresh validation data and print it base64-encoded.
vd = emulated.nac.generate_validation_data()
print(b64encode(vd).decode())
|
@ -1,86 +0,0 @@
|
||||
from base64 import b64encode
|
||||
|
||||
import apns
|
||||
|
||||
from . import _helpers, identity, profile, query
|
||||
|
||||
|
||||
class IDSUser:
    """An IDS (Apple identity services) user bound to an APNs push connection.

    Fix: ``register()`` reads ``self.encryption_identity``, but ``__init__``
    never set it, so a freshly constructed user raised AttributeError on
    ``register()``. It is now initialized to None.
    """

    # Factor callback will be called if a 2FA code is necessary
    def __init__(
        self,
        push_connection: apns.APNSConnection,
    ):
        self.push_connection = push_connection
        self._push_keypair = _helpers.KeyPair(
            self.push_connection.private_key, self.push_connection.cert
        )

        self.ec_key = self.rsa_key = None
        # register() checks this before generating a fresh identity.
        self.encryption_identity = None

    # Sets self.user_id and self._auth_token
    def _authenticate_for_token(
        self, username: str, password: str, factor_callback: callable = None
    ):
        self.user_id, self._auth_token = profile.get_auth_token(
            username, password, factor_callback
        )

    # Sets self._auth_keypair using self.user_id and self._auth_token
    def _authenticate_for_cert(self):
        self._auth_keypair = profile.get_auth_cert(self.user_id, self._auth_token)

    def __str__(self):
        return f"IDSUser(user_id={self.user_id}, handles={self.handles}, push_token={b64encode(self.push_connection.token).decode()})"

    # Authenticates with a username and password, to create a brand new authentication keypair
    def authenticate(
        self, username: str, password: str, factor_callback: callable = None
    ):
        self._authenticate_for_token(username, password, factor_callback)
        self._authenticate_for_cert()
        self.handles = profile.get_handles(
            b64encode(self.push_connection.token),
            self.user_id,
            self._auth_keypair,
            self._push_keypair,
        )
        self.current_handle = self.handles[0]

    # Uses an existing authentication keypair
    def restore_authentication(
        self, auth_keypair: _helpers.KeyPair, user_id: str, handles: dict
    ):
        self._auth_keypair = auth_keypair
        self.user_id = user_id
        self.handles = handles
        self.current_handle = self.handles[0]

    # This is a separate call so that the user can make sure the first part succeeds before asking for validation data
    def register(self, validation_data: str):
        """
        self.ec_key, self.rsa_key will be set to a randomly gnenerated EC and RSA keypair
        if they are not already set
        """
        if self.encryption_identity is None:
            self.encryption_identity = identity.IDSIdentity()

        cert = identity.register(
            b64encode(self.push_connection.token),
            self.handles,
            self.user_id,
            self._auth_keypair,
            self._push_keypair,
            self.encryption_identity,
            validation_data,
        )
        self._id_keypair = _helpers.KeyPair(self._auth_keypair.key, cert)

    def restore_identity(self, id_keypair: _helpers.KeyPair):
        self._id_keypair = id_keypair

    def lookup(self, uris: list[str], topic: str = "com.apple.madrid") -> any:
        return query.lookup(self.push_connection, self.current_handle, self._id_keypair, uris, topic)
||||
|
@ -1,39 +0,0 @@
|
||||
from collections import namedtuple

USER_AGENT = "com.apple.madrid-lookup [macOS,13.2.1,22D68,MacBookPro18,3]"
PROTOCOL_VERSION = "1640"

# KeyPair is a named tuple that holds a key and a certificate in PEM form
KeyPair = namedtuple("KeyPair", ["key", "cert"])


def dearmour(armoured: str) -> str:
    """Strip PEM armour (BEGIN/END lines and newlines) from `armoured`."""
    import re

    # Generic so it works on more than just certificates
    return re.sub(r"-----BEGIN .*-----|-----END .*-----", "", armoured).replace(
        "\n", ""
    )
|
||||
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec, rsa


def parse_key(key: str):
    """Load a PEM key string (public or private) into a cryptography key object."""
    # Public keys carry "PUBLIC" in their PEM header
    if "PUBLIC" in key:
        return serialization.load_pem_public_key(key.encode())
    return serialization.load_pem_private_key(key.encode(), None)
||||
|
||||
def serialize_key(key):
    """Serialize an EC/RSA key object (public or private) to stripped PEM text."""
    if isinstance(key, (ec.EllipticCurvePrivateKey, rsa.RSAPrivateKey)):
        pem = key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.TraditionalOpenSSL,
            encryption_algorithm=serialization.NoEncryption(),
        )
    else:
        pem = key.public_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PublicFormat.SubjectPublicKeyInfo,
        )
    return pem.decode("utf-8").strip()
|
||||
|
177
ids/identity.py
177
ids/identity.py
@ -1,177 +0,0 @@
|
||||
import plistlib
|
||||
from base64 import b64decode
|
||||
from typing import Union
|
||||
|
||||
import requests
|
||||
|
||||
from ._helpers import PROTOCOL_VERSION, USER_AGENT, KeyPair, parse_key, serialize_key
|
||||
from .signing import add_auth_signature, armour_cert
|
||||
|
||||
from io import BytesIO
|
||||
|
||||
from cryptography.hazmat.primitives.asymmetric import ec, rsa
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger("ids")
|
||||
|
||||
class IDSIdentity:
    """An IDS message identity: a P-256 signing key plus an RSA encryption key.

    Each key may be supplied as a private PEM, a public PEM, or omitted
    (in which case a fresh private key is generated).
    """

    def __init__(
        self,
        signing_key: Union[str, None] = None,
        encryption_key: Union[str, None] = None,
        signing_public_key: Union[str, None] = None,
        encryption_public_key: Union[str, None] = None):
        if signing_key is not None:
            self.signing_key = signing_key
            self.signing_public_key = serialize_key(parse_key(signing_key).public_key())
        elif signing_public_key is not None:
            self.signing_key = None
            self.signing_public_key = signing_public_key
        else:
            # Generate a new P-256 signing key
            self.signing_key = serialize_key(ec.generate_private_key(ec.SECP256R1()))
            self.signing_public_key = serialize_key(parse_key(self.signing_key).public_key())

        if encryption_key is not None:
            self.encryption_key = encryption_key
            self.encryption_public_key = serialize_key(parse_key(encryption_key).public_key())
        elif encryption_public_key is not None:
            self.encryption_key = None
            self.encryption_public_key = encryption_public_key
        else:
            # 1280-bit RSA matches the fixed-size wire encoding below
            self.encryption_key = serialize_key(rsa.generate_private_key(65537, 1280))
            self.encryption_public_key = serialize_key(parse_key(self.encryption_key).public_key())

    def decode(input: bytes) -> 'IDSIdentity':
        """Parse the hand-rolled DER-ish wire format into a public-key-only identity."""
        stream = BytesIO(input)

        assert stream.read(5) == b'\x30\x81\xF6\x81\x43'  # DER header
        raw_ecdsa = stream.read(67)
        assert stream.read(3) == b'\x82\x81\xAE'  # DER header
        raw_rsa = stream.read(174)

        # RSA portion: fixed headers, 161-byte modulus, exponent 65537
        rsa_stream = BytesIO(raw_rsa)
        assert rsa_stream.read(2) == b'\x00\xAC'  # Not sure what this is
        assert rsa_stream.read(3) == b'\x30\x81\xA9'  # Inner DER header
        assert rsa_stream.read(3) == b'\x02\x81\xA1'
        rsa_modulus = int.from_bytes(rsa_stream.read(161), "big")
        assert rsa_stream.read(5) == b'\x02\x03\x01\x00\x01'  # Exponent, should always be 65537

        # EC portion: uncompressed point prefix, then 32-byte x and y
        assert raw_ecdsa[:3] == b'\x00\x41\x04'
        point = raw_ecdsa[3:]
        ec_key = ec.EllipticCurvePublicNumbers(
            int.from_bytes(point[:32], "big"),
            int.from_bytes(point[32:], "big"),
            ec.SECP256R1(),
        ).public_key()

        rsa_key = rsa.RSAPublicNumbers(e=65537, n=rsa_modulus).public_key()

        return IDSIdentity(signing_public_key=serialize_key(ec_key), encryption_public_key=serialize_key(rsa_key))

    def encode(self) -> bytes:
        """Serialize the public keys into the wire format understood by decode()."""
        signing_numbers = parse_key(self.signing_public_key).public_numbers()
        rsa_numbers = parse_key(self.encryption_public_key).public_numbers()

        raw_rsa = BytesIO()
        raw_rsa.write(b'\x00\xAC')
        raw_rsa.write(b'\x30\x81\xA9')
        raw_rsa.write(b'\x02\x81\xA1')
        raw_rsa.write(rsa_numbers.n.to_bytes(161, "big"))
        raw_rsa.write(b'\x02\x03\x01\x00\x01')  # Hardcode the exponent

        output = BytesIO()
        output.write(b'\x30\x81\xF6\x81\x43')
        output.write(b'\x00\x41\x04')
        output.write(signing_numbers.x.to_bytes(32, "big"))
        output.write(signing_numbers.y.to_bytes(32, "big"))
        output.write(b'\x82\x81\xAE')
        output.write(raw_rsa.getvalue())

        return output.getvalue()
||||
|
||||
def register(
    push_token, handles, user_id, auth_key: KeyPair, push_key: KeyPair, identity: IDSIdentity, validation_data
):
    """Register an IDS identity for `handles`; return the armoured certificate."""
    logger.debug(f"Registering IDS identity for {handles}")
    uris = [{"uri": handle} for handle in handles]

    # Capability flags advertised for the com.apple.madrid (iMessage) service.
    client_data = {
        'is-c2k-equipment': True,
        'optionally-receive-typing-indicators': True,
        'public-message-identity-key': identity.encode(),
        'public-message-identity-version': 2,
        'show-peer-errors': True,
        'supports-ack-v1': True,
        'supports-activity-sharing-v1': True,
        'supports-audio-messaging-v2': True,
        "supports-autoloopvideo-v1": True,
        'supports-be-v1': True,
        'supports-ca-v1': True,
        'supports-fsm-v1': True,
        'supports-fsm-v2': True,
        'supports-fsm-v3': True,
        'supports-ii-v1': True,
        'supports-impact-v1': True,
        'supports-inline-attachments': True,
        'supports-keep-receipts': True,
        "supports-location-sharing": True,
        'supports-media-v2': True,
        'supports-photos-extension-v1': True,
        'supports-st-v1': True,
        'supports-update-attachments-v1': True,
    }

    body = plistlib.dumps({
        "hardware-version": "MacBookPro18,3",
        "language": "en-US",
        "os-version": "macOS,13.2.1,22D68",
        "software-version": "22D68",
        "services": [
            {
                "capabilities": [{"flags": 17, "name": "Messenger", "version": 1}],
                "service": "com.apple.madrid",
                "users": [
                    {
                        "client-data": client_data,
                        "uris": uris,
                        "user-id": user_id,
                    }
                ],
            }
        ],
        "validation-data": b64decode(validation_data),
    })

    headers = {
        "x-protocol-version": PROTOCOL_VERSION,
        "x-auth-user-id-0": user_id,
    }
    add_auth_signature(headers, body, "id-register", auth_key, push_key, push_token, 0)

    r = requests.post(
        "https://identity.ess.apple.com/WebObjects/TDIdentityService.woa/wa/register",
        headers=headers,
        data=body,
        verify=False,
    )
    r = plistlib.loads(r.content)
    #print(f'Response code: {r["status"]}')
    logger.debug(f"Recieved response to IDS registration: {r}")

    if "status" in r and r["status"] == 6004:
        raise Exception("Validation data expired!")
    # TODO: Do validation of nested statuses
    if "status" in r and r["status"] != 0:
        raise Exception(f"Failed to register: {r}")
    if not "services" in r:
        raise Exception(f"No services in response: {r}")
    if not "users" in r["services"][0]:
        raise Exception(f"No users in response: {r}")
    if not "cert" in r["services"][0]["users"][0]:
        raise Exception(f"No cert in response: {r}")

    return armour_cert(r["services"][0]["users"][0]["cert"])
172
ids/profile.py
172
ids/profile.py
@ -1,172 +0,0 @@
|
||||
import plistlib
|
||||
import random
|
||||
import uuid
|
||||
from base64 import b64decode
|
||||
|
||||
import requests
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import padding, rsa
|
||||
from cryptography.x509.oid import NameOID
|
||||
|
||||
import bags
|
||||
|
||||
from . import signing
|
||||
from ._helpers import PROTOCOL_VERSION, USER_AGENT, KeyPair
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger("ids")
|
||||
|
||||
|
||||
def _auth_token_request(username: str, password: str) -> any:
    """POST username/password to the profile service and return the parsed plist.

    `password` may already carry an appended 2FA code (the "PET").
    """
    payload = plistlib.dumps({
        "username": username,
        #"client-id": str(uuid.uuid4()),
        #"delegates": {"com.apple.private.ids": {"protocol-version": "4"}},
        "password": password,
    })

    r = requests.post(
        # TODO: Figure out which URL bag we can get this from
        "https://profile.ess.apple.com/WebObjects/VCProfileService.woa/wa/authenticateUser",
        #"https://setup.icloud.com/setup/prefpane/loginDelegates",
        #auth=(username, password),
        data=payload,
        verify=False,
    )
    return plistlib.loads(r.content)
||||
|
||||
|
||||
# Gets an IDS auth token for the given username and password
|
||||
# Will use native Grand Slam on macOS
|
||||
# If factor_gen is not None, it will be called to get the 2FA code, otherwise it will be prompted
|
||||
# Returns (realm user id, auth token)
|
||||
def get_auth_token(
    username: str, password: str, factor_gen: callable = None
) -> tuple[str, str]:
    """Get an IDS auth token for the given username and password.

    If the service reports status 5000 (2FA required), the code is appended
    to the password — taken from `factor_gen` if given, else prompted via
    input() — and the request is retried once.

    Returns (realm user id, auth token).

    Fixes: removed an unused `from sys import platform`; an unexpected
    nonzero status now raises a clear Exception instead of falling through
    to a KeyError on "auth-token".
    """
    result = _auth_token_request(username, password)
    if result["status"] != 0:
        if result["status"] == 5000:
            if factor_gen is None:
                password = password + input("Enter 2FA code: ")
            else:
                password = password + factor_gen()
            result = _auth_token_request(username, password)
            if result["status"] != 0:
                raise Exception(f"Error: {result}")
        else:
            # Previously fell through and crashed with KeyError below.
            raise Exception(f"Error: {result}")

    auth_token = result["auth-token"]
    realm_user_id = result["profile-id"]
    logger.debug(f"Got auth token for IDS: {auth_token}")
    return realm_user_id, auth_token
||||
|
||||
|
||||
def _generate_csr(private_key: rsa.RSAPrivateKey) -> str:
    """Build a CSR with a random CN; return its base64 body without PEM armour."""
    csr = (
        x509.CertificateSigningRequestBuilder()
        .subject_name(
            x509.Name([
                # Random 20-byte hex common name; the server does not care.
                x509.NameAttribute(NameOID.COMMON_NAME, random.randbytes(20).hex()),
            ])
        )
        .sign(private_key, hashes.SHA256())
    )

    pem = csr.public_bytes(serialization.Encoding.PEM).decode("utf-8")
    return (
        pem.replace("-----BEGIN CERTIFICATE REQUEST-----", "")
        .replace("-----END CERTIFICATE REQUEST-----", "")
        .replace("\n", "")
    )
||||
|
||||
|
||||
# Gets an IDS auth cert for the given user id and auth token
|
||||
# Returns [private key PEM, certificate PEM]
|
||||
def get_auth_cert(user_id, token) -> KeyPair:
    """Exchange an auth token for an IDS auth certificate.

    Generates a fresh RSA-2048 key, submits a CSR for it, and returns a
    KeyPair of (private key PEM, certificate PEM).
    """
    BAG_KEY = "id-authenticate-ds-id"

    private_key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    body = plistlib.dumps({
        "authentication-data": {"auth-token": token},
        "csr": b64decode(_generate_csr(private_key)),
        "realm-user-id": user_id,
    })

    r = requests.post(
        bags.ids_bag()[BAG_KEY],
        #"https://profile.ess.apple.com/WebObjects/VCProfileService.woa/wa/authenticateDS",
        data=body,
        headers={"x-protocol-version": "1630"},
        verify=False,
    )
    r = plistlib.loads(r.content)
    if r["status"] != 0:
        raise Exception(f"Failed to get auth cert: {r}")

    cert = x509.load_der_x509_certificate(r["cert"])
    logger.debug("Got auth cert from token")

    key_pem = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.TraditionalOpenSSL,
        encryption_algorithm=serialization.NoEncryption(),
    ).decode("utf-8").strip()
    cert_pem = cert.public_bytes(serialization.Encoding.PEM).decode("utf-8").strip()
    return KeyPair(key_pem, cert_pem)
||||
|
||||
|
||||
def get_handles(push_token, user_id: str, auth_key: KeyPair, push_key: KeyPair):
    """Fetch the URIs (handles) registered to `user_id` via a signed GET."""
    BAG_KEY = "id-get-handles"

    headers = {
        "x-protocol-version": PROTOCOL_VERSION,
        "x-auth-user-id": user_id,
    }
    signing.add_auth_signature(
        headers, None, BAG_KEY, auth_key, push_key, push_token
    )

    r = requests.get(
        bags.ids_bag()[BAG_KEY],
        headers=headers,
        verify=False,
    )
    r = plistlib.loads(r.content)

    if "handles" not in r:
        raise Exception("No handles in response: " + str(r))

    logger.debug(f"User {user_id} has handles {r['handles']}")
    return [handle["uri"] for handle in r["handles"]]
|
71
ids/query.py
71
ids/query.py
@ -1,71 +0,0 @@
|
||||
import gzip
|
||||
import plistlib
|
||||
import random
|
||||
from base64 import b64encode
|
||||
|
||||
import apns
|
||||
import bags
|
||||
|
||||
from ._helpers import KeyPair, PROTOCOL_VERSION
|
||||
from . import signing
|
||||
|
||||
|
||||
def lookup(
    conn: apns.APNSConnection,
    self_uri: str,
    id_keypair: KeyPair,
    query: list[str],
    topic,
) -> bytes:
    """Perform an IDS lookup of `query` URIs over the push connection.

    Sends a signed, gzipped plist over APNs, then waits for the response
    correlated by the random 16-byte message id.
    """
    BAG_KEY = "id-query"

    conn.filter([topic])

    body = gzip.compress(plistlib.dumps({"uris": query}), mtime=0)

    push_token = b64encode(conn.token).decode()

    headers = {
        "x-id-self-uri": self_uri,
        "x-protocol-version": PROTOCOL_VERSION,
    }
    signing.add_id_signature(headers, body, BAG_KEY, id_keypair, push_token)

    msg_id = random.randbytes(16)

    req = {
        "cT": "application/x-apple-plist",
        "U": msg_id,
        "c": 96,
        "u": bags.ids_bag()[BAG_KEY],
        "h": headers,
        "v": 2,
        "b": body,
    }
    conn.send_message(topic, plistlib.dumps(req, fmt=plistlib.FMT_BINARY))

    def check_response(x):
        # Accept only the APNs notification carrying our message id
        if x[0] != 0x0A:
            return False
        resp_body = apns._get_field(x[1], 3)
        if resp_body is None:
            return False
        return plistlib.loads(resp_body).get('U') == msg_id

    payload = conn.incoming_queue.wait_pop_find(check_response)
    resp = plistlib.loads(apns._get_field(payload[1], 3))
    resp = plistlib.loads(gzip.decompress(resp["b"]))
    # Acknowledge the message
    #conn._send_ack(apns._get_field(payload[1], 4))

    if resp['status'] != 0:
        raise Exception(f'Query failed: {resp}')
    if 'results' not in resp:
        raise Exception(f'No results in response: {resp}')
    return resp['results']
|
125
ids/signing.py
125
ids/signing.py
@ -1,125 +0,0 @@
|
||||
import random
|
||||
from base64 import b64decode, b64encode
|
||||
from datetime import datetime
|
||||
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import padding, rsa
|
||||
from cryptography.x509.oid import NameOID
|
||||
|
||||
from ._helpers import KeyPair, dearmour
|
||||
|
||||
|
||||
# TODO: Move this helper somewhere else
def armour_cert(cert: bytes) -> str:
    """Convert a DER certificate to stripped PEM text."""
    loaded = x509.load_der_x509_certificate(cert)
    return loaded.public_bytes(serialization.Encoding.PEM).decode("utf-8").strip()
||||
|
||||
|
||||
"""
|
||||
Generates a nonce in this format:
|
||||
01000001876bd0a2c0e571093967fce3d7
|
||||
01 # version
|
||||
000001876d008cc5 # unix time
|
||||
r1r2r3r4r5r6r7r8 # random bytes
|
||||
"""
|
||||
|
||||
|
||||
def generate_nonce() -> bytes:
    """Build a 17-byte nonce: version 0x01, 8-byte big-endian ms timestamp, 8 random bytes."""
    timestamp_ms = int(datetime.now().timestamp() * 1000)
    return b"\x01" + timestamp_ms.to_bytes(8, "big") + random.randbytes(8)
||||
|
||||
|
||||
import typing
|
||||
|
||||
|
||||
# Creates a payload from individual parts for signing
|
||||
def _create_payload(
|
||||
bag_key: str,
|
||||
query_string: str,
|
||||
push_token: typing.Union[str, bytes],
|
||||
payload: bytes,
|
||||
nonce: typing.Union[bytes, None] = None,
|
||||
) -> tuple[bytes, bytes]:
|
||||
# Generate the nonce
|
||||
if nonce is None:
|
||||
nonce = generate_nonce()
|
||||
|
||||
push_token = b64decode(push_token)
|
||||
|
||||
if payload is None:
|
||||
payload = b""
|
||||
|
||||
return (
|
||||
nonce
|
||||
+ len(bag_key).to_bytes(4, "big")
|
||||
+ bag_key.encode()
|
||||
+ len(query_string).to_bytes(4, "big")
|
||||
+ query_string.encode()
|
||||
+ len(payload).to_bytes(4, "big")
|
||||
+ payload
|
||||
+ len(push_token).to_bytes(4, "big")
|
||||
+ push_token,
|
||||
nonce,
|
||||
)
|
||||
|
||||
|
||||
# Returns signature, nonce
def _sign_payload(
    private_key: str, bag_key: str, query_string: str, push_token: str, payload: bytes, nonce = None
) -> tuple[str, bytes]:
    """Sign a constructed payload with PKCS#1 v1.5 / SHA-1; return (b64 sig, nonce)."""
    key = serialization.load_pem_private_key(
        private_key.encode(), password=None, backend=default_backend()
    )

    to_sign, nonce = _create_payload(bag_key, query_string, push_token, payload, nonce)

    raw_sig = key.sign(to_sign, padding.PKCS1v15(), hashes.SHA1())  # type: ignore

    # 0x01 0x01 prefix appears to mark the signature scheme/version.
    return b64encode(b"\x01\x01" + raw_sig).decode(), nonce
|
||||
|
||||
# Add headers for x-push-sig and x-auth-sig stuff
|
||||
def add_auth_signature(
|
||||
headers: dict,
|
||||
body: bytes,
|
||||
bag_key: str,
|
||||
auth_key: KeyPair,
|
||||
push_key: KeyPair,
|
||||
push_token: str,
|
||||
auth_number=None,
|
||||
):
|
||||
push_sig, push_nonce = _sign_payload(push_key.key, bag_key, "", push_token, body)
|
||||
headers["x-push-sig"] = push_sig
|
||||
headers["x-push-nonce"] = b64encode(push_nonce)
|
||||
headers["x-push-cert"] = dearmour(push_key.cert)
|
||||
headers["x-push-token"] = push_token
|
||||
|
||||
auth_sig, auth_nonce = _sign_payload(auth_key.key, bag_key, "", push_token, body)
|
||||
auth_postfix = "-" + str(auth_number) if auth_number is not None else ""
|
||||
headers["x-auth-sig" + auth_postfix] = auth_sig
|
||||
headers["x-auth-nonce" + auth_postfix] = b64encode(auth_nonce)
|
||||
headers["x-auth-cert" + auth_postfix] = dearmour(auth_key.cert)
|
||||
|
||||
|
||||
def add_id_signature(
|
||||
headers: dict,
|
||||
body: bytes,
|
||||
bag_key: str,
|
||||
id_key: KeyPair,
|
||||
push_token: str,
|
||||
nonce=None,
|
||||
):
|
||||
id_sig, id_nonce = _sign_payload(id_key.key, bag_key, "", push_token, body, nonce)
|
||||
headers["x-id-sig"] = id_sig
|
||||
headers["x-id-nonce"] = b64encode(id_nonce).decode()
|
||||
headers["x-id-cert"] = dearmour(id_key.cert)
|
||||
headers["x-push-token"] = push_token
|
565
imessage.py
565
imessage.py
@ -1,565 +0,0 @@
|
||||
# LOW LEVEL imessage function, decryption etc
|
||||
# Don't handle APNS etc, accept it already setup
|
||||
|
||||
## HAVE ANOTHER FILE TO SETUP EVERYTHING AUTOMATICALLY, etc
|
||||
# JSON parsing of keys, don't pass around strs??
|
||||
|
||||
import base64
|
||||
import gzip
|
||||
import logging
|
||||
import plistlib
|
||||
import random
|
||||
from typing import Union
|
||||
import uuid
|
||||
from dataclasses import dataclass, field
|
||||
from hashlib import sha1, sha256
|
||||
from io import BytesIO
|
||||
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import ec, padding
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
|
||||
from xml.etree import ElementTree
|
||||
|
||||
import apns
|
||||
import ids
|
||||
|
||||
logger = logging.getLogger("imessage")
|
||||
|
||||
NORMAL_NONCE = b"\x00" * 15 + b"\x01" # This is always used as the AES nonce
|
||||
|
||||
|
||||
class BalloonBody:
|
||||
"""Represents the special parts of message extensions etc."""
|
||||
|
||||
def __init__(self, type: str, data: bytes):
|
||||
self.type = type
|
||||
self.data = data
|
||||
|
||||
# TODO : Register handlers based on type id
|
||||
|
||||
|
||||
class AttachmentFile:
|
||||
def data(self) -> bytes:
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@dataclass
|
||||
class MMCSFile(AttachmentFile):
|
||||
url: Union[str, None] = None
|
||||
size: Union[int, None] = None
|
||||
owner: Union[str, None] = None
|
||||
signature: Union[bytes, None] = None
|
||||
decryption_key: Union[bytes, None] = None
|
||||
|
||||
def data(self) -> bytes:
|
||||
import requests
|
||||
logger.info(requests.get(
|
||||
url=self.url,
|
||||
headers={
|
||||
"User-Agent": f"IMTransferAgent/900 CFNetwork/596.2.3 Darwin/12.2.0 (x86_64) (Macmini5,1)",
|
||||
# "MMCS-Url": self.url,
|
||||
# "MMCS-Signature": str(base64.encodebytes(self.signature)),
|
||||
# "MMCS-Owner": self.owner
|
||||
},
|
||||
).headers)
|
||||
return b""
|
||||
|
||||
|
||||
@dataclass
|
||||
class InlineFile(AttachmentFile):
|
||||
_data: bytes
|
||||
|
||||
def data(self) -> bytes:
|
||||
return self._data
|
||||
|
||||
|
||||
@dataclass
|
||||
class Attachment:
|
||||
name: str
|
||||
mime_type: str
|
||||
versions: list[AttachmentFile]
|
||||
|
||||
def __init__(self, message_raw_content: dict, xml_element: ElementTree.Element):
|
||||
attrs = xml_element.attrib
|
||||
|
||||
self.name = attrs["name"] if "name" in attrs else None
|
||||
self.mime_type = attrs["mime-type"] if "mime-type" in attrs else None
|
||||
|
||||
if "inline-attachment" in attrs:
|
||||
# just grab the inline attachment !
|
||||
self.versions = [InlineFile(message_raw_content[attrs["inline-attachment"]])]
|
||||
else:
|
||||
# suffer
|
||||
versions = []
|
||||
for attribute in attrs:
|
||||
if attribute.startswith("mmcs") or \
|
||||
attribute.startswith("decryption-key") or \
|
||||
attribute.startswith("file-size"):
|
||||
segments = attribute.split('-')
|
||||
if segments[-1].isnumeric():
|
||||
index = int(segments[-1])
|
||||
attribute_name = segments[:-1]
|
||||
else:
|
||||
index = 0
|
||||
attribute_name = attribute
|
||||
|
||||
while index >= len(versions):
|
||||
versions.append(MMCSFile())
|
||||
|
||||
val = attrs[attribute_name]
|
||||
if attribute_name == "mmcs-url":
|
||||
versions[index].url = val
|
||||
elif attribute_name == "mmcs-owner":
|
||||
versions[index].owner = val
|
||||
elif attribute_name == "mmcs-signature-hex":
|
||||
versions[index].signature = base64.b16decode(val)
|
||||
elif attribute_name == "file-size":
|
||||
versions[index].size = int(val)
|
||||
elif attribute_name == "decryption-key":
|
||||
versions[index].decryption_key = base64.b16decode(val)[1:]
|
||||
|
||||
self.versions = versions
|
||||
|
||||
def __repr__(self):
|
||||
return f'<Attachment name="{self.name}" type="{self.mime_type}">'
|
||||
|
||||
|
||||
@dataclass
|
||||
class iMessage:
|
||||
"""Represents an iMessage"""
|
||||
|
||||
text: str = ""
|
||||
"""Plain text of message, always required, may be an empty string"""
|
||||
xml: Union[str, None] = None
|
||||
"""XML portion of message, may be None"""
|
||||
participants: list[str] = field(default_factory=list)
|
||||
"""List of participants in the message, including the sender"""
|
||||
sender: Union[str, None] = None
|
||||
"""Sender of the message"""
|
||||
id: Union[uuid.UUID, None] = None
|
||||
"""ID of the message, will be randomly generated if not provided"""
|
||||
group_id: Union[uuid.UUID, None] = None
|
||||
"""Group ID of the message, will be randomly generated if not provided"""
|
||||
body: Union[BalloonBody, None] = None
|
||||
"""BalloonBody, may be None"""
|
||||
effect: Union[str, None] = None
|
||||
"""iMessage effect sent with this message, may be None"""
|
||||
|
||||
_compressed: bool = True
|
||||
"""Internal property representing whether the message should be compressed"""
|
||||
|
||||
_raw: Union[dict, None] = None
|
||||
"""Internal property representing the original raw message, may be None"""
|
||||
|
||||
def attachments(self) -> list[Attachment]:
|
||||
if self.xml is not None:
|
||||
return [Attachment(self._raw, elem) for elem in ElementTree.fromstring(self.xml)[0] if elem.tag == "FILE"]
|
||||
else:
|
||||
return []
|
||||
|
||||
def sanity_check(self):
|
||||
"""Corrects any missing fields"""
|
||||
if self.id is None:
|
||||
self.id = uuid.uuid4()
|
||||
|
||||
if self.group_id is None:
|
||||
self.group_id = uuid.uuid4()
|
||||
|
||||
if self.sender is None:
|
||||
if len(self.participants) > 1:
|
||||
self.sender = self.participants[-1]
|
||||
else:
|
||||
logger.warning(
|
||||
"Message has no sender, and only one participant, sanity check failed"
|
||||
)
|
||||
return False
|
||||
|
||||
if self.sender not in self.participants:
|
||||
self.participants.append(self.sender)
|
||||
|
||||
if self.xml != None:
|
||||
self._compressed = False # XML is never compressed for some reason
|
||||
|
||||
return True
|
||||
|
||||
def from_raw(message: bytes, sender: Union[str, None] = None) -> "iMessage":
|
||||
"""Create an `iMessage` from raw message bytes"""
|
||||
compressed = False
|
||||
try:
|
||||
message = gzip.decompress(message)
|
||||
compressed = True
|
||||
except:
|
||||
pass
|
||||
|
||||
message = plistlib.loads(message)
|
||||
|
||||
return iMessage(
|
||||
text=message.get("t", ""),
|
||||
xml=message.get("x"),
|
||||
participants=message.get("p", []),
|
||||
sender=sender if sender is not None else message.get("p", [])[-1] if "p" in message else None,
|
||||
id=uuid.UUID(message.get("r")) if "r" in message else None,
|
||||
group_id=uuid.UUID(message.get("gid")) if "gid" in message else None,
|
||||
body=BalloonBody(message["bid"], message["b"]) if "bid" in message and "b" in message else None,
|
||||
effect=message["iid"] if "iid" in message else None,
|
||||
_compressed=compressed,
|
||||
_raw=message,
|
||||
)
|
||||
|
||||
def to_raw(self) -> bytes:
|
||||
"""Convert an `iMessage` to raw message bytes"""
|
||||
if not self.sanity_check():
|
||||
raise ValueError("Message failed sanity check")
|
||||
|
||||
d = {
|
||||
"t": self.text,
|
||||
"x": self.xml,
|
||||
"p": self.participants,
|
||||
"r": str(self.id).upper(),
|
||||
"gid": str(self.group_id).upper(),
|
||||
"pv": 0,
|
||||
"gv": "8",
|
||||
"v": "1",
|
||||
"iid": self.effect
|
||||
}
|
||||
|
||||
# Remove keys that are None
|
||||
d = {k: v for k, v in d.items() if v is not None}
|
||||
|
||||
# Serialize as a plist
|
||||
d = plistlib.dumps(d, fmt=plistlib.FMT_BINARY)
|
||||
|
||||
# Compression
|
||||
if self._compressed:
|
||||
d = gzip.compress(d, mtime=0)
|
||||
|
||||
return d
|
||||
|
||||
def to_string(self) -> str:
|
||||
message_str = f"[{self.sender}] '{self.text}'"
|
||||
if self.effect is not None:
|
||||
message_str += f" with effect [{self.effect}]"
|
||||
return message_str
|
||||
|
||||
|
||||
class iMessageUser:
|
||||
"""Represents a logged in and connected iMessage user.
|
||||
This abstraction should probably be reworked into IDS some time..."""
|
||||
|
||||
def __init__(self, connection: apns.APNSConnection, user: ids.IDSUser):
|
||||
self.connection = connection
|
||||
self.user = user
|
||||
|
||||
def _get_raw_message(self):
|
||||
"""
|
||||
Returns a raw APNs message corresponding to the next conforming notification in the queue
|
||||
Returns None if no conforming notification is found
|
||||
"""
|
||||
|
||||
def check_response(x):
|
||||
if x[0] != 0x0A:
|
||||
return False
|
||||
if apns._get_field(x[1], 2) != sha1("com.apple.madrid".encode()).digest():
|
||||
return False
|
||||
resp_body = apns._get_field(x[1], 3)
|
||||
if resp_body is None:
|
||||
# logger.debug("Rejecting madrid message with no body")
|
||||
return False
|
||||
resp_body = plistlib.loads(resp_body)
|
||||
if "P" not in resp_body:
|
||||
# logger.debug(f"Rejecting madrid message with no payload : {resp_body}")
|
||||
return False
|
||||
return True
|
||||
|
||||
payload = self.connection.incoming_queue.pop_find(check_response)
|
||||
if payload is None:
|
||||
return None
|
||||
id = apns._get_field(payload[1], 4)
|
||||
|
||||
return payload
|
||||
|
||||
def _parse_payload(payload: bytes) -> tuple[bytes, bytes]:
|
||||
payload = BytesIO(payload)
|
||||
|
||||
tag = payload.read(1)
|
||||
#print("TAG", tag)
|
||||
body_length = int.from_bytes(payload.read(2), "big")
|
||||
body = payload.read(body_length)
|
||||
|
||||
signature_len = payload.read(1)[0]
|
||||
signature = payload.read(signature_len)
|
||||
|
||||
return (body, signature)
|
||||
|
||||
def _construct_payload(body: bytes, signature: bytes) -> bytes:
|
||||
payload = (
|
||||
b"\x02"
|
||||
+ len(body).to_bytes(2, "big")
|
||||
+ body
|
||||
+ len(signature).to_bytes(1, "big")
|
||||
+ signature
|
||||
)
|
||||
return payload
|
||||
|
||||
def _hash_identity(id: bytes) -> bytes:
|
||||
iden = ids.identity.IDSIdentity.decode(id)
|
||||
|
||||
# TODO: Combine this with serialization code in ids.identity
|
||||
output = BytesIO()
|
||||
output.write(b"\x00\x41\x04")
|
||||
output.write(
|
||||
ids._helpers.parse_key(iden.signing_public_key)
|
||||
.public_numbers()
|
||||
.x.to_bytes(32, "big")
|
||||
)
|
||||
output.write(
|
||||
ids._helpers.parse_key(iden.signing_public_key)
|
||||
.public_numbers()
|
||||
.y.to_bytes(32, "big")
|
||||
)
|
||||
|
||||
output.write(b"\x00\xAC")
|
||||
output.write(b"\x30\x81\xA9")
|
||||
output.write(b"\x02\x81\xA1")
|
||||
output.write(
|
||||
ids._helpers.parse_key(iden.encryption_public_key)
|
||||
.public_numbers()
|
||||
.n.to_bytes(161, "big")
|
||||
)
|
||||
output.write(b"\x02\x03\x01\x00\x01")
|
||||
|
||||
return sha256(output.getvalue()).digest()
|
||||
|
||||
def _encrypt_sign_payload(
|
||||
self, key: ids.identity.IDSIdentity, message: bytes
|
||||
) -> bytes:
|
||||
# Generate a random AES key
|
||||
random_seed = random.randbytes(11)
|
||||
# Create the HMAC
|
||||
import hmac
|
||||
|
||||
hm = hmac.new(
|
||||
random_seed,
|
||||
message
|
||||
+ b"\x02"
|
||||
+ iMessageUser._hash_identity(self.user.encryption_identity.encode())
|
||||
+ iMessageUser._hash_identity(key.encode()),
|
||||
sha256,
|
||||
).digest()
|
||||
|
||||
aes_key = random_seed + hm[:5]
|
||||
|
||||
# print(len(aes_key))
|
||||
|
||||
# Encrypt the message with the AES key
|
||||
cipher = Cipher(algorithms.AES(aes_key), modes.CTR(NORMAL_NONCE))
|
||||
encrypted = cipher.encryptor().update(message)
|
||||
|
||||
# Encrypt the AES key with the public key of the recipient
|
||||
recipient_key = ids._helpers.parse_key(key.encryption_public_key)
|
||||
rsa_body = recipient_key.encrypt(
|
||||
aes_key + encrypted[:100],
|
||||
padding.OAEP(
|
||||
mgf=padding.MGF1(algorithm=hashes.SHA1()),
|
||||
algorithm=hashes.SHA1(),
|
||||
label=None,
|
||||
),
|
||||
)
|
||||
|
||||
# Construct the payload
|
||||
body = rsa_body + encrypted[100:]
|
||||
sig = ids._helpers.parse_key(self.user.encryption_identity.signing_key).sign(
|
||||
body, ec.ECDSA(hashes.SHA1())
|
||||
)
|
||||
payload = iMessageUser._construct_payload(body, sig)
|
||||
|
||||
return payload
|
||||
|
||||
def _decrypt_payload(self, payload: bytes) -> dict:
|
||||
payload = iMessageUser._parse_payload(payload)
|
||||
|
||||
body = BytesIO(payload[0])
|
||||
rsa_body = ids._helpers.parse_key(
|
||||
self.user.encryption_identity.encryption_key
|
||||
).decrypt(
|
||||
body.read(160),
|
||||
padding.OAEP(
|
||||
mgf=padding.MGF1(algorithm=hashes.SHA1()),
|
||||
algorithm=hashes.SHA1(),
|
||||
label=None,
|
||||
),
|
||||
)
|
||||
|
||||
cipher = Cipher(algorithms.AES(rsa_body[:16]), modes.CTR(NORMAL_NONCE))
|
||||
decrypted = cipher.decryptor().update(rsa_body[16:] + body.read())
|
||||
|
||||
return decrypted
|
||||
|
||||
def _verify_payload(self, payload: bytes, sender: str, sender_token: str) -> bool:
|
||||
# Get the public key for the sender
|
||||
self._cache_keys([sender])
|
||||
|
||||
if not sender_token in self.KEY_CACHE:
|
||||
logger.warning("Unable to find the public key of the sender, cannot verify")
|
||||
return False
|
||||
|
||||
identity_keys = ids.identity.IDSIdentity.decode(self.KEY_CACHE[sender_token][0])
|
||||
sender_ec_key = ids._helpers.parse_key(identity_keys.signing_public_key)
|
||||
|
||||
payload = iMessageUser._parse_payload(payload)
|
||||
|
||||
try:
|
||||
# Verify the signature (will throw an exception if it fails)
|
||||
sender_ec_key.verify(
|
||||
payload[1],
|
||||
payload[0],
|
||||
ec.ECDSA(hashes.SHA1()),
|
||||
)
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
|
||||
def receive(self) -> Union[iMessage, None]:
|
||||
"""
|
||||
Will return the next iMessage in the queue, or None if there are no messages
|
||||
"""
|
||||
raw = self._get_raw_message()
|
||||
if raw is None:
|
||||
return None
|
||||
body = apns._get_field(raw[1], 3)
|
||||
body = plistlib.loads(body)
|
||||
#print(f"Got body message {body}")
|
||||
payload = body["P"]
|
||||
|
||||
if not self._verify_payload(payload, body['sP'], body["t"]):
|
||||
raise Exception("Failed to verify payload")
|
||||
|
||||
decrypted = self._decrypt_payload(payload)
|
||||
|
||||
return iMessage.from_raw(decrypted, body['sP'])
|
||||
|
||||
KEY_CACHE_HANDLE: str = ""
|
||||
KEY_CACHE: dict[bytes, tuple[bytes, bytes]] = {}
|
||||
"""Mapping of push token : (public key, session token)"""
|
||||
USER_CACHE: dict[str, list[bytes]] = {}
|
||||
"""Mapping of handle : [push tokens]"""
|
||||
|
||||
def _cache_keys(self, participants: list[str]):
|
||||
# Clear the cache if the handle has changed
|
||||
if self.KEY_CACHE_HANDLE != self.user.current_handle:
|
||||
self.KEY_CACHE_HANDLE = self.user.current_handle
|
||||
self.KEY_CACHE = {}
|
||||
self.USER_CACHE = {}
|
||||
|
||||
# Check to see if we have cached the keys for all of the participants
|
||||
if all([p in self.USER_CACHE for p in participants]):
|
||||
return
|
||||
|
||||
# Look up the public keys for the participants, and cache a token : public key mapping
|
||||
lookup = self.user.lookup(participants)
|
||||
|
||||
for key, participant in lookup.items():
|
||||
if not key in self.USER_CACHE:
|
||||
self.USER_CACHE[key] = []
|
||||
|
||||
for identity in participant["identities"]:
|
||||
if not "client-data" in identity:
|
||||
continue
|
||||
if not "public-message-identity-key" in identity["client-data"]:
|
||||
continue
|
||||
if not "push-token" in identity:
|
||||
continue
|
||||
if not "session-token" in identity:
|
||||
continue
|
||||
|
||||
self.USER_CACHE[key].append(identity["push-token"])
|
||||
|
||||
# print(identity)
|
||||
|
||||
self.KEY_CACHE[identity["push-token"]] = (
|
||||
identity["client-data"]["public-message-identity-key"],
|
||||
identity["session-token"],
|
||||
)
|
||||
|
||||
def send(self, message: iMessage):
|
||||
# Set the sender, if it isn't already
|
||||
if message.sender is None:
|
||||
message.sender = self.user.handles[0] # TODO : Which handle to use?
|
||||
|
||||
message.sanity_check() # Sanity check MUST be called before caching keys, so that the sender is added to the list of participants
|
||||
self._cache_keys(message.participants)
|
||||
|
||||
# Turn the message into a raw message
|
||||
raw = message.to_raw()
|
||||
import base64
|
||||
|
||||
bundled_payloads = []
|
||||
for participant in message.participants:
|
||||
participant = participant.lower()
|
||||
for push_token in self.USER_CACHE[participant]:
|
||||
if push_token == self.connection.token:
|
||||
continue # Don't send to ourselves
|
||||
|
||||
identity_keys = ids.identity.IDSIdentity.decode(
|
||||
self.KEY_CACHE[push_token][0]
|
||||
)
|
||||
payload = self._encrypt_sign_payload(identity_keys, raw)
|
||||
|
||||
bundled_payloads.append(
|
||||
{
|
||||
"tP": participant,
|
||||
"D": not participant
|
||||
== message.sender, # TODO: Should this be false sometimes? For self messages?
|
||||
"sT": self.KEY_CACHE[push_token][1],
|
||||
"P": payload,
|
||||
"t": push_token,
|
||||
}
|
||||
)
|
||||
|
||||
msg_id = random.randbytes(4)
|
||||
body = {
|
||||
"fcn": 1,
|
||||
"c": 100,
|
||||
"E": "pair",
|
||||
"ua": "[macOS,13.4.1,22F82,MacBookPro18,3]",
|
||||
"v": 8,
|
||||
"i": int.from_bytes(msg_id, "big"),
|
||||
"U": message.id.bytes,
|
||||
"dtl": bundled_payloads,
|
||||
"sP": message.sender,
|
||||
}
|
||||
|
||||
body = plistlib.dumps(body, fmt=plistlib.FMT_BINARY)
|
||||
|
||||
self.connection.send_message("com.apple.madrid", body, msg_id)
|
||||
|
||||
# This code can check to make sure we got a success response, but waiting for the response is annoying,
|
||||
# so for now we just YOLO it and assume it worked
|
||||
|
||||
# def check_response(x):
|
||||
# if x[0] != 0x0A:
|
||||
# return False
|
||||
# if apns._get_field(x[1], 2) != sha1("com.apple.madrid".encode()).digest():
|
||||
# return False
|
||||
# resp_body = apns._get_field(x[1], 3)
|
||||
# if resp_body is None:
|
||||
# return False
|
||||
# resp_body = plistlib.loads(resp_body)
|
||||
# if "c" not in resp_body or resp_body["c"] != 255:
|
||||
# return False
|
||||
# return True
|
||||
|
||||
|
||||
# num_recv = 0
|
||||
# while True:
|
||||
# if num_recv == len(bundled_payloads):
|
||||
# break
|
||||
# payload = self.connection.incoming_queue.wait_pop_find(check_response)
|
||||
# if payload is None:
|
||||
# continue
|
||||
|
||||
# resp_body = apns._get_field(payload[1], 3)
|
||||
# resp_body = plistlib.loads(resp_body)
|
||||
# logger.error(resp_body)
|
||||
# num_recv += 1
|
37
pyproject.toml
Normal file
37
pyproject.toml
Normal file
@ -0,0 +1,37 @@
|
||||
[build-system]
|
||||
requires = ["setuptools", "setuptools_scm"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "pypush"
|
||||
dynamic = ["version"]
|
||||
dependencies = [
|
||||
"anyio",
|
||||
"httpx",
|
||||
"cryptography",
|
||||
"typing-extensions",
|
||||
"exceptiongroup",
|
||||
'importlib_metadata; python_version>="3.9"',
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
pypush = "pypush.cli:main"
|
||||
|
||||
[project.optional-dependencies]
|
||||
test = [
|
||||
"pytest",
|
||||
"pytest-asyncio",
|
||||
]
|
||||
cli = [
|
||||
"frida",
|
||||
"rich",
|
||||
"typer"
|
||||
]
|
||||
|
||||
[tool.setuptools_scm]
|
||||
version_file = "pypush/_version.py"
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
minversion = "6.0"
|
||||
addopts = ["-ra", "-q"]
|
||||
testpaths = ["tests"]
|
5
pypush/apns/__init__.py
Normal file
5
pypush/apns/__init__.py
Normal file
@ -0,0 +1,5 @@
|
||||
__all__ = ["protocol", "create_apns_connection", "activate"]
|
||||
|
||||
from . import protocol
|
||||
from .lifecycle import create_apns_connection
|
||||
from .albert import activate
|
142
pypush/apns/_protocol.py
Normal file
142
pypush/apns/_protocol.py
Normal file
@ -0,0 +1,142 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import MISSING, field
|
||||
from dataclasses import fields as dataclass_fields
|
||||
from typing import Any, TypeVar, get_origin, get_args, Union
|
||||
|
||||
from pypush.apns.transport import Packet
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
def command(cls: T) -> T:
|
||||
"""
|
||||
Automatically add from_packet and to_packet methods to a dataclass
|
||||
"""
|
||||
|
||||
def from_packet(cls, packet: Packet):
|
||||
assert packet.id == cls.PacketType
|
||||
field_values = {}
|
||||
for current_field in dataclass_fields(cls):
|
||||
if (
|
||||
current_field.metadata is None
|
||||
or "packet_id" not in current_field.metadata
|
||||
):
|
||||
# This isn't meant for us, just skip it
|
||||
continue
|
||||
|
||||
packet_value = packet.fields_for_id(current_field.metadata["packet_id"])
|
||||
|
||||
current_field_type = current_field.type
|
||||
|
||||
if get_origin(current_field_type) is Union and type(None) in get_args(
|
||||
current_field_type
|
||||
): # Optional
|
||||
if not packet_value:
|
||||
field_values[current_field.name] = None
|
||||
continue
|
||||
current_field_type = get_args(current_field.type)[0]
|
||||
else:
|
||||
# If the field is not optional, it must be present
|
||||
if not packet_value:
|
||||
raise ValueError(
|
||||
f"Field with packet ID {current_field.metadata['packet_id']} not found in packet"
|
||||
)
|
||||
|
||||
if get_origin(current_field_type) is list:
|
||||
assert get_args(current_field_type) == (bytes,)
|
||||
field_values[current_field.name] = packet_value
|
||||
else:
|
||||
# If it's not supposed to be a list, assume that there is only 1 field with this ID
|
||||
assert len(packet_value) == 1
|
||||
packet_value = packet_value[0]
|
||||
|
||||
if current_field_type == int:
|
||||
assert len(packet_value) == current_field.metadata["packet_bytes"]
|
||||
field_values[current_field.name] = int.from_bytes(
|
||||
packet_value, "big"
|
||||
)
|
||||
elif current_field_type == str:
|
||||
field_values[current_field.name] = packet_value.decode()
|
||||
elif current_field_type == bytes:
|
||||
field_values[current_field.name] = packet_value
|
||||
else:
|
||||
raise TypeError(
|
||||
f"Unsupported field type: {repr(current_field_type)} for field '{current_field.name}' in {cls.__name__}"
|
||||
)
|
||||
|
||||
# Check for extra fields
|
||||
for field in packet.fields:
|
||||
if field.id not in [
|
||||
f.metadata["packet_id"]
|
||||
for f in dataclass_fields(cls)
|
||||
if f.metadata is not None and "packet_id" in f.metadata
|
||||
]:
|
||||
logging.warning(
|
||||
f"Unexpected field with packet ID {field.id} in packet {packet}"
|
||||
)
|
||||
return cls(**field_values)
|
||||
|
||||
def to_packet(self) -> Packet:
|
||||
packet_fields = []
|
||||
for f in dataclass_fields(self):
|
||||
if f.metadata is None or "packet_id" not in f.metadata:
|
||||
continue
|
||||
value = getattr(self, f.name)
|
||||
if isinstance(value, int):
|
||||
packet_value = value.to_bytes(f.metadata["packet_bytes"], "big")
|
||||
elif isinstance(value, str):
|
||||
packet_value = value.encode()
|
||||
elif isinstance(value, bytes):
|
||||
packet_value = value
|
||||
elif value is None:
|
||||
continue
|
||||
elif isinstance(value, list):
|
||||
for v in value:
|
||||
packet_fields.append(
|
||||
Packet.Field(id=f.metadata["packet_id"], value=v)
|
||||
)
|
||||
continue
|
||||
else:
|
||||
raise TypeError(f"Unsupported field type: {f.type}")
|
||||
packet_fields.append(
|
||||
Packet.Field(id=f.metadata["packet_id"], value=packet_value)
|
||||
)
|
||||
return Packet(id=self.PacketType, fields=packet_fields)
|
||||
|
||||
setattr(cls, "from_packet", classmethod(from_packet))
|
||||
setattr(cls, "to_packet", to_packet)
|
||||
return cls
|
||||
|
||||
|
||||
def fid(
|
||||
packet_id: int,
|
||||
byte_len: int = 1,
|
||||
default: Any = MISSING,
|
||||
default_factory: Any = MISSING,
|
||||
repr: bool = True,
|
||||
):
|
||||
"""
|
||||
:param packet_id: The packet ID of the field
|
||||
:param byte_len: The length of the field in bytes (for int fields)
|
||||
:param default: The default value of the field
|
||||
"""
|
||||
if not default == MISSING and not default_factory == MISSING:
|
||||
raise ValueError("Cannot specify both default and default_factory")
|
||||
if not default == MISSING:
|
||||
return field(
|
||||
metadata={"packet_id": packet_id, "packet_bytes": byte_len},
|
||||
default=default,
|
||||
repr=repr,
|
||||
)
|
||||
if not default_factory == MISSING:
|
||||
return field(
|
||||
metadata={"packet_id": packet_id, "packet_bytes": byte_len},
|
||||
default_factory=default_factory,
|
||||
repr=repr,
|
||||
)
|
||||
else:
|
||||
return field(
|
||||
metadata={"packet_id": packet_id, "packet_bytes": byte_len}, repr=repr
|
||||
)
|
45
pypush/apns/_util.py
Normal file
45
pypush/apns/_util.py
Normal file
@ -0,0 +1,45 @@
|
||||
import logging
|
||||
from contextlib import asynccontextmanager
|
||||
from typing import Generic, TypeVar
|
||||
|
||||
import anyio
|
||||
from anyio.abc import ObjectSendStream
|
||||
|
||||
T = TypeVar("T")
|
||||
|
||||
|
||||
class BroadcastStream(Generic[T]):
|
||||
def __init__(self):
|
||||
self.streams: list[ObjectSendStream[T]] = []
|
||||
|
||||
async def broadcast(self, packet):
|
||||
for stream in self.streams:
|
||||
try:
|
||||
await stream.send(packet)
|
||||
except anyio.BrokenResourceError:
|
||||
self.streams.remove(stream)
|
||||
|
||||
@asynccontextmanager
|
||||
async def open_stream(self):
|
||||
send, recv = anyio.create_memory_object_stream[T]()
|
||||
self.streams.append(send)
|
||||
async with recv:
|
||||
yield recv
|
||||
self.streams.remove(send)
|
||||
await send.aclose()
|
||||
|
||||
|
||||
def exponential_backoff(f):
|
||||
async def wrapper(*args, **kwargs):
|
||||
backoff = 1
|
||||
while True:
|
||||
try:
|
||||
return await f(*args, **kwargs)
|
||||
except Exception as e:
|
||||
logging.warning(
|
||||
f"Error in {f.__name__}: {e}, retrying in {backoff} seconds"
|
||||
)
|
||||
await anyio.sleep(backoff)
|
||||
backoff *= 2
|
||||
|
||||
return wrapper
|
@ -1,20 +1,18 @@
|
||||
__all__ = ["activate"]
|
||||
|
||||
import plistlib
|
||||
import re
|
||||
import uuid
|
||||
from base64 import b64decode, b64encode
|
||||
from base64 import b64decode
|
||||
from typing import Tuple, Optional
|
||||
|
||||
import requests
|
||||
import httpx
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import padding, rsa
|
||||
from cryptography.x509.oid import NameOID
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger("albert")
|
||||
|
||||
# These keys are from https://github.com/MiUnlockCode/albertsimlockapple/blob/main/ALBERTBUGBYMIUNLOCK.php, which is licensed under the MIT license
|
||||
# If this becomes a problem, I know how to generate new keys
|
||||
FAIRPLAY_PRIVATE_KEY = b64decode(
|
||||
"LS0tLS1CRUdJTiBSU0EgUFJJVkFURSBLRVktLS0tLQpNSUlDV3dJQkFBS0JnUUMzQktyTFBJQmFiaHByKzRTdnVRSG5iRjBzc3FSSVE2Ny8xYlRmQXJWdVVGNnA5c2RjdjcwTityOHlGeGVzRG1wVG1LaXRMUDA2c3pLTkFPMWs1SlZrOS9QMWVqejA4Qk1lOWVBYjRqdUFoVldkZkFJeWFKN3NHRmplU0wwMTVtQXZyeFRGY09NMTBGL3FTbEFSQmljY3hIalBYdHVXVnIwZkxHcmhNKy9BTVFJREFRQUJBb0dBQ0dXM2JISFBOZGI5Y1Z6dC9wNFBmMDNTakoxNXVqTVkwWFk5d1VtL2gxczZyTE84Ky8xME1ETUVHTWxFZGNtSGlXUmt3T1ZpalJIeHpOUnhFQU1JODdBcnVvZmhqZGRiTlZMdDZwcFcybkxDSzdjRURRSkZhaFRXOUdRRnpwVlJRWFhmeHI0Y3MxWDNrdXRsQjZ1WTJWR2x0eFFGWXNqNWRqdjdEK0E3MkEwQ1FRRFpqMVJHZHhiZU9vNFh6eGZBNm40MkdwWmF2VGxNM1F6R0ZvQkpnQ3FxVnUxSlFPem9vQU1SVCtOUGZnb0U4K3VzSVZWQjRJbzBiQ1VUV0xwa0V5dFRBa0VBMTFyeklwR0loRmtQdE5jLzMzZnZCRmd3VWJzalRzMVY1RzZ6NWx5L1huRzlFTmZMYmxnRW9iTG1TbXozaXJ2QlJXQURpd1V4NXpZNkZOL0RtdGk1NndKQWRpU2Nha3VmY255dnp3UVo3UndwLzYxK2VyWUpHTkZ0YjJDbXQ4Tk82QU9laGNvcEhNWlFCQ1d5MWVjbS83dUovb1ozYXZmSmRXQkkzZkd2L2twZW13SkFHTVh5b0RCanB1M2oyNmJEUno2eHRTczc2N3IrVmN0VExTTDYrTzRFYWFYbDNQRW1DcngvVSthVGpVNDVyN0RuaThaK3dkaElKRlBkbkpjZEZrd0dId0pBUFErd1ZxUmpjNGgzSHd1OEk2bGxrOXdocEs5TzcwRkxvMUZNVmRheXRFbE15cXpRMi8wNWZNYjdGNnlhV2h1K1EyR0dYdmRsVVJpQTN0WTBDc2ZNMHc9PQotLS0tLUVORCBSU0EgUFJJVkFURSBLRVktLS0tLQ=="
|
||||
)
|
||||
@ -23,7 +21,7 @@ FAIRPLAY_CERT_CHAIN = b64decode(
|
||||
)
|
||||
|
||||
|
||||
def _generate_csr(private_key: rsa.RSAPrivateKey) -> str:
|
||||
def _generate_csr(private_key: rsa.RSAPrivateKey, name: str = str(uuid.uuid4())) -> str:
|
||||
csr = (
|
||||
x509.CertificateSigningRequestBuilder()
|
||||
.subject_name(
|
||||
@ -34,7 +32,7 @@ def _generate_csr(private_key: rsa.RSAPrivateKey) -> str:
|
||||
x509.NameAttribute(NameOID.LOCALITY_NAME, "Cupertino"),
|
||||
x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Apple Inc."),
|
||||
x509.NameAttribute(NameOID.ORGANIZATIONAL_UNIT_NAME, "iPhone"),
|
||||
x509.NameAttribute(NameOID.COMMON_NAME, str(uuid.uuid4())),
|
||||
x509.NameAttribute(NameOID.COMMON_NAME, name),
|
||||
]
|
||||
)
|
||||
)
|
||||
@ -43,71 +41,71 @@ def _generate_csr(private_key: rsa.RSAPrivateKey) -> str:
|
||||
return csr.public_bytes(serialization.Encoding.PEM).decode("utf-8")
|
||||
|
||||
|
||||
# Generates an APNs push certificate by talking to Albert
|
||||
# Returns [private key PEM, certificate PEM]
|
||||
def generate_push_cert() -> tuple[str, str]:
|
||||
async def activate(
|
||||
http_client: Optional[httpx.AsyncClient] = None,
|
||||
device_class: str = "Windows",
|
||||
udid: str = str(uuid.uuid4()),
|
||||
serial: str = "WindowSerial",
|
||||
version: str = "10.6.4",
|
||||
build: str = "10.6.4",
|
||||
model: str = "windows1,1",
|
||||
) -> Tuple[x509.Certificate, rsa.RSAPrivateKey]:
|
||||
if http_client is None:
|
||||
# Do this here to ensure the client is not accidentally reused during tests
|
||||
http_client = httpx.AsyncClient()
|
||||
|
||||
private_key = rsa.generate_private_key(
|
||||
public_exponent=65537, key_size=2048, backend=default_backend()
|
||||
public_exponent=65537, key_size=1024, backend=default_backend()
|
||||
)
|
||||
csr = _generate_csr(private_key)
|
||||
|
||||
activation_info = {
|
||||
"ActivationRandomness": str(uuid.uuid4()),
|
||||
"ActivationState": "Unactivated",
|
||||
"BuildVersion": "10.6.4",
|
||||
"DeviceCertRequest": csr.encode("utf-8"),
|
||||
"DeviceClass": "Windows",
|
||||
"ProductType": "windows1,1",
|
||||
"ProductVersion": "10.6.4",
|
||||
"SerialNumber": "WindowSerial",
|
||||
"UniqueDeviceID": str(uuid.uuid4()),
|
||||
}
|
||||
activation_info = plistlib.dumps(
|
||||
{
|
||||
"ActivationRandomness": str(uuid.uuid4()),
|
||||
"ActivationState": "Unactivated",
|
||||
"BuildVersion": build,
|
||||
"DeviceCertRequest": csr.encode("utf-8"),
|
||||
"DeviceClass": device_class,
|
||||
"ProductType": model,
|
||||
"ProductVersion": version,
|
||||
"SerialNumber": serial,
|
||||
"UniqueDeviceID": udid,
|
||||
}
|
||||
)
|
||||
|
||||
logger.debug(f"Generated activation info (with UUID: {activation_info['UniqueDeviceID']})")
|
||||
|
||||
activation_info = plistlib.dumps(activation_info)
|
||||
|
||||
# Load the private key
|
||||
fairplay_key = serialization.load_pem_private_key(
|
||||
FAIRPLAY_PRIVATE_KEY, password=None, backend=default_backend()
|
||||
)
|
||||
assert isinstance(fairplay_key, rsa.RSAPrivateKey)
|
||||
|
||||
# Sign the activation info
|
||||
signature = fairplay_key.sign(activation_info, padding.PKCS1v15(), hashes.SHA1()) # type: ignore
|
||||
signature = fairplay_key.sign(activation_info, padding.PKCS1v15(), hashes.SHA1())
|
||||
|
||||
body = {
|
||||
"ActivationInfoComplete": True,
|
||||
"ActivationInfoXML": activation_info,
|
||||
"FairPlayCertChain": FAIRPLAY_CERT_CHAIN,
|
||||
"FairPlaySignature": signature,
|
||||
}
|
||||
|
||||
resp = requests.post(
|
||||
"https://albert.apple.com/WebObjects/ALUnbrick.woa/wa/deviceActivation?device=Windows",
|
||||
data={"activation-info": plistlib.dumps(body)},
|
||||
verify=False,
|
||||
resp = await http_client.post(
|
||||
f"https://albert.apple.com/deviceservices/deviceActivation?device={device_class}",
|
||||
data={
|
||||
"activation-info": plistlib.dumps(
|
||||
{
|
||||
"ActivationInfoComplete": True,
|
||||
"ActivationInfoXML": activation_info,
|
||||
"FairPlayCertChain": FAIRPLAY_CERT_CHAIN,
|
||||
"FairPlaySignature": signature,
|
||||
}
|
||||
).decode()
|
||||
},
|
||||
)
|
||||
|
||||
protocol = re.search("<Protocol>(.*)</Protocol>", resp.text).group(1) # type: ignore
|
||||
try:
|
||||
protocol = re.search("<Protocol>(.*)</Protocol>", resp.text).group(1) # type: ignore
|
||||
except AttributeError:
|
||||
# Search for error text between <b> and </b>
|
||||
error = re.search("<b>(.*)</b>", resp.text).group(1) # type: ignore
|
||||
raise Exception(f"Failed to get certificate from Albert: {error}")
|
||||
|
||||
protocol = plistlib.loads(protocol.encode("utf-8"))
|
||||
|
||||
logger.debug("Recieved push certificate from Albert")
|
||||
|
||||
return (
|
||||
private_key.private_bytes(
|
||||
encoding=serialization.Encoding.PEM,
|
||||
format=serialization.PrivateFormat.TraditionalOpenSSL,
|
||||
encryption_algorithm=serialization.NoEncryption(),
|
||||
)
|
||||
.decode("utf-8")
|
||||
.strip(),
|
||||
protocol["device-activation"]["activation-record"]["DeviceCertificate"]
|
||||
.decode("utf-8")
|
||||
.strip(),
|
||||
x509.load_pem_x509_certificate(
|
||||
protocol["device-activation"]["activation-record"]["DeviceCertificate"]
|
||||
),
|
||||
private_key,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
private_key, cert = generate_push_cert()
|
||||
print(private_key)
|
||||
print(cert)
|
141
pypush/apns/lifecycle.py
Normal file
141
pypush/apns/lifecycle.py
Normal file
@ -0,0 +1,141 @@
|
||||
# Lifecycle management, reconnection, etc
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import random
|
||||
import time
|
||||
import typing
|
||||
from contextlib import asynccontextmanager
|
||||
|
||||
import anyio
|
||||
from anyio.abc import TaskGroup
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import padding, rsa
|
||||
|
||||
from . import protocol, transport, _util
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def create_apns_connection(
|
||||
certificate: x509.Certificate,
|
||||
private_key: rsa.RSAPrivateKey,
|
||||
token: typing.Optional[bytes] = None,
|
||||
courier: typing.Optional[str] = None,
|
||||
):
|
||||
async with anyio.create_task_group() as tg:
|
||||
conn = Connection(tg, certificate, private_key, token, courier)
|
||||
yield conn
|
||||
tg.cancel_scope.cancel() # Cancel the task group when the context manager exits
|
||||
await conn.aclose() # Make sure to close the connection after the task group is cancelled
|
||||
|
||||
|
||||
class Connection:
|
||||
def __init__(
|
||||
self,
|
||||
task_group: TaskGroup,
|
||||
certificate: x509.Certificate,
|
||||
private_key: rsa.RSAPrivateKey,
|
||||
token: typing.Optional[bytes] = None,
|
||||
courier: typing.Optional[str] = None,
|
||||
):
|
||||
|
||||
self.certificate = certificate
|
||||
self.private_key = private_key
|
||||
self.base_token = token
|
||||
|
||||
self._conn = None
|
||||
self._tg = task_group
|
||||
self._broadcast = _util.BroadcastStream[protocol.Command]()
|
||||
self._reconnect_lock = anyio.Lock()
|
||||
|
||||
if courier is None:
|
||||
# Pick a random courier server from 1 to 50
|
||||
courier = f"{random.randint(1, 50)}-courier.push.apple.com"
|
||||
self.courier = courier
|
||||
|
||||
self._tg.start_soon(self.reconnect)
|
||||
self._tg.start_soon(self._ping_task)
|
||||
|
||||
async def _receive_task(self):
|
||||
assert self._conn is not None
|
||||
async for command in self._conn:
|
||||
logging.debug(f"Received command: {command}")
|
||||
await self._broadcast.broadcast(command)
|
||||
logging.warning("Receive task ended")
|
||||
|
||||
async def _ping_task(self):
|
||||
while True:
|
||||
await anyio.sleep(30)
|
||||
logging.debug("Sending keepalive")
|
||||
await self.send(protocol.KeepAliveCommand())
|
||||
await self.receive(protocol.KeepAliveAck)
|
||||
|
||||
@_util.exponential_backoff
|
||||
async def reconnect(self):
|
||||
async with self._reconnect_lock: # Prevent weird situations where multiple reconnects are happening at once
|
||||
if self._conn is not None:
|
||||
logging.warning("Closing existing connection")
|
||||
await self._conn.aclose()
|
||||
self._conn = protocol.CommandStream(
|
||||
await transport.create_courier_connection(courier=self.courier)
|
||||
)
|
||||
cert = self.certificate.public_bytes(serialization.Encoding.DER)
|
||||
nonce = (
|
||||
b"\x00"
|
||||
+ int(time.time() * 1000).to_bytes(8, "big")
|
||||
+ random.randbytes(8)
|
||||
)
|
||||
signature = b"\x01\x01" + self.private_key.sign(
|
||||
nonce, padding.PKCS1v15(), hashes.SHA1()
|
||||
)
|
||||
await self._conn.send(
|
||||
protocol.ConnectCommand(
|
||||
push_token=self.base_token,
|
||||
state=1,
|
||||
flags=69,
|
||||
certificate=cert,
|
||||
nonce=nonce,
|
||||
signature=signature,
|
||||
)
|
||||
)
|
||||
self._tg.start_soon(self._receive_task)
|
||||
ack = await self.receive(protocol.ConnectAck)
|
||||
logging.debug(f"Connected with ack: {ack}")
|
||||
assert ack.status == 0
|
||||
if self.base_token is None:
|
||||
self.base_token = ack.token
|
||||
else:
|
||||
assert ack.token == self.base_token
|
||||
|
||||
async def aclose(self):
|
||||
if self._conn is not None:
|
||||
await self._conn.aclose()
|
||||
# Note: Will be reopened if task group is still running and ping task is still running
|
||||
|
||||
T = typing.TypeVar("T", bound=protocol.Command)
|
||||
|
||||
async def receive_stream(
|
||||
self, filter: typing.Type[T], max: int = -1
|
||||
) -> typing.AsyncIterator[T]:
|
||||
async with self._broadcast.open_stream() as stream:
|
||||
async for command in stream:
|
||||
if isinstance(command, filter):
|
||||
yield command
|
||||
max -= 1
|
||||
if max == 0:
|
||||
break
|
||||
|
||||
async def receive(self, filter: typing.Type[T]) -> T:
|
||||
async for command in self.receive_stream(filter, 1):
|
||||
return command
|
||||
raise ValueError("No matching command received")
|
||||
|
||||
async def send(self, command: protocol.Command):
|
||||
try:
|
||||
assert self._conn is not None
|
||||
await self._conn.send(command)
|
||||
except Exception as e:
|
||||
logging.warning(f"Error sending command, reconnecting")
|
||||
await self.reconnect()
|
||||
await self.send(command)
|
285
pypush/apns/protocol.py
Normal file
285
pypush/apns/protocol.py
Normal file
File diff suppressed because one or more lines are too long
132
pypush/apns/transport.py
Normal file
132
pypush/apns/transport.py
Normal file
@ -0,0 +1,132 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import ssl
|
||||
from dataclasses import dataclass
|
||||
from enum import Enum
|
||||
|
||||
import anyio
|
||||
from anyio.abc import ByteStream, ObjectStream
|
||||
|
||||
# Pick a random courier server from 01 to APNSCourierHostcount
|
||||
COURIER_PORT = 5223
|
||||
ALPN = ["apns-security-v3"]
|
||||
|
||||
# Manages TLS connection to courier, parses into raw packets
|
||||
|
||||
|
||||
@dataclass
|
||||
class Packet:
|
||||
@dataclass
|
||||
class Field:
|
||||
id: int
|
||||
value: bytes
|
||||
|
||||
class Type(Enum):
|
||||
Connect = 7
|
||||
ConnectAck = 8
|
||||
FilterTopics = 9
|
||||
SendMessage = 10
|
||||
SendMessageAck = 11
|
||||
KeepAlive = 12
|
||||
KeepAliveAck = 13
|
||||
NoStorage = 14
|
||||
SetState = 20
|
||||
UNKNOWN = "Unknown"
|
||||
|
||||
def __new__(cls, value):
|
||||
# Create a new instance of Enum
|
||||
obj = object.__new__(cls)
|
||||
obj._value_ = value
|
||||
return obj
|
||||
|
||||
@classmethod
|
||||
def _missing_(cls, value):
|
||||
# Handle unknown values
|
||||
instance = cls.UNKNOWN
|
||||
instance._value_ = value # Assign the unknown value
|
||||
return instance
|
||||
|
||||
def __str__(self):
|
||||
if self is Packet.Type.UNKNOWN:
|
||||
return f"Unknown({self._value_})"
|
||||
return self.name
|
||||
|
||||
|
||||
id: Type
|
||||
fields: list[Field]
|
||||
|
||||
def fields_for_id(self, id: int) -> list[bytes]:
|
||||
return [field.value for field in self.fields if field.id == id]
|
||||
|
||||
|
||||
async def create_courier_connection(
|
||||
courier: str = "1-courier.push.apple.com",
|
||||
) -> PacketStream:
|
||||
context = ssl.create_default_context()
|
||||
context.set_alpn_protocols(ALPN)
|
||||
|
||||
# TODO: Verify courier certificate
|
||||
context.check_hostname = False
|
||||
context.verify_mode = ssl.CERT_NONE
|
||||
|
||||
return PacketStream(
|
||||
await anyio.connect_tcp(
|
||||
courier, COURIER_PORT, ssl_context=context, tls_standard_compatible=False
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
async def receive_exact(stream: ByteStream, length: int) -> bytes:
|
||||
buffer = b""
|
||||
while len(buffer) < length:
|
||||
buffer += await stream.receive(length - len(buffer))
|
||||
return buffer
|
||||
|
||||
|
||||
@dataclass
|
||||
class PacketStream(ObjectStream[Packet]):
|
||||
transport_stream: ByteStream
|
||||
|
||||
def _serialize_field(self, field: Packet.Field) -> bytes:
|
||||
return (
|
||||
field.id.to_bytes(1, "big")
|
||||
+ len(field.value).to_bytes(2, "big")
|
||||
+ field.value
|
||||
)
|
||||
|
||||
def _serialize_packet(self, packet: Packet) -> bytes:
|
||||
payload = b""
|
||||
for field in packet.fields:
|
||||
payload += self._serialize_field(field)
|
||||
return (
|
||||
packet.id.value.to_bytes(1, "big")
|
||||
+ len(payload).to_bytes(4, "big")
|
||||
+ payload
|
||||
)
|
||||
|
||||
async def send(self, item: Packet) -> None:
|
||||
await self.transport_stream.send(self._serialize_packet(item))
|
||||
|
||||
async def receive(self) -> Packet:
|
||||
packet_id = int.from_bytes(await receive_exact(self.transport_stream, 1), "big")
|
||||
packet_length = int.from_bytes(
|
||||
await receive_exact(self.transport_stream, 4), "big"
|
||||
)
|
||||
if packet_length == 0:
|
||||
return Packet(Packet.Type(packet_id), [])
|
||||
payload = await receive_exact(self.transport_stream, packet_length)
|
||||
assert len(payload) == packet_length
|
||||
fields = []
|
||||
while len(payload) > 0:
|
||||
field_id = int.from_bytes(payload[:1], "big")
|
||||
field_length = int.from_bytes(payload[1:3], "big")
|
||||
field_value = payload[3 : 3 + field_length]
|
||||
fields.append(Packet.Field(field_id, field_value))
|
||||
payload = payload[3 + field_length :]
|
||||
return Packet(Packet.Type(packet_id), fields)
|
||||
|
||||
async def aclose(self) -> None:
|
||||
await self.transport_stream.aclose()
|
||||
|
||||
async def send_eof(self) -> None:
|
||||
await self.transport_stream.send_eof()
|
48
pypush/cli/__init__.py
Normal file
48
pypush/cli/__init__.py
Normal file
@ -0,0 +1,48 @@
|
||||
import logging
|
||||
|
||||
import typer
|
||||
from rich.logging import RichHandler
|
||||
from typing_extensions import Annotated
|
||||
|
||||
from . import proxy as _proxy
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG, handlers=[RichHandler()], format="%(message)s")
|
||||
|
||||
app = typer.Typer()
|
||||
|
||||
|
||||
@app.command()
|
||||
def proxy(
|
||||
attach: Annotated[
|
||||
bool, typer.Option(help="Use Frida to attach to the running `apsd`")
|
||||
] = True,
|
||||
):
|
||||
"""
|
||||
Proxy APNs traffic between the local machine and the APNs courier
|
||||
|
||||
Attach requires SIP to be disabled and to be running as root
|
||||
"""
|
||||
|
||||
_proxy.main(attach)
|
||||
|
||||
|
||||
@app.command()
|
||||
def client(
|
||||
topic: Annotated[str, typer.Argument(help="app topic to listen on")],
|
||||
sandbox: Annotated[
|
||||
bool, typer.Option("--sandbox/--production", help="APNs courier to use")
|
||||
] = True,
|
||||
):
|
||||
"""
|
||||
Connect to the APNs courier and listen for app notifications on the given topic
|
||||
"""
|
||||
typer.echo("Running APNs client")
|
||||
raise NotImplementedError("Not implemented yet")
|
||||
|
||||
|
||||
def main():
|
||||
app()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
74
pypush/cli/_frida.py
Normal file
74
pypush/cli/_frida.py
Normal file
@ -0,0 +1,74 @@
|
||||
import frida
|
||||
import logging
|
||||
|
||||
|
||||
def attach_to_apsd() -> frida.core.Session:
|
||||
frida.kill("apsd")
|
||||
while True:
|
||||
try:
|
||||
return frida.attach("apsd")
|
||||
except frida.ProcessNotFoundError:
|
||||
pass
|
||||
|
||||
|
||||
def redirect_courier(
|
||||
session: frida.core.Session, courier="courier.push.apple.com", to="localhost"
|
||||
):
|
||||
script = session.create_script(
|
||||
"""
|
||||
var getaddrinfo_handle = Module.findExportByName(null, 'getaddrinfo');
|
||||
if (getaddrinfo_handle) {
|
||||
Interceptor.attach(getaddrinfo_handle, {
|
||||
onEnter: function(args) {
|
||||
var node = Memory.readUtf8String(args[0]);
|
||||
var service = Memory.readUtf8String(args[1]);
|
||||
//this.res_ptr = args[3]
|
||||
//console.log('[*] getaddrinfo("' + node + '", "' + service + '", ...)');
|
||||
// Check for "courier.push.apple.com" in name
|
||||
if (node.indexOf('"""
|
||||
+ courier
|
||||
+ """') !== -1) {
|
||||
// Write "localhost" to the first argument
|
||||
Memory.writeUtf8String(args[0], '"""
|
||||
+ to
|
||||
+ """');
|
||||
console.log('getaddrinfo("' + node + '", ...) => getaddrinfo("localhost", ...)');
|
||||
} else {
|
||||
//console.log('getaddrinfo("' + node + '", ...)');
|
||||
}
|
||||
}
|
||||
});
|
||||
console.log('getaddrinfo() hook installed.');
|
||||
}
|
||||
"""
|
||||
)
|
||||
script.set_log_handler(lambda _, message: logging.debug(message))
|
||||
script.load()
|
||||
|
||||
|
||||
def trust_all_hosts(session: frida.core.Session):
|
||||
script = session.create_script(
|
||||
"""
|
||||
// Hook -[APSTCPStream isTrust:validWithPolicy:forPeer:] to always return true
|
||||
var isTrust_handle = ObjC.classes.APSTCPStream['- isTrust:validWithPolicy:forPeer:'];
|
||||
if (isTrust_handle) {
|
||||
Interceptor.attach(isTrust_handle.implementation, {
|
||||
onEnter: function(args) {
|
||||
console.log('-[APSTCPStream isTrust:validWithPolicy:forPeer:]');
|
||||
//console.log(' - isTrust: ' + args[2]);
|
||||
//console.log(' - validWithPolicy: ' + args[3]);
|
||||
//console.log(' - forPeer: ' + args[4]);
|
||||
//args[2] = true;
|
||||
//console.log(' => isTrust: ' + args[2]);
|
||||
},
|
||||
onLeave: function(retval) {
|
||||
console.log(' <= ' + retval);
|
||||
retval.replace(1);
|
||||
}
|
||||
});
|
||||
console.log('-[APSTCPStream isTrust:validWithPolicy:forPeer:] hook installed.');
|
||||
}
|
||||
"""
|
||||
)
|
||||
script.set_log_handler(lambda _, message: logging.debug(message))
|
||||
script.load()
|
166
pypush/cli/proxy.py
Normal file
166
pypush/cli/proxy.py
Normal file
@ -0,0 +1,166 @@
|
||||
import datetime
|
||||
import logging
|
||||
import ssl
|
||||
import tempfile
|
||||
from typing import Optional
|
||||
|
||||
import anyio
|
||||
import anyio.abc
|
||||
import anyio.to_thread
|
||||
from anyio.streams.tls import TLSListener, TLSStream
|
||||
from cryptography import x509
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import rsa
|
||||
from cryptography.hazmat.primitives.hashes import SHA256
|
||||
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat
|
||||
|
||||
# from pypush import apns
|
||||
from pypush.apns import transport
|
||||
from pypush.apns import protocol
|
||||
|
||||
from . import _frida
|
||||
|
||||
|
||||
async def forward_packets(
|
||||
source: transport.PacketStream,
|
||||
dest: transport.PacketStream,
|
||||
name: str = "",
|
||||
):
|
||||
try:
|
||||
async for packet in source:
|
||||
try:
|
||||
command = protocol.command_from_packet(packet)
|
||||
if not isinstance(command, protocol.UnknownCommand):
|
||||
logging.info(f"{name} -> {command}")
|
||||
else:
|
||||
logging.warning(f"{name} -> {command}")
|
||||
except Exception as e:
|
||||
logging.error(f"Error parsing packet: {e}")
|
||||
logging.error(f"{name} => {packet}")
|
||||
await dest.send(packet)
|
||||
continue
|
||||
await dest.send(command.to_packet())
|
||||
logging.info(f"{name} -> EOF")
|
||||
except anyio.EndOfStream:
|
||||
logging.info(f"{name} -> EOS")
|
||||
except anyio.ClosedResourceError:
|
||||
logging.info(f"{name} -> Closed")
|
||||
except Exception as e:
|
||||
logging.error(f"Error forwarding packets: {e}")
|
||||
await dest.aclose() # close the other stream so that the other task exits cleanly
|
||||
|
||||
|
||||
connection_cnt = 0
|
||||
|
||||
|
||||
async def handle(client: TLSStream):
|
||||
global connection_cnt
|
||||
connection_cnt += 1
|
||||
|
||||
sni = client._ssl_object.server_name # type: ignore
|
||||
logging.debug(f"Got SNI: {sni}")
|
||||
sandbox = "sandbox" in sni
|
||||
|
||||
async with client:
|
||||
client_pkt = transport.PacketStream(client)
|
||||
logging.debug("Client connected")
|
||||
|
||||
forward = (
|
||||
"1-courier.push.apple.com"
|
||||
if not sandbox
|
||||
else "1-courier.sandbox.push.apple.com"
|
||||
)
|
||||
name = f"prod-{connection_cnt}" if not sandbox else f"sandbox-{connection_cnt}"
|
||||
async with await transport.create_courier_connection(forward) as conn:
|
||||
logging.debug("Connected to courier")
|
||||
async with anyio.create_task_group() as tg:
|
||||
tg.start_soon(forward_packets, client_pkt, conn, f"client-{name}")
|
||||
tg.start_soon(forward_packets, conn, client_pkt, f"server-{name}")
|
||||
logging.debug("Started forwarding")
|
||||
|
||||
logging.debug("Courier disconnected")
|
||||
|
||||
|
||||
def temp_certs():
|
||||
# Create a self-signed certificate for the server and write it to temporary files
|
||||
key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
|
||||
builder = x509.CertificateBuilder()
|
||||
builder = builder.subject_name(
|
||||
x509.Name([x509.NameAttribute(x509.NameOID.COMMON_NAME, "localhost")])
|
||||
)
|
||||
builder = builder.issuer_name(
|
||||
x509.Name([x509.NameAttribute(x509.NameOID.COMMON_NAME, "localhost")])
|
||||
)
|
||||
builder = builder.not_valid_before(datetime.datetime.utcnow())
|
||||
builder = builder.not_valid_after(
|
||||
datetime.datetime.utcnow() + datetime.timedelta(days=1)
|
||||
)
|
||||
builder = builder.serial_number(x509.random_serial_number())
|
||||
builder = builder.public_key(key.public_key())
|
||||
builder = builder.add_extension(
|
||||
x509.SubjectAlternativeName([x509.DNSName("localhost")]), critical=False
|
||||
)
|
||||
certificate = builder.sign(key, SHA256())
|
||||
|
||||
cert_path, key_path = tempfile.mktemp(), tempfile.mktemp()
|
||||
|
||||
with open(cert_path, "wb") as f:
|
||||
f.write(certificate.public_bytes(Encoding.PEM))
|
||||
with open(key_path, "wb") as f:
|
||||
f.write(
|
||||
key.private_bytes(
|
||||
Encoding.PEM,
|
||||
serialization.PrivateFormat.TraditionalOpenSSL,
|
||||
serialization.NoEncryption(),
|
||||
)
|
||||
)
|
||||
|
||||
return cert_path, key_path
|
||||
|
||||
|
||||
def sni_callback(conn, server_name, ssl_context):
|
||||
# Set the server name in the conn so we can use it later
|
||||
conn.server_name = server_name # type: ignore
|
||||
|
||||
|
||||
async def courier_proxy(host):
|
||||
# Start listening on localhost:COURIER_PORT
|
||||
listener = await anyio.create_tcp_listener(
|
||||
local_port=transport.COURIER_PORT, local_host=host
|
||||
)
|
||||
# Create an SSL context
|
||||
context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
context.set_alpn_protocols(["apns-security-v3"])
|
||||
context.load_cert_chain(*temp_certs())
|
||||
context.set_servername_callback(sni_callback)
|
||||
listener = TLSListener(listener, ssl_context=context, standard_compatible=False)
|
||||
logging.info(f"Listening on {host}:{transport.COURIER_PORT}")
|
||||
|
||||
await listener.serve(handle)
|
||||
|
||||
|
||||
async def ainput(prompt: str = "") -> str:
|
||||
print(prompt, end="")
|
||||
return await anyio.to_thread.run_sync(input)
|
||||
|
||||
|
||||
async def start(attach):
|
||||
async with anyio.create_task_group() as tg:
|
||||
tg.start_soon(courier_proxy, "localhost")
|
||||
if attach:
|
||||
try:
|
||||
apsd = _frida.attach_to_apsd()
|
||||
_frida.redirect_courier(apsd, "courier.push.apple.com", "localhost")
|
||||
_frida.redirect_courier(
|
||||
apsd, "courier.sandbox.push.apple.com", "localhost"
|
||||
)
|
||||
_frida.trust_all_hosts(apsd)
|
||||
except Exception as e:
|
||||
logging.error(f"Error attaching to apsd (did you run as root?): {e}")
|
||||
logging.info("Press Enter to exit...")
|
||||
await ainput()
|
||||
tg.cancel_scope.cancel()
|
||||
|
||||
|
||||
def main(attach):
|
||||
anyio.run(start, attach)
|
0
pypush/cli/pushclient.py
Normal file
0
pypush/cli/pushclient.py
Normal file
0
pypush/cloudkit/.gitkeep
Normal file
0
pypush/cloudkit/.gitkeep
Normal file
0
pypush/grandslam/.gitkeep
Normal file
0
pypush/grandslam/.gitkeep
Normal file
0
pypush/ids/.gitkeep
Normal file
0
pypush/ids/.gitkeep
Normal file
0
pypush/imessage/.gitkeep
Normal file
0
pypush/imessage/.gitkeep
Normal file
@ -1,9 +0,0 @@
|
||||
requests
|
||||
cryptography
|
||||
wheel
|
||||
tlslite-ng==0.8.0a43
|
||||
srp
|
||||
pbkdf2
|
||||
unicorn
|
||||
rich
|
||||
prompt_toolkit
|
42
tests/test_apns.py
Normal file
42
tests/test_apns.py
Normal file
@ -0,0 +1,42 @@
|
||||
import pytest
|
||||
from pypush import apns
|
||||
import asyncio
|
||||
|
||||
# from aioapns import *
|
||||
import uuid
|
||||
import anyio
|
||||
|
||||
# from pypush.apns import _util
|
||||
# from pypush.apns import albert, lifecycle, protocol
|
||||
from pypush import apns
|
||||
|
||||
import logging
|
||||
from rich.logging import RichHandler
|
||||
|
||||
logging.basicConfig(level=logging.DEBUG, handlers=[RichHandler()], format="%(message)s")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_activate():
|
||||
global certificate, key
|
||||
certificate, key = await apns.activate()
|
||||
assert certificate is not None
|
||||
assert key is not None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_lifecycle_2():
|
||||
async with apns.create_apns_connection(
|
||||
certificate, key, courier="localhost"
|
||||
) as connection:
|
||||
await connection.receive(
|
||||
apns.protocol.ConnectAck
|
||||
) # Just wait until the initial connection is established. Don't do this in real code plz.
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_shorthand():
|
||||
async with apns.create_apns_connection(
|
||||
*await apns.activate(), courier="localhost"
|
||||
) as connection:
|
||||
await connection.receive(apns.protocol.ConnectAck)
|
Loading…
Reference in New Issue
Block a user