Add iPad support, auto-pinning, and comprehensive logging
- Adaptive iPhone/iPad layout with NavigationSplitView sidebar
- Auto-detect SSL-pinned domains, fall back to passthrough
- Certificate install via local HTTP server (Safari profile flow)
- App Group-backed CA, per-domain leaf cert LRU cache
- DB-backed config repository, Darwin notification throttling
- Rules engine, breakpoint rules, pinned domain tracking
- os.Logger instrumentation across tunnel/proxy/mitm/capture/cert/rules/db/ipc/ui
- Fix dyld framework embed, race conditions, thread safety
This commit is contained in:
@@ -4,110 +4,108 @@ import NIOPosix
|
||||
import NIOSSL
|
||||
import NIOHTTP1
|
||||
|
||||
/// After a CONNECT tunnel is established, this handler:
|
||||
/// 1. Reads the first bytes from the client to extract the SNI hostname from the TLS ClientHello
|
||||
/// 2. Generates a per-domain leaf certificate via CertificateManager
|
||||
/// 3. Terminates client-side TLS with the generated cert
|
||||
/// 4. Initiates server-side TLS to the real server
|
||||
/// 5. Installs HTTP codecs + HTTPCaptureHandler on both sides to capture decrypted traffic
|
||||
final class MITMHandler: ChannelInboundHandler, RemovableChannelHandler {
    typealias InboundIn = ByteBuffer

    // Host the client originally asked for in the CONNECT request.
    // Used as SNI fallback, for capture attribution, and for the Host header downstream.
    private let originalHost: String
    // Host actually dialled on the server side (may differ from `originalHost` when rerouted).
    private let upstreamHost: String
    private let port: Int
    private let trafficRepo: TrafficRepository
    private let certManager: CertificateManager
    private let runtimeStatusRepo = RuntimeStatusRepository()

    /// Creates a MITM handler for one CONNECT tunnel.
    /// - Parameters:
    ///   - originalHost: CONNECT target host as seen from the client.
    ///   - upstreamHost: host to connect to for the server-side leg.
    ///   - port: upstream port.
    ///   - trafficRepo: sink for captured decrypted traffic.
    ///   - certManager: issues per-domain leaf certificates (defaults to the shared instance).
    init(
        originalHost: String,
        upstreamHost: String,
        port: Int,
        trafficRepo: TrafficRepository,
        certManager: CertificateManager = .shared
    ) {
        self.originalHost = originalHost
        self.upstreamHost = upstreamHost
        self.port = port
        self.trafficRepo = trafficRepo
        self.certManager = certManager
        ProxyLogger.mitm.info("MITMHandler created original=\(originalHost) upstream=\(upstreamHost):\(port)")
    }
|
||||
|
||||
/// Handles the first bytes after the CONNECT tunnel is established.
///
/// Peeks at the buffered TLS ClientHello to recover the SNI hostname (falling back
/// to the CONNECT host), then rebuilds the pipeline: client-side TLS using a
/// generated leaf certificate, a TLS error logger, HTTP/1 codecs, and a forwarding
/// handler that dials the real server. On success the original ClientHello bytes
/// are re-fired so the handshake proceeds through the new pipeline; on failure the
/// error is recorded in runtime status and the channel is closed.
func channelRead(context: ChannelHandlerContext, data: NIOAny) {
    // `let` suffices — the buffer is only copied for SNI parsing and re-fired as-is.
    let buffer = unwrapInboundIn(data)
    let bufferSize = buffer.readableBytes

    // Extract SNI from ClientHello if possible, otherwise use the CONNECT host
    let sniDomain = extractSNI(from: buffer) ?? originalHost
    ProxyLogger.mitm.info("MITM ClientHello: \(bufferSize) bytes, SNI=\(sniDomain) (fallback host=\(self.originalHost))")

    // Remove this handler — we'll rebuild the pipeline
    context.pipeline.removeHandler(self, promise: nil)

    // Get TLS context for this domain
    let sslContext: NIOSSLContext
    do {
        sslContext = try certManager.tlsServerContext(for: sniDomain)
        ProxyLogger.mitm.info("MITM TLS context created for \(sniDomain)")
    } catch {
        ProxyLogger.mitm.error("MITM TLS context FAILED for \(sniDomain): \(error.localizedDescription)")
        runtimeStatusRepo.update {
            $0.lastMITMError = "TLS context \(sniDomain): \(error.localizedDescription)"
        }
        context.close(promise: nil)
        return
    }

    // Add server-side TLS handler (we are the "server" to the client)
    let sslServerHandler = NIOSSLServerHandler(context: sslContext)
    // Capture locals: this handler is being removed from the pipeline, so the
    // futures below must not rely on `self` staying alive.
    let trafficRepo = self.trafficRepo
    let originalHost = self.originalHost
    let upstreamHost = self.upstreamHost
    let port = self.port
    let runtimeStatusRepo = self.runtimeStatusRepo
    let tlsErrorHandler = TLSErrorLogger(label: "CLIENT-SIDE", domain: sniDomain, runtimeStatusRepo: runtimeStatusRepo)

    context.channel.pipeline.addHandler(sslServerHandler, position: .first).flatMap {
        // Add TLS error logger right after the SSL handler to catch handshake failures
        context.channel.pipeline.addHandler(tlsErrorHandler)
    }.flatMap {
        context.channel.pipeline.addHandler(ByteToMessageHandler(HTTPRequestDecoder()))
    }.flatMap {
        context.channel.pipeline.addHandler(HTTPResponseEncoder())
    }.flatMap {
        // Add the forwarding handler that connects to the real server
        context.channel.pipeline.addHandler(
            MITMForwardHandler(
                remoteHost: upstreamHost,
                remotePort: port,
                originalDomain: originalHost,
                trafficRepo: trafficRepo
            )
        )
    }.whenComplete { result in
        switch result {
        case .success:
            // Re-fire the original ClientHello bytes so TLS handshake proceeds
            ProxyLogger.mitm.info("MITM pipeline installed for \(sniDomain), re-firing ClientHello (\(bufferSize) bytes)")
            context.channel.pipeline.fireChannelRead(NIOAny(buffer))
        case .failure(let error):
            ProxyLogger.mitm.error("MITM pipeline setup FAILED for \(sniDomain): \(error)")
            runtimeStatusRepo.update {
                $0.lastMITMError = "Pipeline setup \(sniDomain): \(error.localizedDescription)"
            }
            context.close(promise: nil)
        }
    }
}
|
||||
|
||||
// MARK: - SNI Extraction
|
||||
|
||||
/// Parse the SNI hostname from a TLS ClientHello message.
|
||||
private func extractSNI(from buffer: ByteBuffer) -> String? {
|
||||
var buf = buffer
|
||||
guard buf.readableBytes >= 43 else { return nil }
|
||||
|
||||
// TLS record header
|
||||
guard buf.readInteger(as: UInt8.self) == 0x16 else { return nil } // Handshake
|
||||
let _ = buf.readInteger(as: UInt16.self) // Version
|
||||
let _ = buf.readInteger(as: UInt16.self) // Length
|
||||
|
||||
// Handshake header
|
||||
guard buf.readInteger(as: UInt8.self) == 0x01 else { return nil } // ClientHello
|
||||
let _ = buf.readBytes(length: 3) // Length (3 bytes)
|
||||
|
||||
// Client version
|
||||
guard buf.readInteger(as: UInt8.self) == 0x16 else { return nil }
|
||||
let _ = buf.readInteger(as: UInt16.self)
|
||||
let _ = buf.readInteger(as: UInt16.self)
|
||||
guard buf.readInteger(as: UInt8.self) == 0x01 else { return nil }
|
||||
let _ = buf.readBytes(length: 3)
|
||||
let _ = buf.readInteger(as: UInt16.self)
|
||||
// Random (32 bytes)
|
||||
guard buf.readBytes(length: 32) != nil else { return nil }
|
||||
// Session ID
|
||||
guard let sessionIdLen = buf.readInteger(as: UInt8.self) else { return nil }
|
||||
guard buf.readBytes(length: Int(sessionIdLen)) != nil else { return nil }
|
||||
// Cipher suites
|
||||
guard let cipherSuitesLen = buf.readInteger(as: UInt16.self) else { return nil }
|
||||
guard buf.readBytes(length: Int(cipherSuitesLen)) != nil else { return nil }
|
||||
// Compression methods
|
||||
guard let compMethodsLen = buf.readInteger(as: UInt8.self) else { return nil }
|
||||
guard buf.readBytes(length: Int(compMethodsLen)) != nil else { return nil }
|
||||
|
||||
// Extensions
|
||||
guard let extensionsLen = buf.readInteger(as: UInt16.self) else { return nil }
|
||||
var extensionsRemaining = Int(extensionsLen)
|
||||
|
||||
@@ -116,50 +114,48 @@ final class MITMHandler: ChannelInboundHandler, RemovableChannelHandler {
|
||||
let extLen = buf.readInteger(as: UInt16.self) else { return nil }
|
||||
extensionsRemaining -= 4 + Int(extLen)
|
||||
|
||||
if extType == 0x0000 { // SNI extension
|
||||
guard let _ = buf.readInteger(as: UInt16.self), // SNI list length
|
||||
if extType == 0x0000 {
|
||||
guard let _ = buf.readInteger(as: UInt16.self),
|
||||
let nameType = buf.readInteger(as: UInt8.self),
|
||||
nameType == 0x00, // hostname
|
||||
nameType == 0x00,
|
||||
let nameLen = buf.readInteger(as: UInt16.self),
|
||||
let nameBytes = buf.readBytes(length: Int(nameLen)) else {
|
||||
return nil
|
||||
}
|
||||
return String(bytes: nameBytes, encoding: .utf8)
|
||||
let nameBytes = buf.readBytes(length: Int(nameLen)) else { return nil }
|
||||
let name = String(bytes: nameBytes, encoding: .utf8)
|
||||
ProxyLogger.mitm.debug("SNI extracted: \(name ?? "nil")")
|
||||
return name
|
||||
} else {
|
||||
guard buf.readBytes(length: Int(extLen)) != nil else { return nil }
|
||||
}
|
||||
}
|
||||
|
||||
ProxyLogger.mitm.debug("SNI: not found in ClientHello")
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// MARK: - MITMForwardHandler
|
||||
|
||||
/// Handles decrypted HTTP from the client, forwards to the real server over TLS,
|
||||
/// and relays responses back. Captures everything via HTTPCaptureHandler.
|
||||
final class MITMForwardHandler: ChannelInboundHandler, RemovableChannelHandler {
    typealias InboundIn = HTTPServerRequestPart
    typealias OutboundOut = HTTPServerResponsePart

    private let remoteHost: String
    private let remotePort: Int
    // Domain the client originally requested — used for the upstream SNI/Host header
    // and for capture attribution.
    private let originalDomain: String
    private let trafficRepo: TrafficRepository
    private let runtimeStatusRepo = RuntimeStatusRepository()
    private var remoteChannel: Channel?

    // Buffer request parts until upstream is connected
    private var pendingParts: [HTTPServerRequestPart] = []
    private var isConnected = false

    /// - Parameters:
    ///   - remoteHost: host to dial for the upstream (server-side) leg.
    ///   - remotePort: upstream port.
    ///   - originalDomain: domain the client asked for (SNI / Host header / capture).
    ///   - trafficRepo: sink for captured decrypted traffic.
    init(remoteHost: String, remotePort: Int, originalDomain: String, trafficRepo: TrafficRepository) {
        self.remoteHost = remoteHost
        self.remotePort = remotePort
        self.originalDomain = originalDomain
        self.trafficRepo = trafficRepo
    }

    /// Kicks off the upstream TLS connection as soon as this handler joins the pipeline;
    /// request parts arriving before the connect completes are buffered in `pendingParts`.
    func handlerAdded(context: ChannelHandlerContext) {
        ProxyLogger.mitm.info("MITMForward: connecting to upstream \(self.remoteHost):\(self.remotePort)")
        connectToRemote(context: context)
    }
|
||||
|
||||
@@ -167,12 +163,17 @@ final class MITMForwardHandler: ChannelInboundHandler, RemovableChannelHandler {
|
||||
let part = unwrapInboundIn(data)
|
||||
|
||||
if isConnected, let remote = remoteChannel {
|
||||
// Forward to upstream as client request
|
||||
switch part {
|
||||
case .head(let head):
|
||||
ProxyLogger.mitm.info("MITMForward: decrypted request \(head.method.rawValue) \(head.uri)")
|
||||
var clientHead = HTTPRequestHead(version: head.version, method: head.method, uri: head.uri, headers: head.headers)
|
||||
if !clientHead.headers.contains(name: "Host") {
|
||||
clientHead.headers.add(name: "Host", value: domain)
|
||||
clientHead.headers.add(name: "Host", value: originalDomain)
|
||||
}
|
||||
runtimeStatusRepo.update {
|
||||
$0.lastSuccessfulMITMDomain = self.originalDomain
|
||||
$0.lastSuccessfulMITMAt = Date().timeIntervalSince1970
|
||||
$0.lastMITMError = nil
|
||||
}
|
||||
remote.write(NIOAny(HTTPClientRequestPart.head(clientHead)), promise: nil)
|
||||
case .body(let buffer):
|
||||
@@ -181,22 +182,27 @@ final class MITMForwardHandler: ChannelInboundHandler, RemovableChannelHandler {
|
||||
remote.writeAndFlush(NIOAny(HTTPClientRequestPart.end(trailers)), promise: nil)
|
||||
}
|
||||
} else {
|
||||
ProxyLogger.mitm.debug("MITMForward: buffering request part (not connected yet)")
|
||||
pendingParts.append(part)
|
||||
}
|
||||
}
|
||||
|
||||
/// The client connection went away — tear down the upstream leg as well.
func channelInactive(context: ChannelHandlerContext) {
    ProxyLogger.mitm.debug("MITMForward: client channel inactive")
    if let upstream = remoteChannel {
        upstream.close(promise: nil)
    }
}
|
||||
|
||||
/// Logs forwarding errors, surfaces them in runtime status, and closes both legs.
/// (Leftover `print` debugging removed — os.Logger already records the error.)
func errorCaught(context: ChannelHandlerContext, error: Error) {
    ProxyLogger.mitm.error("MITMForward error: \(error.localizedDescription)")
    runtimeStatusRepo.update {
        $0.lastMITMError = "Forwarding \(self.originalDomain): \(error.localizedDescription)"
    }
    context.close(promise: nil)
    remoteChannel?.close(promise: nil)
}
|
||||
|
||||
private func connectToRemote(context: ChannelHandlerContext) {
|
||||
let captureHandler = HTTPCaptureHandler(trafficRepo: trafficRepo, domain: domain, scheme: "https")
|
||||
let captureHandler = HTTPCaptureHandler(trafficRepo: trafficRepo, domain: originalDomain, scheme: "https")
|
||||
let clientContext = context
|
||||
|
||||
do {
|
||||
@@ -206,44 +212,63 @@ final class MITMForwardHandler: ChannelInboundHandler, RemovableChannelHandler {
|
||||
ClientBootstrap(group: context.eventLoop)
|
||||
.channelOption(.socketOption(.so_reuseaddr), value: 1)
|
||||
.channelInitializer { channel in
|
||||
let sniHandler = try! NIOSSLClientHandler(context: sslContext, serverHostname: self.domain)
|
||||
let sniHandler: NIOSSLClientHandler
|
||||
do {
|
||||
sniHandler = try NIOSSLClientHandler(context: sslContext, serverHostname: self.originalDomain)
|
||||
} catch {
|
||||
ProxyLogger.mitm.error("NIOSSLClientHandler init FAILED: \(error.localizedDescription)")
|
||||
self.runtimeStatusRepo.update {
|
||||
$0.lastMITMError = "Client TLS handler \(self.originalDomain): \(error.localizedDescription)"
|
||||
}
|
||||
channel.close(promise: nil)
|
||||
return channel.eventLoop.makeFailedFuture(error)
|
||||
}
|
||||
let upstreamTLSLogger = TLSErrorLogger(label: "UPSTREAM", domain: self.originalDomain, runtimeStatusRepo: self.runtimeStatusRepo)
|
||||
return channel.pipeline.addHandler(sniHandler).flatMap {
|
||||
channel.pipeline.addHandler(upstreamTLSLogger)
|
||||
}.flatMap {
|
||||
channel.pipeline.addHandler(HTTPRequestEncoder())
|
||||
}.flatMap {
|
||||
channel.pipeline.addHandler(ByteToMessageHandler(HTTPResponseDecoder()))
|
||||
}.flatMap {
|
||||
channel.pipeline.addHandler(captureHandler)
|
||||
}.flatMap {
|
||||
channel.pipeline.addHandler(
|
||||
MITMRelayHandler(clientContext: clientContext)
|
||||
)
|
||||
channel.pipeline.addHandler(MITMRelayHandler(clientContext: clientContext))
|
||||
}
|
||||
}
|
||||
.connect(host: remoteHost, port: remotePort)
|
||||
.whenComplete { result in
|
||||
switch result {
|
||||
case .success(let channel):
|
||||
ProxyLogger.mitm.info("MITMForward: upstream connected to \(self.remoteHost):\(self.remotePort)")
|
||||
self.remoteChannel = channel
|
||||
self.isConnected = true
|
||||
self.flushPending(remote: channel)
|
||||
case .failure(let error):
|
||||
print("[MITMForward] Connect to \(self.remoteHost):\(self.remotePort) failed: \(error)")
|
||||
ProxyLogger.mitm.error("MITMForward: upstream connect FAILED \(self.remoteHost):\(self.remotePort): \(error.localizedDescription)")
|
||||
self.runtimeStatusRepo.update {
|
||||
$0.lastMITMError = "Upstream \(self.originalDomain): \(error.localizedDescription)"
|
||||
}
|
||||
clientContext.close(promise: nil)
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
print("[MITMForward] TLS setup failed: \(error)")
|
||||
ProxyLogger.mitm.error("MITMForward: TLS context creation FAILED: \(error.localizedDescription)")
|
||||
runtimeStatusRepo.update {
|
||||
$0.lastMITMError = "TLS configuration \(self.originalDomain): \(error.localizedDescription)"
|
||||
}
|
||||
context.close(promise: nil)
|
||||
}
|
||||
}
|
||||
|
||||
private func flushPending(remote: Channel) {
|
||||
ProxyLogger.mitm.debug("MITMForward: flushing \(self.pendingParts.count) buffered parts")
|
||||
for part in pendingParts {
|
||||
switch part {
|
||||
case .head(let head):
|
||||
var clientHead = HTTPRequestHead(version: head.version, method: head.method, uri: head.uri, headers: head.headers)
|
||||
if !clientHead.headers.contains(name: "Host") {
|
||||
clientHead.headers.add(name: "Host", value: domain)
|
||||
clientHead.headers.add(name: "Host", value: originalDomain)
|
||||
}
|
||||
remote.write(NIOAny(HTTPClientRequestPart.head(clientHead)), promise: nil)
|
||||
case .body(let buffer):
|
||||
@@ -258,7 +283,6 @@ final class MITMForwardHandler: ChannelInboundHandler, RemovableChannelHandler {
|
||||
|
||||
// MARK: - MITMRelayHandler
|
||||
|
||||
/// Relays responses from the real server back to the proxy client.
|
||||
final class MITMRelayHandler: ChannelInboundHandler, RemovableChannelHandler {
|
||||
typealias InboundIn = HTTPClientResponsePart
|
||||
|
||||
@@ -270,11 +294,10 @@ final class MITMRelayHandler: ChannelInboundHandler, RemovableChannelHandler {
|
||||
|
||||
func channelRead(context: ChannelHandlerContext, data: NIOAny) {
|
||||
let part = unwrapInboundIn(data)
|
||||
|
||||
switch part {
|
||||
case .head(let head):
|
||||
let serverResponse = HTTPResponseHead(version: head.version, status: head.status, headers: head.headers)
|
||||
clientContext.write(NIOAny(HTTPServerResponsePart.head(serverResponse)), promise: nil)
|
||||
ProxyLogger.mitm.debug("MITMRelay response: \(head.status.code)")
|
||||
clientContext.write(NIOAny(HTTPServerResponsePart.head(HTTPResponseHead(version: head.version, status: head.status, headers: head.headers))), promise: nil)
|
||||
case .body(let buffer):
|
||||
clientContext.write(NIOAny(HTTPServerResponsePart.body(.byteBuffer(buffer))), promise: nil)
|
||||
case .end(let trailers):
|
||||
@@ -283,12 +306,105 @@ final class MITMRelayHandler: ChannelInboundHandler, RemovableChannelHandler {
|
||||
}
|
||||
|
||||
/// Upstream server hung up — mirror the close onto the proxy client connection.
func channelInactive(context: ChannelHandlerContext) {
    ProxyLogger.mitm.debug("MITMRelay: remote inactive")
    clientContext.close(promise: nil)
}
|
||||
|
||||
/// Logs relay errors, records them in runtime status, and closes both channels.
/// (Leftover `print` debugging removed — os.Logger already records the error.)
func errorCaught(context: ChannelHandlerContext, error: Error) {
    ProxyLogger.mitm.error("MITMRelay error: \(error.localizedDescription)")
    // NOTE(review): a throwaway repository instance is created here because this
    // handler holds no repo of its own — consider injecting one like the other
    // handlers do, assuming RuntimeStatusRepository instances share backing storage.
    RuntimeStatusRepository().update {
        $0.lastMITMError = "Relay response: \(error.localizedDescription)"
    }
    context.close(promise: nil)
    clientContext.close(promise: nil)
}
|
||||
}
|
||||
|
||||
// MARK: - TLSErrorLogger
|
||||
|
||||
/// Catches and logs TLS handshake errors with detailed context.
|
||||
/// Placed right after NIOSSLServerHandler/NIOSSLClientHandler in the pipeline.
|
||||
final class TLSErrorLogger: ChannelInboundHandler, RemovableChannelHandler {
    typealias InboundIn = NIOAny

    // "CLIENT-SIDE" (app → proxy TLS) or "UPSTREAM" (proxy → real server TLS).
    private let label: String
    private let domain: String
    private let runtimeStatusRepo: RuntimeStatusRepository

    init(label: String, domain: String, runtimeStatusRepo: RuntimeStatusRepository) {
        self.label = label
        self.domain = domain
        self.runtimeStatusRepo = runtimeStatusRepo
    }

    func channelActive(context: ChannelHandlerContext) {
        ProxyLogger.mitm.info("TLS[\(self.label)] \(self.domain): channel active (handshake starting)")
        context.fireChannelActive()
    }

    func channelInactive(context: ChannelHandlerContext) {
        ProxyLogger.mitm.info("TLS[\(self.label)] \(self.domain): channel inactive")
        context.fireChannelInactive()
    }

    func channelRead(context: ChannelHandlerContext, data: NIOAny) {
        // TLS handshake completed if we're getting data through
        context.fireChannelRead(data)
    }

    /// Observes pipeline user events, logging NIOSSL's typed handshake-completed
    /// event at info level and everything else at debug, then forwards the event.
    func userInboundEventTriggered(context: ChannelHandlerContext, event: Any) {
        let eventDesc = String(describing: event)
        // Use NIOSSL's typed TLSUserEvent instead of string-matching the event
        // description — the previous `as? NIOSSLVerificationCallback` cast could
        // never succeed (that type is a callback, not an event) and bound an
        // unused variable.
        if let tlsEvent = event as? TLSUserEvent, case .handshakeCompleted = tlsEvent {
            ProxyLogger.mitm.info("TLS[\(self.label)] \(self.domain): HANDSHAKE COMPLETED event=\(eventDesc)")
        } else {
            ProxyLogger.mitm.debug("TLS[\(self.label)] \(self.domain): user event=\(eventDesc)")
        }
        context.fireUserInboundEventTriggered(event)
    }

    /// Categorizes TLS errors, auto-marks domains as pinned when the client-side
    /// handshake fails in a pinning-typical way, records the error in runtime
    /// status, and forwards the error down the pipeline.
    func errorCaught(context: ChannelHandlerContext, error: Error) {
        let errorDesc = String(describing: error)
        ProxyLogger.mitm.error("TLS[\(self.label)] \(self.domain): ERROR \(errorDesc)")

        // Categorize and detect SSL pinning
        let lowerError = errorDesc.lowercased()
        var isPinningLikely = false
        var category = "UNKNOWN"

        if lowerError.contains("certificate") || lowerError.contains("trust") {
            category = "CERTIFICATE_TRUST"
            isPinningLikely = label == "CLIENT-SIDE"
            ProxyLogger.mitm.error("TLS[\(self.label)] \(self.domain): CERTIFICATE TRUST ISSUE — client likely doesn't trust our CA")
        } else if lowerError.contains("handshake") {
            category = "HANDSHAKE_FAILURE"
            isPinningLikely = label == "CLIENT-SIDE"
            ProxyLogger.mitm.error("TLS[\(self.label)] \(self.domain): HANDSHAKE FAILURE — protocol mismatch or cert rejected")
        } else if lowerError.contains("eof") || lowerError.contains("reset") || lowerError.contains("closed") || lowerError.contains("connection") {
            category = "CONNECTION_RESET"
            isPinningLikely = label == "CLIENT-SIDE"
            ProxyLogger.mitm.error("TLS[\(self.label)] \(self.domain): CONNECTION RESET during handshake (SSL pinning suspected)")
        } else if lowerError.contains("unrecognized") || lowerError.contains("alert") || lowerError.contains("fatal") {
            category = "TLS_ALERT"
            isPinningLikely = true
            ProxyLogger.mitm.error("TLS[\(self.label)] \(self.domain): TLS ALERT — peer sent alert (unknown_ca / bad_certificate)")
        }

        // If this is a client-side error (the app rejected our cert), it's likely SSL pinning.
        // Auto-record this domain as pinned so future connections use passthrough.
        if isPinningLikely && label == "CLIENT-SIDE" {
            let reason = "TLS \(category): \(String(errorDesc.prefix(200)))"
            PinnedDomainRepository().markPinned(domain: domain, reason: reason)
            ProxyLogger.mitm.error("TLS[\(self.label)] \(self.domain): AUTO-PINNED — future connections will use passthrough")
        }

        runtimeStatusRepo.update {
            $0.lastMITMError = "TLS[\(self.label)] \(self.domain) [\(category)]: \(String(errorDesc.prefix(200)))"
        }

        context.fireErrorCaught(error)
    }
}
|
||||
|
||||
Reference in New Issue
Block a user