前言:iOS 部署的重要性 52.1 iOS 平台优势 为什么选择 iOS 部署 IMS?
优势
说明
性能强劲
A 系列芯片、Neural Engine
Metal 加速
GPU 性能优异
生态系统
App Store 分发
隐私保护
本地处理,无数据上传
IMS 应用场景:
便携式 DMS 设备
车载 iPhone 集成
测试与演示工具
研究原型开发
五十八、部署架构 58.1 整体架构 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 ┌─────────────────────────────────────────────────────────────────────────┐ │ iOS MediaPipe 部署架构 │ ├─────────────────────────────────────────────────────────────────────────┤ │ │ │ ┌─────────────────────────────────────────────────────────┐ │ │ │ Application Layer ( Swift ) │ │ │ │ │ │ │ │ • SwiftUI / UIKit │ │ │ │ • Business Logic │ │ │ │ • Lifecycle Management │ │ │ │ │ │ │ └─────────────────────────────────────────────────────────┘ │ │ │ │ │ ▼ │ │ ┌─────────────────────────────────────────────────────────┐ │ │ │ Objective - C ++ Bridge │ │ │ │ │ │ │ │ • Objective - C / C ++ 混编 │ │ │ │ • Swift ↔ C ++ 桥接 │ │ │ │ • Memory Management ( ARC ) │ │ │ │ │ │ │ └─────────────────────────────────────────────────────────┘ │ │ │ │ │ ▼ │ │ ┌─────────────────────────────────────────────────────────┐ │ │ │ Native Layer ( C ++ ) │ │ │ │ │ │ │ │ • MediaPipe Graph │ │ │ │ • Calculator Framework │ │ │ │ • Metal GPU Processing │ │ │ │ │ │ │ └─────────────────────────────────────────────────────────┘ │ │ │ │ │ ▼ │ │ ┌─────────────────────────────────────────────────────────┐ │ │ │ Hardware Layer │ │ │ │ │ │ │ │ • AVFoundation ( Camera ) │ │ │ │ • Metal / Metal Performance Shaders │ │ │ │ • Neural Engine ( Core ML ) │ │ │ │ │ │ │ └─────────────────────────────────────────────────────────┘ │ │ │ └─────────────────────────────────────────────────────────────────────────┘
58.2 目录结构 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 IMSDMS.xcodeproj/ ├── IMSDMS/ │ ├── App/ │ │ ├── AppDelegate.swift │ │ └── ContentView.swift │ ├── Camera/ │ │ └── CameraManager.swift │ ├── MediaPipe/ │ │ ├── MPPGraph.h │ │ ├── MPPGraph.mm │ │ └── MPPGraph-Bridging-Header.h │ ├── Native/ │ │ ├── mediapipe_jni.cc │ │ └── GraphRunner.h │ ├── Models/ │ │ ├── dms_graph.pbtxt │ │ └── * .tflite │ └── Resources/ │ └── Assets.xcassets └── Podfile
# --- Section 59: Xcode configuration — 59.1 Podfile ---
platform :ios, '12.0'

use_frameworks!

target 'IMSDMS' do
  pod 'MediaPipeTasksVision', '~> 0.10'
  pod 'GPUImage2'
  pod 'SnapKit'
end

post_install do |installer|
  installer.pods_project.targets.each do |target|
    target.build_configurations.each do |config|
      # Bitcode is deprecated since Xcode 14 and unsupported by the
      # prebuilt MediaPipe frameworks.
      config.build_settings['ENABLE_BITCODE'] = 'NO'
      # Restrict the arm64-only narrowing to device builds. Setting ARCHS
      # unconditionally (as before) also applied to the iphonesimulator
      # SDK and broke builds on Intel Macs.
      config.build_settings['ARCHS[sdk=iphoneos*]'] = 'arm64'
    end
  end
end
59.2 Build Settings

OTHER_LDFLAGS = -ObjC -lc++ -lmediapipe_framework
CLANG_CXX_LANGUAGE_STANDARD = c++17
CLANG_CXX_LIBRARY = libc++
ENABLE_BITCODE = NO
// --- Section 60: Objective-C++ bridge — 60.1 Objective-C header (MPPGraph.h) ---
#import <Foundation/Foundation.h>
#import <CoreVideo/CoreVideo.h>
// CMSampleBufferRef is declared in CoreMedia, not CoreVideo; without this
// import the processSampleBuffer: declaration does not compile.
#import <CoreMedia/CoreMedia.h>

NS_ASSUME_NONNULL_BEGIN

@class DMSResult;

/// Thin Objective-C facade over the native MediaPipe CalculatorGraph.
/// Swift talks to this interface; the C++ lives in MPPGraph.mm.
@interface MPPGraph : NSObject

/// Loads, initializes, and starts the graph described by the .pbtxt at
/// `graphPath`. Returns nil on any failure (the implementation does fail
/// with nil, so the initializer must be marked nullable under
/// NS_ASSUME_NONNULL — the Swift caller already checks for nil).
- (nullable instancetype)initWithGraphPath:(NSString *)graphPath;

/// Feeds one frame into the graph and synchronously polls the "result"
/// output stream. Returns nil when no result packet is produced.
- (nullable DMSResult *)processPixelBuffer:(CVPixelBufferRef)pixelBuffer
                                 timestamp:(uint64_t)timestamp;

/// Convenience entry point for AVFoundation sample buffers.
- (nullable DMSResult *)processSampleBuffer:(CMSampleBufferRef)sampleBuffer;

/// Closes all input streams and blocks until the graph drains.
- (void)close;

@end

/// Immutable per-frame inference result exposed to Swift.
@interface DMSResult : NSObject
@property (nonatomic, readonly) float fatigueScore;
@property (nonatomic, readonly) float confidence;
@property (nonatomic, readonly) NSInteger fatigueLevel;
@property (nonatomic, readonly) BOOL isAlert;
@end

NS_ASSUME_NONNULL_END
// --- 60.2 Objective-C++ implementation (MPPGraph.mm) ---
#import "MPPGraph.h"
#import <UIKit/UIKit.h>

#include <cstring>
#include <memory>
#include <string>

#include "mediapipe/framework/calculator_graph.h"
#include "mediapipe/framework/formats/image_frame.h"
#include "google/protobuf/text_format.h"

@interface MPPGraph () {
    std::unique_ptr<mediapipe::CalculatorGraph> _graph;
    // Held by pointer: OutputStreamPoller is not default-constructible,
    // so it cannot live as a plain value ivar before the graph exists.
    std::unique_ptr<mediapipe::OutputStreamPoller> _poller;
}
@end

@implementation MPPGraph

// Designated initializer: read the .pbtxt, parse it as protobuf TEXT
// format, attach the "result" poller, and start the graph. Returns nil
// on any failure.
- (instancetype)initWithGraphPath:(NSString *)graphPath {
    self = [super init];
    if (self) {
        _graph = std::make_unique<mediapipe::CalculatorGraph>();

        NSError *readError = nil;
        NSString *content = [NSString stringWithContentsOfFile:graphPath
                                                      encoding:NSUTF8StringEncoding
                                                         error:&readError];
        if (!content) {
            NSLog(@"Failed to read graph config at %@: %@", graphPath, readError);
            return nil;
        }

        // BUG FIX: a .pbtxt file is protobuf *text* format.
        // ParseFromString() expects the binary wire format and silently
        // fails on text input, so parse with TextFormat instead.
        mediapipe::CalculatorGraphConfig config;
        if (!google::protobuf::TextFormat::ParseFromString(
                std::string([content UTF8String]), &config)) {
            NSLog(@"Failed to parse graph config (text format)");
            return nil;
        }

        if (!_graph->Initialize(config).ok()) {
            NSLog(@"Failed to initialize MediaPipe graph");
            return nil;
        }

        // AddOutputStreamPoller returns absl::StatusOr<OutputStreamPoller>;
        // it must be checked and unwrapped, and created before StartRun.
        auto pollerOrError = _graph->AddOutputStreamPoller("result");
        if (!pollerOrError.ok()) {
            NSLog(@"Failed to attach poller to 'result' stream");
            return nil;
        }
        _poller = std::make_unique<mediapipe::OutputStreamPoller>(
            std::move(pollerOrError.value()));

        if (!_graph->StartRun({}).ok()) {
            NSLog(@"Failed to start MediaPipe graph");
            return nil;
        }
    }
    return self;
}

// Pushes one frame into the "input" stream and synchronously waits for
// the next "result" packet. Returns nil when conversion, the send, or
// the poll fails.
- (DMSResult *)processPixelBuffer:(CVPixelBufferRef)pixelBuffer
                        timestamp:(uint64_t)timestamp {
    auto image_frame = [self convertPixelBufferToImageFrame:pixelBuffer];
    if (!image_frame) {
        return nil;
    }

    mediapipe::Packet packet = mediapipe::Adopt(image_frame.release())
                                   .At(mediapipe::Timestamp(timestamp));
    // The send can fail (e.g. after an upstream graph error). The original
    // ignored the status and would then block forever in Next().
    if (!_graph->AddPacketToInputStream("input", packet).ok()) {
        NSLog(@"Failed to add packet to input stream");
        return nil;
    }

    mediapipe::Packet result_packet;
    if (_poller && _poller->Next(&result_packet)) {
        return [self convertPacketToResult:result_packet];
    }
    return nil;
}

// Graceful shutdown: close inputs, drain, then free the native graph.
- (void)close {
    if (_graph) {
        _graph->CloseAllPacketSources();
        _graph->WaitUntilDone();
        _poller.reset();
        _graph.reset();
    }
}

// Copies the pixel buffer contents into a MediaPipe ImageFrame.
// NOTE(review): the camera is configured for kCVPixelFormatType_32BGRA
// but the frame is declared SRGBA — no channel reorder happens here;
// confirm the downstream graph tolerates swapped R/B channels.
- (std::unique_ptr<mediapipe::ImageFrame>)convertPixelBufferToImageFrame:(CVPixelBufferRef)pixelBuffer {
    CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);

    const size_t width = CVPixelBufferGetWidth(pixelBuffer);
    const size_t height = CVPixelBufferGetHeight(pixelBuffer);
    const size_t srcBytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer);
    const uint8_t *src =
        static_cast<const uint8_t *>(CVPixelBufferGetBaseAddress(pixelBuffer));

    auto image_frame = std::make_unique<mediapipe::ImageFrame>(
        mediapipe::ImageFormat::SRGBA,
        static_cast<int>(width), static_cast<int>(height));

    // BUG FIX: CoreVideo rows may be padded (bytesPerRow >= width * 4),
    // and the ImageFrame has its own row stride. A single memcpy of
    // width*height*4 shears the image whenever either side is padded, so
    // copy row by row honoring both strides.
    uint8_t *dst = image_frame->MutablePixelData();
    const size_t dstBytesPerRow = image_frame->WidthStep();
    const size_t rowBytes = width * 4;
    for (size_t y = 0; y < height; ++y) {
        std::memcpy(dst + y * dstBytesPerRow, src + y * srcBytesPerRow, rowBytes);
    }

    CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
    return image_frame;
}

// TODO: unpack the actual payload from the packet; currently returns a
// zero-filled placeholder result.
- (DMSResult *)convertPacketToResult:(const mediapipe::Packet &)packet {
    DMSResult *result = [[DMSResult alloc] init];
    return result;
}

@end

@implementation DMSResult
// +alloc already zero-fills every ivar, so the previous init override
// assigning 0 / 0 / 0 / NO was redundant; the inherited -init suffices.
@end
// --- Section 61: Swift integration — 61.1 Swift API wrapper ---
import Foundation
import CoreVideo
import AVFoundation

/// Owns the native MediaPipe graph bridge and funnels camera frames into it
/// on a dedicated serial queue, publishing results back on the main thread.
class MediaPipeManager: ObservableObject {
    private var graph: MPPGraph?
    private let processingQueue = DispatchQueue(label: "com.ims.dms.processing")
    // MediaPipe requires strictly increasing packet timestamps. Wall-clock
    // time (Date) is not monotonic — it can repeat within the same
    // millisecond or jump backwards on NTP sync — so remember the last
    // value and force each new timestamp past it. Only touched on the
    // serial processingQueue, so no extra synchronization is needed.
    private var lastTimestamp: UInt64 = 0

    @Published var latestResult: DMSResult?
    @Published var isRunning = false

    /// Optional callback fired on the main thread for every new result.
    var onResult: ((DMSResult) -> Void)?

    /// Creates and starts the native graph. Returns false on failure.
    func initialize(graphPath: String) -> Bool {
        graph = MPPGraph(graphPath: graphPath)
        return graph != nil
    }

    /// Enqueues one camera frame for asynchronous processing.
    func process(pixelBuffer: CVPixelBuffer) {
        guard let graph = graph else { return }
        processingQueue.async { [weak self] in
            guard let self = self else { return }
            var timestamp = UInt64(Date().timeIntervalSince1970 * 1000)
            // Enforce strict monotonicity (see lastTimestamp comment).
            if timestamp <= self.lastTimestamp {
                timestamp = self.lastTimestamp + 1
            }
            self.lastTimestamp = timestamp
            if let result = graph.process(pixelBuffer, timestamp: timestamp) {
                DispatchQueue.main.async {
                    self.latestResult = result
                    self.onResult?(result)
                }
            }
        }
    }

    /// Shuts down and releases the native graph.
    func close() {
        graph?.close()
        graph = nil
    }
}
// --- 61.2 SwiftUI user interface ---
import SwiftUI

/// Root screen: full-bleed camera preview with a fatigue-result overlay
/// pinned to the bottom. Wires the camera into the MediaPipe pipeline on
/// appear and tears both down on disappear.
struct ContentView: View {
    @StateObject private var mediaPipe = MediaPipeManager()
    @StateObject private var camera = CameraManager()

    var body: some View {
        ZStack {
            CameraPreview(camera: camera)
                .edgesIgnoringSafeArea(.all)

            VStack {
                Spacer()
                // Optional.map keeps the overlay absent until a result exists.
                mediaPipe.latestResult.map { ResultOverlay(result: $0) }
            }
            .padding()
        }
        .onAppear(perform: startPipeline)
        .onDisappear(perform: stopPipeline)
    }

    /// Loads the bundled graph, routes camera frames into it, starts capture.
    private func startPipeline() {
        if let graphPath = Bundle.main.path(forResource: "dms_graph",
                                            ofType: "pbtxt") {
            _ = mediaPipe.initialize(graphPath: graphPath)
        }
        camera.onFrame = { pixelBuffer in
            mediaPipe.process(pixelBuffer: pixelBuffer)
        }
        camera.start()
    }

    /// Stops capture and releases the native graph.
    private func stopPipeline() {
        camera.stop()
        mediaPipe.close()
    }
}

/// Semi-transparent card showing the latest DMS result.
struct ResultOverlay: View {
    let result: DMSResult

    var body: some View {
        VStack(alignment: .leading, spacing: 8) {
            Text("疲劳分数: \(String(format: "%.1f", result.fatigueScore)) ")
            Text("置信度: \(String(format: "%.1f", result.confidence)) ")
            Text("等级: \(result.fatigueLevel) ")
            if result.isAlert {
                Text("⚠️ 检测到疲劳!")
                    .font(.headline)
                    .foregroundColor(.red)
            }
        }
        .padding()
        .background(Color.black.opacity(0.6))
        .cornerRadius(10)
        .foregroundColor(.white)
    }
}
// --- Section 62: AVFoundation camera capture ---
import AVFoundation

/// Wraps an AVCaptureSession on the front wide-angle camera and delivers
/// 32BGRA pixel buffers to `onFrame` on a private serial queue.
class CameraManager: NSObject, ObservableObject {
    private let captureSession = AVCaptureSession()
    private let videoOutput = AVCaptureVideoDataOutput()
    private let processingQueue = DispatchQueue(label: "com.ims.dms.camera")
    // Guards against re-running configuration (the original re-added the
    // input/output on every start(), which raises once the session
    // already contains them).
    private var isConfigured = false

    /// Invoked for every captured frame, on the camera queue.
    var onFrame: ((CVPixelBuffer) -> Void)?

    /// Configures the session on first call, then starts capture.
    func start() {
        if !isConfigured {
            configureSession()
            isConfigured = true
        }
        // startRunning() is a blocking call; Apple explicitly warns
        // against invoking it on the main thread (this is reached from
        // SwiftUI's onAppear, which runs on main), so hop to the queue.
        processingQueue.async { [captureSession] in
            if !captureSession.isRunning {
                captureSession.startRunning()
            }
        }
    }

    /// Stops capture; stopRunning() also blocks, so keep it off main.
    func stop() {
        processingQueue.async { [captureSession] in
            if captureSession.isRunning {
                captureSession.stopRunning()
            }
        }
    }

    /// One-time session setup: preset, front-camera input, BGRA output.
    private func configureSession() {
        captureSession.beginConfiguration()
        defer { captureSession.commitConfiguration() }

        captureSession.sessionPreset = .vga640x480

        guard let device = AVCaptureDevice.default(.builtInWideAngleCamera,
                                                   for: .video,
                                                   position: .front),
              let input = try? AVCaptureDeviceInput(device: device),
              captureSession.canAddInput(input) else {
            return
        }
        captureSession.addInput(input)

        videoOutput.setSampleBufferDelegate(self, queue: processingQueue)
        videoOutput.alwaysDiscardsLateVideoFrames = true
        videoOutput.videoSettings = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
        ]
        guard captureSession.canAddOutput(videoOutput) else { return }
        captureSession.addOutput(videoOutput)
    }
}

extension CameraManager: AVCaptureVideoDataOutputSampleBufferDelegate {
    func captureOutput(_ output: AVCaptureOutput,
                       didOutput sampleBuffer: CMSampleBuffer,
                       from connection: AVCaptureConnection) {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        onFrame?(pixelBuffer)
    }
}
// --- Metal / MPS processing (presumably Section 63 — the article's
// numbering jumps from 62 to 64; confirm the intended heading) ---
#import <Metal/Metal.h>
#import <MetalPerformanceShaders/MetalPerformanceShaders.h>

/// Runs an MPS image-format conversion pass over a texture.
@interface MetalProcessor : NSObject
- (void)processTexture:(id<MTLTexture>)texture;
@end

@implementation MetalProcessor {
    id<MTLDevice> _device;
    id<MTLCommandQueue> _commandQueue;
    MPSImageConversion *_converter;  // built once in init, reused per frame
}

- (instancetype)init {
    self = [super init];
    if (self) {
        _device = MTLCreateSystemDefaultDevice();
        // No Metal device (e.g. an unsupported Simulator): fail init
        // instead of crashing later on a nil command queue.
        if (!_device) {
            return nil;
        }
        _commandQueue = [_device newCommandQueue];
        // PERF: the original allocated a fresh MPSImageConversion on every
        // processTexture: call; kernel creation is loop-invariant, so hoist
        // it out of the per-frame path.
        _converter = [[MPSImageConversion alloc]
              initWithDevice:_device
                    srcAlpha:MPSSourceAlphaNonPremultiplied
                   destAlpha:MPSDestinationAlphaNonPremultiplied
             backgroundColor:nil
              conversionInfo:nil];
    }
    return self;
}

/// Encodes one conversion pass and commits it (asynchronous; no wait).
- (void)processTexture:(id<MTLTexture>)texture {
    id<MTLCommandBuffer> commandBuffer = [_commandQueue commandBuffer];
    // NOTE(review): source and destination are the same texture here; MPS
    // conversion kernels are not documented as in-place safe — confirm, or
    // encode into a separate destination texture.
    [_converter encodeToCommandBuffer:commandBuffer
                        sourceTexture:texture
                   destinationTexture:texture];
    [commandBuffer commit];
}

@end
六十四、总结
要点
说明
架构
Swift → Objective-C++ → C++ → Metal
桥接
Objective-C++ 混编、Bridging Header
相机
AVFoundation Capture Session
GPU
Metal / MPS 加速
下篇预告 MediaPipe 系列 53:嵌入式部署——高通 QNN 加速
系列进度: 52/55 · 更新时间: 2026-03-12