I am trying to build an app that streams the iPhone screen to a PC over the Wi-Fi network. I am able to capture the app's screen and receive it on my PC, but if the app goes into the background (I exit using the home button), the stream still shows the app's screen. I want the stream to show whatever UI is currently visible on the iPhone.
This is the complete script.
import Foundation
import AVFoundation
import ReplayKit
import UIKit

class ScreenRecorder: NSObject, ObservableObject, StreamDelegate {
    private var recorder = RPScreenRecorder.shared()
    private var outputStream: OutputStream?
    private var backgroundTask: UIBackgroundTaskIdentifier = .invalid

    override init() {
        super.init()
    }

    func startCapturing() {
        setupStream()
        registerBackgroundTask()
        recorder.isMicrophoneEnabled = false
        recorder.startCapture(handler: { [weak self] (sampleBuffer, bufferType, error) in
            if let error = error {
                print("Error capturing sample buffer: \(error.localizedDescription)")
                return
            }
            if bufferType == .video {
                self?.processSampleBuffer(sampleBuffer: sampleBuffer)
            }
        }) { error in
            if let error = error {
                print("Error starting capture: \(error.localizedDescription)")
            } else {
                print("Capture started successfully")
            }
        }
    }

    func stopCapturing() {
        recorder.stopCapture { [weak self] error in
            if let error = error {
                print("Error stopping capture: \(error.localizedDescription)")
            }
            self?.outputStream?.close()
            self?.outputStream = nil
        }
        endBackgroundTask()
    }

    private func processSampleBuffer(sampleBuffer: CMSampleBuffer) {
        guard let outputStream = outputStream else {
            print("Output stream is nil")
            return
        }
        guard CMSampleBufferDataIsReady(sampleBuffer) else {
            print("Sample buffer data is not ready")
            return
        }
        if let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
            let ciImage = CIImage(cvImageBuffer: imageBuffer)
            let context = CIContext()
            if let jpegData = context.jpegRepresentation(of: ciImage, colorSpace: CGColorSpaceCreateDeviceRGB(), options: [:]) {
                var frameSize = UInt32(jpegData.count)
                var sizeData = Data(bytes: &frameSize, count: MemoryLayout<UInt32>.size)
                // Send the frame size as a 4-byte length prefix before the JPEG payload
                sizeData.withUnsafeBytes { (rawBufferPointer: UnsafeRawBufferPointer) in
                    if let baseAddress = rawBufferPointer.baseAddress {
                        let bytesWritten = outputStream.write(baseAddress.assumingMemoryBound(to: UInt8.self), maxLength: sizeData.count)
                        if bytesWritten <= 0 {
                            print("Error writing frame size to output stream: \(outputStream.streamError?.localizedDescription ?? "Unknown error")")
                            return
                        } else {
                            print("Wrote frame size \(frameSize) to output stream")
                        }
                    }
                }
                // Send the JPEG-encoded frame itself
                jpegData.withUnsafeBytes { (rawBufferPointer: UnsafeRawBufferPointer) in
                    if let baseAddress = rawBufferPointer.baseAddress {
                        let bytesWritten = outputStream.write(baseAddress.assumingMemoryBound(to: UInt8.self), maxLength: jpegData.count)
                        if bytesWritten <= 0 {
                            print("Error writing frame data to output stream: \(outputStream.streamError?.localizedDescription ?? "Unknown error")")
                        } else {
                            print("Wrote \(bytesWritten) bytes of sample buffer data to the output stream")
                        }
                    }
                }
            } else {
                print("Error converting CIImage to JPEG data")
            }
        } else {
            print("Failed to get image buffer")
        }
    }

    func setupStream() {
        var readStream: Unmanaged<CFReadStream>?
        var writeStream: Unmanaged<CFWriteStream>?
        let ipAddress = "192.168.x.xx"
        let port: UInt32 = 5001
        print("Attempting to connect to \(ipAddress) on port \(port)")
        CFStreamCreatePairWithSocketToHost(kCFAllocatorDefault, ipAddress as CFString, port, &readStream, &writeStream)
        guard let outStream = writeStream?.takeRetainedValue() else {
            print("Failed to create write stream")
            return
        }
        outputStream = outStream
        outputStream?.delegate = self
        outputStream?.schedule(in: .current, forMode: .default)
        outputStream?.open()
        print("Stream setup complete, attempting to open output stream")
    }

    func stream(_ aStream: Stream, handle eventCode: Stream.Event) {
        switch eventCode {
        case .openCompleted:
            print("Stream opened successfully")
        case .hasBytesAvailable:
            print("Stream has bytes available")
        case .hasSpaceAvailable:
            print("Stream has space available")
        case .errorOccurred:
            if let error = aStream.streamError {
                print("Stream error occurred: \(error)")
            }
        case .endEncountered:
            print("Stream end encountered")
            aStream.close()
            aStream.remove(from: .current, forMode: .default)
        default:
            print("Unknown stream event")
        }
    }

    private func registerBackgroundTask() {
        backgroundTask = UIApplication.shared.beginBackgroundTask { [weak self] in
            self?.endBackgroundTask()
        }
        assert(backgroundTask != .invalid)
    }

    private func endBackgroundTask() {
        if backgroundTask != .invalid {
            UIApplication.shared.endBackgroundTask(backgroundTask)
            backgroundTask = .invalid
        }
    }
}
It only shows this much on my Mac screen. Please help! I am not able to figure out a way to stream the complete screen. Is it possible?
iOS version – 18
Xcode version – 16
macOS version – Sequoia 15
Edit:
I tried using RPBroadcastSampleHandler as suggested in the comments, so I:
- Added a new target (Broadcast Upload Extension) to my app and got a new target scaffolded in it.
- This is what the project structure looks like now.
- The updated SampleHandler.swift looks like this:
import ReplayKit
import VideoToolbox

class SampleHandler: RPBroadcastSampleHandler {
    var outputStream: OutputStream?

    override func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) {
        setupStream()
    }

    override func broadcastPaused() { }

    override func broadcastResumed() { }

    override func broadcastFinished() {
        outputStream?.close()
    }

    func setupStream() {
        var writeStream: Unmanaged<CFWriteStream>?
        let ipAddress = "192.168.1.38"
        let port: UInt32 = 5000
        CFStreamCreatePairWithSocketToHost(kCFAllocatorDefault, ipAddress as CFString, port, nil, &writeStream)
        outputStream = writeStream?.takeRetainedValue()
        outputStream?.schedule(in: .current, forMode: .default)
        outputStream?.open()
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        guard let outputStream = outputStream, sampleBufferType == .video else { return }
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let ciImage = CIImage(cvImageBuffer: imageBuffer)
        let context = CIContext()
        if let jpegData = context.jpegRepresentation(of: ciImage, colorSpace: CGColorSpaceCreateDeviceRGB(), options: [:]) {
            let _ = jpegData.withUnsafeBytes { outputStream.write($0.bindMemory(to: UInt8.self).baseAddress!, maxLength: jpegData.count) }
        }
    }
}
- I left the other files unchanged and hit "Build and Run".
- Xcode prompts me to select an app to run; I choose "ScreenStreamer", as it is the app that includes the extension.
- Once the app is installed, it shows this dialog.
- Error details –
The specified capability is not supported by this device.
Domain: com.apple.dt.CoreDeviceError
Code: 1001
User Info: {
DVTErrorCreationDateKey = "2024-08-02 12:18:47 +0000";
IDERunOperationFailingWorker = IDEDebugXPCCoreDeviceWorker;
ServiceName = "com.apple.instruments.dtservicehub";
}
--
Event Metadata: com.apple.dt.IDERunOperationWorkerFinished : {
"device_identifier" = "00008020-000848683A84002E";
"device_isCoreDevice" = 1;
"device_model" = "iPhone11,6";
"device_osBuild" = "18.0 (22A5316j)";
"device_platform" = "com.apple.platform.iphoneos";
"device_thinningType" = "iPhone11,6";
"dvt_coredevice_version" = "397.16";
"dvt_coresimulator_version" = 986;
"dvt_mobiledevice_version" = "1759.0.5";
"launchSession_schemeCommand" = Run;
"launchSession_state" = 1;
"launchSession_targetArch" = arm64;
"operation_duration_ms" = 3252;
"operation_errorCode" = 1001;
"operation_errorDomain" = "com.apple.dt.CoreDeviceError";
"operation_errorWorker" = IDEDebugXPCCoreDeviceWorker;
"operation_name" = IDERunOperationWorkerGroup;
"param_debugger_attachToExtensions" = 1;
"param_debugger_attachToXPC" = 1;
"param_debugger_type" = 1;
"param_destination_isProxy" = 0;
"param_destination_platform" = "com.apple.platform.iphoneos";
"param_diag_113575882_enable" = 0;
"param_diag_MainThreadChecker_stopOnIssue" = 0;
"param_diag_MallocStackLogging_enableDuringAttach" = 0;
"param_diag_MallocStackLogging_enableForXPC" = 1;
"param_diag_allowLocationSimulation" = 1;
"param_diag_checker_tpc_enable" = 1;
"param_diag_gpu_frameCapture_enable" = 0;
"param_diag_gpu_shaderValidation_enable" = 0;
"param_diag_gpu_validation_enable" = 0;
"param_diag_memoryGraphOnResourceException" = 0;
"param_diag_mtc_enable" = 1;
"param_diag_queueDebugging_enable" = 1;
"param_diag_runtimeProfile_generate" = 0;
"param_diag_sanitizer_asan_enable" = 0;
"param_diag_sanitizer_tsan_enable" = 0;
"param_diag_sanitizer_tsan_stopOnIssue" = 0;
"param_diag_sanitizer_ubsan_enable" = 0;
"param_diag_sanitizer_ubsan_stopOnIssue" = 0;
"param_diag_showNonLocalizedStrings" = 0;
"param_diag_viewDebugging_enabled" = 1;
"param_diag_viewDebugging_insertDylibOnLaunch" = 1;
"param_install_style" = 2;
"param_launcher_UID" = 2;
"param_launcher_allowDeviceSensorReplayData" = 0;
"param_launcher_kind" = 0;
"param_launcher_style" = 0;
"param_launcher_substyle" = 2;
"param_runnable_appExtensionHostRunMode" = 0;
"param_runnable_productType" = "com.apple.product-type.app-extension";
"param_structuredConsoleMode" = 1;
"param_testing_launchedForTesting" = 0;
"param_testing_suppressSimulatorApp" = 0;
"param_testing_usingCLI" = 0;
"sdk_canonicalName" = "iphoneos18.0";
"sdk_osVersion" = "18.0";
"sdk_variant" = iphoneos;
}
--
System Information
macOS Version 15.0 (Build 24A5298h)
Xcode 16.0 (23047) (Build 16A5211f)
Timestamp: 2024-08-02T17:48:47+05:30
I am not sure what is causing this error.
2 Answers
Thank you @iSpain17 and @GoksuBayy for the suggestions. Here's what worked for me: I set up the broadcast extension as usual and created a picker button to launch the broadcast extension picker.
ViewController.swift
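Roughly, the idea is to drop an RPSystemBroadcastPickerView into the view hierarchy and point it at the extension. A minimal sketch of that setup (assuming UIKit; the preferredExtension value is a placeholder you must replace with your extension target's actual bundle identifier):

import UIKit
import ReplayKit

class ViewController: UIViewController {

    override func viewDidLoad() {
        super.viewDidLoad()

        // System-provided button that presents the broadcast picker when tapped.
        let pickerView = RPSystemBroadcastPickerView(frame: CGRect(x: 0, y: 0, width: 60, height: 60))
        // Placeholder bundle identifier of the Broadcast Upload Extension target.
        pickerView.preferredExtension = "com.example.ScreenStreamer.BroadcastExtension"
        pickerView.showsMicrophoneButton = false
        pickerView.center = view.center
        view.addSubview(pickerView)
    }
}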
Then, for processing the video buffer, this implementation worked for me.
SampleHandler.swift
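The core of it is converting each video CMSampleBuffer to JPEG and writing it to the socket with a length prefix so the receiver can tell where each frame ends. A minimal sketch along those lines (the host, port, and 4-byte big-endian framing are assumptions and must match whatever the receiver script expects):

import ReplayKit
import CoreImage

class SampleHandler: RPBroadcastSampleHandler {
    var outputStream: OutputStream?
    private let context = CIContext()

    override func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) {
        var writeStream: Unmanaged<CFWriteStream>?
        // Placeholder host/port of the PC running the receiver script.
        CFStreamCreatePairWithSocketToHost(kCFAllocatorDefault, "192.168.1.38" as CFString, 5000, nil, &writeStream)
        outputStream = writeStream?.takeRetainedValue()
        outputStream?.open()
    }

    override func broadcastFinished() {
        outputStream?.close()
    }

    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        guard sampleBufferType == .video,
              let outputStream = outputStream,
              CMSampleBufferDataIsReady(sampleBuffer),
              let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

        // Encode the pixel buffer as JPEG.
        let ciImage = CIImage(cvImageBuffer: imageBuffer)
        guard let jpegData = context.jpegRepresentation(of: ciImage,
                                                        colorSpace: CGColorSpaceCreateDeviceRGB(),
                                                        options: [:]) else { return }

        // Length-prefix framing: 4-byte big-endian frame size, then the JPEG bytes.
        var frameSize = UInt32(jpegData.count).bigEndian
        let packet = Data(bytes: &frameSize, count: MemoryLayout<UInt32>.size) + jpegData
        packet.withUnsafeBytes { raw in
            if let base = raw.baseAddress {
                _ = outputStream.write(base.assumingMemoryBound(to: UInt8.self), maxLength: packet.count)
            }
        }
    }
}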
Python script to receive the stream on PC.
I guess it is not possible. Apple's RPScreenRecorder documentation says "Your app can record the audio and video inside of the app", so you cannot record the iPhone screen beyond your own app's screen. However, you can use RPBroadcastSampleHandler to stream your iPhone's screen. See: RPBroadcastSampleHandler