
I am trying to build an app that streams the iPhone screen to a PC over the Wi-Fi network. I am able to capture the app's screen and receive it on my PC, but if the app goes into the background (I exit using the home button), the stream still shows the app's screen. I want it to stream whatever is currently visible on the iPhone.
This is the complete script.

import Foundation
import AVFoundation
import ReplayKit
import UIKit

class ScreenRecorder: NSObject, ObservableObject, StreamDelegate {
    private var recorder = RPScreenRecorder.shared()
    private var outputStream: OutputStream?
    private var backgroundTask: UIBackgroundTaskIdentifier = .invalid

    override init() {
        super.init()
    }

    func startCapturing() {
        setupStream()
        registerBackgroundTask()
        recorder.isMicrophoneEnabled = false
        recorder.startCapture(handler: { [weak self] (sampleBuffer, bufferType, error) in
            if let error = error {
                print("Error capturing sample buffer: \(error.localizedDescription)")
                return
            }
            if bufferType == .video {
                self?.processSampleBuffer(sampleBuffer: sampleBuffer)
            }
        }) { error in
            if let error = error {
                print("Error starting capture: \(error.localizedDescription)")
            } else {
                print("Capture started successfully")
            }
        }
    }

    func stopCapturing() {
        recorder.stopCapture { [weak self] error in
            if let error = error {
                print("Error stopping capture: \(error.localizedDescription)")
            }
            self?.outputStream?.close()
            self?.outputStream = nil
        }
        endBackgroundTask()
    }

    private func processSampleBuffer(sampleBuffer: CMSampleBuffer) {
        guard let outputStream = outputStream else {
            print("Output stream is nil")
            return
        }

        guard CMSampleBufferDataIsReady(sampleBuffer) else {
            print("Sample buffer data is not ready")
            return
        }

        if let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
            let ciImage = CIImage(cvImageBuffer: imageBuffer)
            let context = CIContext()

            if let jpegData = context.jpegRepresentation(of: ciImage, colorSpace: CGColorSpaceCreateDeviceRGB(), options: [:]) {
                var frameSize = UInt32(jpegData.count)
                var sizeData = Data(bytes: &frameSize, count: MemoryLayout<UInt32>.size)
                
                // Send the frame size
                sizeData.withUnsafeBytes { (rawBufferPointer: UnsafeRawBufferPointer) in
                    if let baseAddress = rawBufferPointer.baseAddress {
                        let bytesWritten = outputStream.write(baseAddress.assumingMemoryBound(to: UInt8.self), maxLength: sizeData.count)
                        if bytesWritten <= 0 {
                            print("Error writing frame size to output stream: \(outputStream.streamError?.localizedDescription ?? "Unknown error")")
                            return
                        } else {
                            print("Wrote frame size \(frameSize) to output stream")
                        }
                    }
                }
                
            
                // Send the frame data
                jpegData.withUnsafeBytes { (rawBufferPointer: UnsafeRawBufferPointer) in
                    if let baseAddress = rawBufferPointer.baseAddress {
                        let bytesWritten = outputStream.write(baseAddress.assumingMemoryBound(to: UInt8.self), maxLength: jpegData.count)
                        if bytesWritten <= 0 {
                            print("Error writing frame data to output stream: \(outputStream.streamError?.localizedDescription ?? "Unknown error")")
                        } else {
                            print("Wrote \(bytesWritten) bytes of sample buffer data to the output stream")
                        }
                    }
                }
            } else {
                print("Error converting CIImage to JPEG data")
            }
        } else {
            print("Failed to get image buffer")
        }
    }


    func setupStream() {
        var readStream: Unmanaged<CFReadStream>?
        var writeStream: Unmanaged<CFWriteStream>?

        let ipAddress = "192.168.x.xx" 
        let port: UInt32 = 5001
        print("Attempting to connect to \(ipAddress) on port \(port)")

        CFStreamCreatePairWithSocketToHost(kCFAllocatorDefault, ipAddress as CFString, port, &readStream, &writeStream)

        guard let outStream = writeStream?.takeRetainedValue() else {
            print("Failed to create write stream")
            return
        }
        outputStream = outStream
        outputStream?.delegate = self
        outputStream?.schedule(in: .current, forMode: .default)
        outputStream?.open()

        print("Stream setup complete, attempting to open output stream")
    }

    func stream(_ aStream: Stream, handle eventCode: Stream.Event) {
        switch eventCode {
        case .openCompleted:
            print("Stream opened successfully")
        case .hasBytesAvailable:
            print("Stream has bytes available")
        case .hasSpaceAvailable:
            print("Stream has space available")
        case .errorOccurred:
            if let error = aStream.streamError {
                print("Stream error occurred: \(error)")
            }
        case .endEncountered:
            print("Stream end encountered")
            aStream.close()
            aStream.remove(from: .current, forMode: .default)
        default:
            print("Unknown stream event")
        }
    }

    private func registerBackgroundTask() {
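        // Note: beginBackgroundTask only requests a short window of extra runtime (roughly 30 seconds); it does not keep screen capture running indefinitely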
        backgroundTask = UIApplication.shared.beginBackgroundTask { [weak self] in
            self?.endBackgroundTask()
        }
        assert(backgroundTask != .invalid)
    }

    private func endBackgroundTask() {
        if backgroundTask != .invalid {
            UIApplication.shared.endBackgroundTask(backgroundTask)
            backgroundTask = .invalid
        }
    }
}
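
For reference, the PC side has to parse this framing: a 4-byte UInt32 frame size followed by the JPEG bytes. A minimal Python sketch of such a receiver (my actual receiver script is not shown here; it assumes the size prefix is little-endian, the native byte order on arm64, and the same port 5001 as above):

import socket
import struct

def recv_exact(conn, n):
    # Read exactly n bytes from the TCP connection
    buf = b""
    while len(buf) < n:
        chunk = conn.recv(n - len(buf))
        if not chunk:
            raise ConnectionError("stream closed")
        buf += chunk
    return buf

server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.bind(("0.0.0.0", 5001))  # port must match the iOS side
server.listen(1)
conn, _ = server.accept()

while True:
    (size,) = struct.unpack("<I", recv_exact(conn, 4))  # 4-byte frame size
    jpeg = recv_exact(conn, size)                       # the JPEG frame itself
    # decode/display jpeg here, e.g. cv2.imdecode(np.frombuffer(jpeg, np.uint8), 1)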

[Screenshot: visible stream on the Mac]

It only shows this much on my Mac screen. Please help! I am not able to figure out a way to stream my complete screen. Is it possible?

iOS version – 18
Xcode version – 16
macOS version – Sequoia 15

Edit:

I tried using RPBroadcastSampleHandler as suggested in the comments, so I:

  1. Added a new target to my app (Broadcast Upload Extension) and got a new target scaffolded in my app.
  2. This is what the project structure looks like now:
     [Screenshot: project structure]
  3. The updated SampleHandler.swift looks like this:
import ReplayKit
import VideoToolbox

class SampleHandler: RPBroadcastSampleHandler {
    var outputStream: OutputStream?

    override func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) {
        setupStream()
    }
    
    override func broadcastPaused() { }
    
    override func broadcastResumed() { }
    
    override func broadcastFinished() {
        outputStream?.close()
    }
    
    func setupStream() {
        var writeStream: Unmanaged<CFWriteStream>?
        let ipAddress = "192.168.1.38"
        let port: UInt32 = 5000
        
        CFStreamCreatePairWithSocketToHost(kCFAllocatorDefault, ipAddress as CFString, port, nil, &writeStream)
        
        outputStream = writeStream?.takeRetainedValue()
        outputStream?.schedule(in: .current, forMode: .default)
        outputStream?.open()
    }
    
    override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
        guard let outputStream = outputStream, sampleBufferType == .video else { return }
        
        guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
        let ciImage = CIImage(cvImageBuffer: imageBuffer)
        let context = CIContext()
        
        if let jpegData = context.jpegRepresentation(of: ciImage, colorSpace: CGColorSpaceCreateDeviceRGB(), options: [:]) {
            let _ = jpegData.withUnsafeBytes { outputStream.write($0.bindMemory(to: UInt8.self).baseAddress!, maxLength: jpegData.count) }
        }
    }
}
  4. I left the other files unchanged and hit "Build and Run".
  5. Xcode prompts me to select an app to run; I choose "ScreenStreamer", as it is the app that includes the extension.
  6. Once the app is installed, it shows this dialog:
     [Screenshot: error dialog]
  7. Error details –
The specified capability is not supported by this device.
Domain: com.apple.dt.CoreDeviceError
Code: 1001
User Info: {
    DVTErrorCreationDateKey = "2024-08-02 12:18:47 +0000";
    IDERunOperationFailingWorker = IDEDebugXPCCoreDeviceWorker;
    ServiceName = "com.apple.instruments.dtservicehub";
}
--

Event Metadata: com.apple.dt.IDERunOperationWorkerFinished : {
    "device_identifier" = "00008020-000848683A84002E";
    "device_isCoreDevice" = 1;
    "device_model" = "iPhone11,6";
    "device_osBuild" = "18.0 (22A5316j)";
    "device_platform" = "com.apple.platform.iphoneos";
    "device_thinningType" = "iPhone11,6";
    "dvt_coredevice_version" = "397.16";
    "dvt_coresimulator_version" = 986;
    "dvt_mobiledevice_version" = "1759.0.5";
    "launchSession_schemeCommand" = Run;
    "launchSession_state" = 1;
    "launchSession_targetArch" = arm64;
    "operation_duration_ms" = 3252;
    "operation_errorCode" = 1001;
    "operation_errorDomain" = "com.apple.dt.CoreDeviceError";
    "operation_errorWorker" = IDEDebugXPCCoreDeviceWorker;
    "operation_name" = IDERunOperationWorkerGroup;
    "param_debugger_attachToExtensions" = 1;
    "param_debugger_attachToXPC" = 1;
    "param_debugger_type" = 1;
    "param_destination_isProxy" = 0;
    "param_destination_platform" = "com.apple.platform.iphoneos";
    "param_diag_113575882_enable" = 0;
    "param_diag_MainThreadChecker_stopOnIssue" = 0;
    "param_diag_MallocStackLogging_enableDuringAttach" = 0;
    "param_diag_MallocStackLogging_enableForXPC" = 1;
    "param_diag_allowLocationSimulation" = 1;
    "param_diag_checker_tpc_enable" = 1;
    "param_diag_gpu_frameCapture_enable" = 0;
    "param_diag_gpu_shaderValidation_enable" = 0;
    "param_diag_gpu_validation_enable" = 0;
    "param_diag_memoryGraphOnResourceException" = 0;
    "param_diag_mtc_enable" = 1;
    "param_diag_queueDebugging_enable" = 1;
    "param_diag_runtimeProfile_generate" = 0;
    "param_diag_sanitizer_asan_enable" = 0;
    "param_diag_sanitizer_tsan_enable" = 0;
    "param_diag_sanitizer_tsan_stopOnIssue" = 0;
    "param_diag_sanitizer_ubsan_enable" = 0;
    "param_diag_sanitizer_ubsan_stopOnIssue" = 0;
    "param_diag_showNonLocalizedStrings" = 0;
    "param_diag_viewDebugging_enabled" = 1;
    "param_diag_viewDebugging_insertDylibOnLaunch" = 1;
    "param_install_style" = 2;
    "param_launcher_UID" = 2;
    "param_launcher_allowDeviceSensorReplayData" = 0;
    "param_launcher_kind" = 0;
    "param_launcher_style" = 0;
    "param_launcher_substyle" = 2;
    "param_runnable_appExtensionHostRunMode" = 0;
    "param_runnable_productType" = "com.apple.product-type.app-extension";
    "param_structuredConsoleMode" = 1;
    "param_testing_launchedForTesting" = 0;
    "param_testing_suppressSimulatorApp" = 0;
    "param_testing_usingCLI" = 0;
    "sdk_canonicalName" = "iphoneos18.0";
    "sdk_osVersion" = "18.0";
    "sdk_variant" = iphoneos;
}
--


System Information

macOS Version 15.0 (Build 24A5298h)
Xcode 16.0 (23047) (Build 16A5211f)
Timestamp: 2024-08-02T17:48:47+05:30

I am not sure what is causing this error.

2 Answers


  1. Chosen as BEST ANSWER

    Thank you @iSpain17 @GoksuBayy for the suggestions. Here's what worked for me: I set up the broadcast extension as usual and added an RPSystemBroadcastPickerView button to launch the broadcast picker, with its preferredExtension set to the broadcast extension's bundle identifier.

    ViewController.swift

    import UIKit
    import ReplayKit
    
    class ViewController: UIViewController {
        override func viewDidLoad() {
            super.viewDidLoad()
            view.addSubview(pickerButton)
            pickerButton.translatesAutoresizingMaskIntoConstraints = false
            NSLayoutConstraint.activate([
                pickerButton.centerXAnchor.constraint(equalTo: view.centerXAnchor),
                pickerButton.centerYAnchor.constraint(equalTo: view.centerYAnchor),
                pickerButton.widthAnchor.constraint(equalToConstant: 44),
                pickerButton.heightAnchor.constraint(equalToConstant: 44)
            ])
            view.bringSubviewToFront(pickerButton)
        }
        let pickerButton: RPSystemBroadcastPickerView = {
            let picker = RPSystemBroadcastPickerView(frame: CGRect(origin: .zero, size: CGSize(width: 44, height: 44)))
            picker.preferredExtension = "com.MH.screen-sharing-example.screen-share-extension"
            return picker
        }()
       
    }
    

    Then for processing Video buffer, this implementation worked for me.

    SampleHandler.swift

    //
    //  SampleHandler.swift
    //  screen-share-extension
    //
    //  Created by Prayas Gautam on 05/08/24.
    //
    
    import UIKit
    import ReplayKit
    import Network
    import Foundation
    
    
    class SampleHandler: RPBroadcastSampleHandler {
        
        var connection: NWConnection?
        var lastSendTime: TimeInterval = 0
        let sendInterval: TimeInterval = 1.0 / 10.0 // 10 FPS (send at most 10 frames per second)
    
        override init() {
            super.init()
            setupConnection()
            NotificationCenter.default.addObserver(self, selector: #selector(appDidEnterBackground), name: UIApplication.didEnterBackgroundNotification, object: nil)
            NotificationCenter.default.addObserver(self, selector: #selector(appWillEnterForeground), name: UIApplication.willEnterForegroundNotification, object: nil)
        }
        
        deinit {
            NotificationCenter.default.removeObserver(self)
        }
        
        func setupConnection() {
            let host = NWEndpoint.Host("192.168.x.xx")
            let port = NWEndpoint.Port(integerLiteral: 8080)
            connection = NWConnection(host: host, port: port, using: .udp)
            connection?.start(queue: .main)
        }
        
        func processVideoSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
            guard let connection = connection else { return }

            // Convert the sample buffer to image data
            guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            let ciImage = CIImage(cvImageBuffer: imageBuffer)
            let context = CIContext()
            guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else { return }

            // Resize the image
            let uiImage = UIImage(cgImage: cgImage)
            let size = CGSize(width: uiImage.size.width / 2, height: uiImage.size.height / 2) // Reduce resolution by half
            UIGraphicsBeginImageContext(size)
            uiImage.draw(in: CGRect(origin: .zero, size: size))
            let resizedImage = UIGraphicsGetImageFromCurrentImageContext()
            UIGraphicsEndImageContext()

            guard let jpegData = resizedImage?.jpegData(compressionQuality: 0.2) else { return } // Adjust compression quality

            // Send data over UDP
            connection.send(content: jpegData, completion: .contentProcessed({ sendError in
                if let sendError = sendError {
                    NSLog("Failed to send data: \(sendError)")
                } else {
                    NSLog("Data sent successfully")
                }
            }))
        }
        
        @objc func appDidEnterBackground() {
            // Pause streaming or reduce frequency
            NSLog("App entered background")
        }
        
        @objc func appWillEnterForeground() {
            // Resume streaming
            NSLog("App will enter foreground")
        }
        override func broadcastStarted(withSetupInfo setupInfo: [String : NSObject]?) {
            // User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional. 
        }
        
        override func broadcastPaused() {
            // User has requested to pause the broadcast. Samples will stop being delivered.
        }
        
        override func broadcastResumed() {
            // User has requested to resume the broadcast. Samples delivery will resume.
        }
        
        override func broadcastFinished() {
            // User has requested to finish the broadcast.
        }
        
        override func processSampleBuffer(_ sampleBuffer: CMSampleBuffer, with sampleBufferType: RPSampleBufferType) {
            switch sampleBufferType {
            case RPSampleBufferType.video:
                let currentTime = CACurrentMediaTime()
                if currentTime - lastSendTime >= sendInterval {
                    lastSendTime = currentTime
                    processVideoSampleBuffer(sampleBuffer)
                }
                break
            case RPSampleBufferType.audioApp:
                // Handle audio sample buffer for app audio
                break
            case RPSampleBufferType.audioMic:
                // Handle audio sample buffer for mic audio
                break
            @unknown default:
                // Handle other sample buffer types
                fatalError("Unknown type of sample buffer")
            }
        }
    }
    
    

    Python script to receive the stream on PC.

    import socket
    import cv2
    import numpy as np
    
    # Set up socket
    UDP_IP = "0.0.0.0"
    UDP_PORT = 8080
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind((UDP_IP, UDP_PORT))
    
    while True:
        data, addr = sock.recvfrom(65536)  # max UDP datagram size; each datagram must hold one whole JPEG frame
    
        np_data = np.frombuffer(data, dtype=np.uint8)
        frame = cv2.imdecode(np_data, 1)
        
        if frame is not None:
            cv2.imshow('Video Stream', frame)
            
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
    
    # Release resources
    sock.close()
    cv2.destroyAllWindows()
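
    One caveat with this UDP approach: the receiver assumes each datagram carries one complete JPEG frame, which is why the sender halves the resolution and uses a low compression quality; a single UDP datagram payload maxes out around 64 KB, so larger frames would fail to send.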
    
    

  2. It is not possible with RPScreenRecorder, I guess. Apple's RPScreenRecorder documentation says "Your app can record the audio and video inside of the app", so you cannot record the iPhone screen beyond your own app's screen. However, you can use RPBroadcastSampleHandler to stream your whole iPhone screen. See: RPBroadcastSampleHandler
