6.1 C
Canberra
Monday, October 27, 2025

ios – How to pass a UIViewRepresentable wrapping an AVSampleBufferDisplayLayer to SwiftUI


When the view loads on device I just get a black screen. I have very similar code working with a Storyboard instance using UIViewController, but I need to move away from that since I'm working on visionOS and want to abstract this into RealityKit and VideoPlayerLayer. For prototyping and iterating on other tasks, though, I'd like this AVSampleBufferDisplayLayer to work — ultimately I'll need to pass an AVSampleBufferVideoRenderer to VideoPlayerLayer, so getting this implementation working helps me dive into the RealityKit rendering.

Here is the code; I've put everything into one file for ease of debugging and asking questions.

Thanks!

/// Root SwiftUI view that hosts the UIKit-backed video layer view.
struct MirrorView: View {
    var body: some View {
        VStack {
            LayerView()
        }
    }
}

/// Bridges `LayerUIView` (UIKit) into the SwiftUI hierarchy.
struct LayerView: UIViewRepresentable {
    func makeUIView(context: Context) -> UIView {
        print("LayerUIView is being created")
        return LayerUIView()
    }

    // NOTE: the original declared `context: UIViewRepresentableContext` without
    // its generic argument, which does not compile; the protocol's `Context`
    // typealias is the idiomatic spelling.
    func updateUIView(_ uiView: UIView, context: Context) {
        print("LayerUIView is being updated")
    }
}

/// UIView that renders incoming sample buffers via an AVSampleBufferDisplayLayer.
///
/// Likely cause of the black screen in the original: `layoutSubviews` resized
/// `layer` (the view's own backing layer, which UIKit already manages) instead
/// of `displayLayer`. Since `init` runs while `bounds` is `.zero`, the display
/// layer kept a zero-sized frame forever and nothing was visible.
class LayerUIView: UIView {

    // MARK: - Properties
    private let networking = Networking()
    private let displayLayer = AVSampleBufferDisplayLayer()
    private var subscriptions = Set<AnyCancellable>()
    private var sampleBufferTask: Task<Void, Never>?

    override init(frame: CGRect) {
        super.init(frame: frame)
        print("LayerUIView initialized")
        setupVideoLayer()
        setupNetworking()
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        // Stop consuming the stream and drop the notification observer so the
        // Task does not keep `self` (and the network session) alive.
        stopSampleBufferTask()
        NotificationCenter.default.removeObserver(self)
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        // FIX: resize the video sublayer, not the view's own layer.
        displayLayer.frame = bounds
    }

    // MARK: - Setup

    /// Installs the display layer as a sublayer and observes decode failures.
    private func setupVideoLayer() {
        displayLayer.frame = bounds
        displayLayer.videoGravity = .resizeAspect
        layer.addSublayer(displayLayer)
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(handleFailedToDecodeNotification(_:)),
            name: .AVSampleBufferDisplayLayerFailedToDecode,
            object: displayLayer
        )
    }

    @objc private func handleFailedToDecodeNotification(_ notification: Notification) {
        if let error = notification.userInfo?[AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey] {
            print("Failed to decode sample buffer. Error: \(error)")
        } else {
            print("Failed to decode sample buffer. No error info available.")
        }
    }

    private func setupNetworking() {
        networking.startAdvertising()
        // NOTE(review): `isConnected` is read immediately after
        // `startAdvertising()`, so it will almost certainly print `false` here.
        print("Networking is connected: \(networking.isConnected)")
        startSampleBufferTask()
    }

    // MARK: - Task Management

    /// Consumes the async stream of sample buffers and enqueues each one for
    /// display on the main actor.
    private func startSampleBufferTask() {
        sampleBufferTask = Task { [weak self] in
            guard let self else { return }
            for await sampleBuffer in networking.sampleBuffers {
                let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)
                print("Format Description: \(String(describing: formatDescription))")

                let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                print("Presentation Timestamp: \(presentationTimeStamp)")

                let duration = CMSampleBufferGetDuration(sampleBuffer)
                print("Duration: \(duration)")

                // Enqueue on the main actor (layer mutation is UI work);
                // preferred over DispatchQueue.main.async inside a Task.
                await MainActor.run {
                    self.displayLayer.sampleBufferRenderer.enqueue(sampleBuffer)
                }
            }
        }
    }

    private func stopSampleBufferTask() {
        sampleBufferTask?.cancel()
        sampleBufferTask = nil
    }
}

#Preview {
    MirrorView()
}

I created a UIKit version of this and successfully ran it as an iOS app on the AVP.

Here is the ViewController running that code:


/// UIKit reference implementation: renders the networked sample-buffer stream
/// into an AVSampleBufferDisplayLayer owned by the view controller's view.
class ViewController: UIViewController { //, VideoDecoderAnnexBAdaptorDelegate {

    // MARK: - Properties
    private let networking = Networking()
    private let displayLayer = AVSampleBufferDisplayLayer()
    private var subscriptions = Set<AnyCancellable>()
    private var sampleBufferTask: Task<Void, Never>?

    // MARK: - Lifecycle
    override func viewDidLoad() {
        super.viewDidLoad()

        setupVideoLayer()
        setupNetworking()
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        // Keep the video layer in sync with the view's final bounds
        // (viewDidLoad runs before Auto Layout settles).
        displayLayer.frame = view.bounds
    }

    deinit {
        sampleBufferTask?.cancel()
        NotificationCenter.default.removeObserver(self)
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .allButUpsideDown
    }

    override var shouldAutorotate: Bool {
        return true
    }

    override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) {
        super.viewWillTransition(to: size, with: coordinator)

        coordinator.animate(alongsideTransition: { _ in
            // Adjust layout for the new orientation
            self.displayLayer.frame = self.view.bounds
        }, completion: nil)
    }

    // MARK: - Setup Methods

    /// Installs the display layer as a sublayer and observes decode failures.
    private func setupVideoLayer() {
        displayLayer.frame = view.bounds
        displayLayer.videoGravity = .resizeAspect
        displayLayer.backgroundColor = UIColor.black.cgColor
        view.layer.addSublayer(displayLayer)
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(handleFailedToDecodeNotification(_:)),
            name: .AVSampleBufferDisplayLayerFailedToDecode,
            object: displayLayer
        )
    }

    @objc private func handleFailedToDecodeNotification(_ notification: Notification) {
        if let error = notification.userInfo?[AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey] {
            print("Failed to decode sample buffer. Error: \(error)")
        } else {
            print("Failed to decode sample buffer. No error info available.")
        }
    }

    private func setupNetworking() {
        networking.startAdvertising()
        // NOTE(review): read immediately after startAdvertising(); expect `false`.
        print("Networking is connected: \(networking.isConnected)")
        startSampleBufferTask()
    }

    // MARK: - Task Management

    /// Consumes the async sample-buffer stream, enqueueing each buffer for
    /// display on the main actor.
    private func startSampleBufferTask() {
        sampleBufferTask = Task { [weak self] in
            guard let self else { return }
            for await sampleBuffer in networking.sampleBuffers {
                let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)
                print("Format Description: \(String(describing: formatDescription))")

                let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                print("Presentation Timestamp: \(presentationTimeStamp)")

                let duration = CMSampleBufferGetDuration(sampleBuffer)
                print("Duration: \(duration)")

                await MainActor.run {
                    self.displayLayer.sampleBufferRenderer.enqueue(sampleBuffer)
                }
            }
        }
    }

    private func stopSampleBufferTask() {
        sampleBufferTask?.cancel()
        sampleBufferTask = nil
    }
}

Related Articles

LEAVE A REPLY

Please enter your comment!
Please enter your name here

[td_block_social_counter facebook="tagdiv" twitter="tagdivofficial" youtube="tagdiv" style="style8 td-social-boxed td-social-font-icons" tdc_css="eyJhbGwiOnsibWFyZ2luLWJvdHRvbSI6IjM4IiwiZGlzcGxheSI6IiJ9LCJwb3J0cmFpdCI6eyJtYXJnaW4tYm90dG9tIjoiMzAiLCJkaXNwbGF5IjoiIn0sInBvcnRyYWl0X21heF93aWR0aCI6MTAxOCwicG9ydHJhaXRfbWluX3dpZHRoIjo3Njh9" custom_title="Stay Connected" block_template_id="td_block_template_8" f_header_font_family="712" f_header_font_transform="uppercase" f_header_font_weight="500" f_header_font_size="17" border_color="#dd3333"]
- Advertisement -spot_img

Latest Articles