When the view loads on device I just get a black display. I have very similar code working with a Storyboard instance using UIViewController, but I need to get away from that since I'm working with visionOS and want to abstract this into using RealityKit and VideoPlayerLayer. However, for prototyping and iterating on other tasks I would love to get this AVSampleBufferDisplayLayer working… ultimately I will need to pass an AVSampleBufferVideoRenderer to VideoPlayerLayer, so getting this implementation working helps me dive into the RealityKit rendering.
Here is the code — I've put everything into one file for ease of debugging and asking questions.
Thanks!
/// Root SwiftUI view that hosts the UIKit view backing the
/// AVSampleBufferDisplayLayer.
struct MirrorView: View {
    var body: some View {
        VStack {
            LayerView()
        }
    }
}
/// Bridges `LayerUIView` (UIKit) into the SwiftUI hierarchy.
struct LayerView: UIViewRepresentable {
    func makeUIView(context: Context) -> UIView {
        print("LayerUIView is being created")
        return LayerUIView()
    }

    // Note: the context parameter must be the protocol's `Context` typealias
    // (i.e. `UIViewRepresentableContext<LayerView>`); the bare generic name
    // does not compile.
    func updateUIView(_ uiView: UIView, context: Context) {
        print("LayerUIView is being updated")
    }
}
/// UIKit view that renders incoming video sample buffers from `Networking`
/// into an `AVSampleBufferDisplayLayer` sublayer.
final class LayerUIView: UIView {
    private let networking = Networking()
    private let displayLayer = AVSampleBufferDisplayLayer()
    private var subscriptions = Set<AnyCancellable>()
    private var sampleBufferTask: Task<Void, Never>?

    override init(frame: CGRect) {
        super.init(frame: frame)
        print("LayerUIView initialized")
        setupVideoLayer()
        setupNetworking()
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
        sampleBufferTask?.cancel()
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        // Resize the *display* layer, not the view's own backing layer.
        // The original code set `layer.frame = bounds` (a no-op on the
        // view's root layer), so `displayLayer` kept the zero-sized frame
        // it was given at init time — a likely cause of the black screen.
        displayLayer.frame = bounds
    }

    // MARK: - Setup

    private func setupVideoLayer() {
        // `bounds` is typically .zero here; layoutSubviews() gives the layer
        // its real size once the view is laid out.
        displayLayer.frame = bounds
        displayLayer.videoGravity = .resizeAspect
        layer.addSublayer(displayLayer)
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(handleFailedToDecodeNotification(_:)),
            name: .AVSampleBufferDisplayLayerFailedToDecode,
            object: displayLayer
        )
    }

    @objc private func handleFailedToDecodeNotification(_ notification: Notification) {
        if let error = notification.userInfo?[AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey] {
            print("Failed to decode sample buffer. Error: \(error)")
        } else {
            print("Failed to decode sample buffer. No error information available.")
        }
    }

    private func setupNetworking() {
        networking.startAdvertising()
        // NOTE(review): isConnected is almost certainly still false this early;
        // logged for debugging only.
        print("Networking is connected: \(networking.isConnected)")
        startSampleBufferTask()
    }

    // MARK: - Task Management

    private func startSampleBufferTask() {
        // Weak capture so the long-lived stream loop does not keep the view
        // alive; hop to the main actor before touching the layer.
        sampleBufferTask = Task { [weak self] in
            guard let self else { return }
            for await sampleBuffer in self.networking.sampleBuffers {
                let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)
                print("Format Description: \(String(describing: formatDescription))")
                let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                print("Presentation Timestamp: \(presentationTimeStamp)")
                let duration = CMSampleBufferGetDuration(sampleBuffer)
                print("Duration: \(duration)")
                await MainActor.run {
                    self.displayLayer.sampleBufferRenderer.enqueue(sampleBuffer)
                }
            }
        }
    }

    private func stopSampleBufferTask() {
        sampleBufferTask?.cancel()
        sampleBufferTask = nil
    }
}
// Xcode canvas preview for the SwiftUI wrapper.
#Preview {
    MirrorView()
}
I created a UIKit version of this and it successfully loaded as an iOS device app on the AVP.
Here is the ViewController running that code:
/// UIKit reference implementation: renders incoming video sample buffers from
/// `Networking` into an `AVSampleBufferDisplayLayer` owned by the controller's
/// root view.
class ViewController: UIViewController { //, VideoDecoderAnnexBAdaptorDelegate {

    // MARK: - Properties

    private let networking = Networking()
    private let displayLayer = AVSampleBufferDisplayLayer()
    private var subscriptions = Set<AnyCancellable>()
    private var sampleBufferTask: Task<Void, Never>?

    // MARK: - Lifecycle

    override func viewDidLoad() {
        super.viewDidLoad()
        setupVideoLayer()
        setupNetworking()
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
        sampleBufferTask?.cancel()
    }

    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        // Keep the video layer in sync with the view's final bounds; more
        // robust than relying on the viewDidLoad-time bounds alone.
        displayLayer.frame = view.bounds
    }

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask {
        return .allButUpsideDown
    }

    override var shouldAutorotate: Bool {
        return true
    }

    override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) {
        super.viewWillTransition(to: size, with: coordinator)
        coordinator.animate(alongsideTransition: { _ in
            // Adjust layout for the new orientation.
            self.displayLayer.frame = self.view.bounds
        }, completion: nil)
    }

    // MARK: - Setup Methods

    private func setupVideoLayer() {
        displayLayer.frame = view.bounds
        displayLayer.videoGravity = .resizeAspect
        displayLayer.backgroundColor = UIColor.black.cgColor
        view.layer.addSublayer(displayLayer)
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(handleFailedToDecodeNotification(_:)),
            name: .AVSampleBufferDisplayLayerFailedToDecode,
            object: displayLayer
        )
    }

    @objc private func handleFailedToDecodeNotification(_ notification: Notification) {
        if let error = notification.userInfo?[AVSampleBufferDisplayLayerFailedToDecodeNotificationErrorKey] {
            print("Failed to decode sample buffer. Error: \(error)")
        } else {
            print("Failed to decode sample buffer. No error information available.")
        }
    }

    private func setupNetworking() {
        networking.startAdvertising()
        // NOTE(review): isConnected is almost certainly still false this early;
        // logged for debugging only.
        print("Networking is connected: \(networking.isConnected)")
        startSampleBufferTask()
    }

    // MARK: - Task Management

    private func startSampleBufferTask() {
        // Weak capture so the long-lived stream loop does not keep the
        // controller alive; hop to the main actor before touching the layer.
        sampleBufferTask = Task { [weak self] in
            guard let self else { return }
            for await sampleBuffer in self.networking.sampleBuffers {
                let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer)
                print("Format Description: \(String(describing: formatDescription))")
                let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                print("Presentation Timestamp: \(presentationTimeStamp)")
                let duration = CMSampleBufferGetDuration(sampleBuffer)
                print("Duration: \(duration)")
                await MainActor.run {
                    self.displayLayer.sampleBufferRenderer.enqueue(sampleBuffer)
                }
            }
        }
    }

    private func stopSampleBufferTask() {
        sampleBufferTask?.cancel()
        sampleBufferTask = nil
    }
}
