[Tabber] ----------- onInitPlayer ---------
[Tabber] url : nil
[Tabber] liveUrl : nil
[Tabber] ----------- HLSInternalViewLiveStreamRetryManager / startRetry -----------
[Tabber] ----------- HLSInternalViewLiveStreamRetryManager / updateRetry -----------
[Tabber] ----------- HLSInternalViewLiveStreamRetryManager / osPip 아님 -----------
[Tabber] currentLiveUrl : nil
[Tabber] SL 오버레이 웹뷰에서 수신받음 : onSetLiveStreamUrl
[Tabber] onSetLiveStreamUrl 에서 didUpdateVideoUrl 호출함 -> Optional(<https://live.us.shoplive.cloud/live/45-5tXLuZIWjkWQdhjI-ue2.m3u8>)
[Tabber] Internal Player 플레이어 뷰 onDidUpdateVideo 호출됨
[Tabber] Internal Player 에서 didUpdateVideoUrl 호출 -> <https://live.us.shoplive.cloud/live/45-5tXLuZIWjkWQdhjI-ue2.m3u8>
웹뷰에서 호출한 데이터로 플레이
다시 돌아와도 그대로 웹뷰에서 가져온거 씀
onSetCampaignKey 를 왜 이후에 해야하나? → internalPlayerView가 옵셔널이라 생성되기 전까지는 onSetCampaignKey 함수 안먹힘
[Tabber] [7e69b0a299a7] 방송 -> onSetCampaignKey 호출
[Tabber] [7e69b0a299a7] 방송 : liveUrl -> nil
[Tabber] ----------- onInitPlayer ---------
[Tabber] url : nil
[Tabber] liveUrl : nil
[Tabber] ----------- HLSInternalViewLiveStreamRetryManager / startRetry -----------
[Tabber] ----------- HLSInternalViewLiveStreamRetryManager / updateRetry -----------
[Tabber] ----------- HLSInternalViewLiveStreamRetryManager / osPip 아님 -----------
[Tabber] currentLiveUrl : nil
[Tabber] [7e69b0a299a7] 방송 -> onSetCampaignKey callLiveUrlFectchAPI -> true
[Tabber] [7e69b0a299a7] 방송 타입 : ShopLiveStreamType(rawValue: 0)
[Tabber] [7e69b0a299a7] 방송 url -> Optional("<https://live.us.shoplive.cloud/live/45-2ekKAdlXAXgAGMgI-ue2.m3u8>")
[Tabber][HLS] onSetLiveUrl 로 URL 세팅 하려고 함 -> Optional("<https://live.us.shoplive.cloud/live/45-2ekKAdlXAXgAGMgI-ue2.m3u8>")
[Tabber] SL 오버레이 웹뷰에서 수신받음 : onSetLiveStreamUrl
[Tabber] onSetLiveStreamUrl 에서 didUpdateVideoUrl 호출함 -> Optional(<https://live.us.shoplive.cloud/live/45-2ekKAdlXAXgAGMgI-ue2.m3u8>)
[Tabber] Internal Player 플레이어 뷰 onDidUpdateVideo 호출됨
[Tabber] Internal Player 에서 didUpdateVideoUrl 호출 -> <https://live.us.shoplive.cloud/live/45-2ekKAdlXAXgAGMgI-ue2.m3u8>
[Tabber] HLSInternalPlayerViewModel 진입해서 onDidUpdateVideoUrl 호출함
[Tabber] url -> <https://live.us.shoplive.cloud/live/45-2ekKAdlXAXgAGMgI-ue2.m3u8>
[Tabber] onDidUpdateVideoUrl 에서 체크결과 liveUrl [있음] -> 리턴 종료
[Tabber] 웹뷰에서 내려주는 onSetEgressProtocolsUrl 로 didUpdateVideoUrl 호출함 | url : <https://live.us.shoplive.cloud/live/45-2ekKAdlXAXgAGMgI-ue2.m3u8>
[Tabber] Internal Player 플레이어 뷰 onDidUpdateVideo 호출됨
[Tabber] Internal Player 에서 didUpdateVideoUrl 호출 -> <https://live.us.shoplive.cloud/live/45-2ekKAdlXAXgAGMgI-ue2.m3u8>
[Tabber] HLSInternalPlayerViewModel 진입해서 onDidUpdateVideoUrl 호출함
[Tabber] url -> <https://live.us.shoplive.cloud/live/45-2ekKAdlXAXgAGMgI-ue2.m3u8>
[Tabber] onDidUpdateVideoUrl 에서 체크결과 liveUrl [있음] -> 리턴 종료
API 호출 중
activate 호출하여 nil 값으로 url 시도
API 호출 완료, 세팅
웹뷰 값 올 때까지 기다림
이미 값 있다고 방송 시청 안 때림
//
// IntegratedPlayerView.swift
// ShopLiveCorePlayerSDK
//
// Created by yong C on 10/12/25.
// Copyright © 2025 com.app. All rights reserved.
//
import Foundation
import UIKit
import AVKit
import ShopliveSDKCommon
/// A player view that renders either an HLS stream (via `AVPlayer`/`AVPlayerLayer`)
/// or a WebRTC stream (via two `AVSampleBufferDisplayLayer`s), and can switch between
/// the two pipelines in place without being recreated.
///
/// WebRTC layer roles:
/// - `inAppDisplayLayer`: the visible, full-bounds rendering layer.
/// - `osDisplayLayer`: a transparent (opacity 0) layer kept *behind* the in-app layer,
///   sized to a centered 9:16 strip and exposed via `getDisplayLayer()` — presumably
///   the source layer for OS picture-in-picture. TODO(review): confirm against the
///   PiP controller that consumes `getDisplayLayer()`.
public final class IntegratedPlayerView: UIView, ShopLiveHLSPlayerViewInterface, ShopLiveRTCPlayerViewInterface {

    // MARK: - WebRTC layers

    /// On-screen WebRTC rendering layer; laid out to fill the view's bounds.
    private lazy var inAppDisplayLayer: AVSampleBufferDisplayLayer = {
        let layer = AVSampleBufferDisplayLayer()
        layer.videoGravity = .resizeAspectFill
        layer.backgroundColor = UIColor.clear.cgColor
        return layer
    }()

    /// Hidden WebRTC layer (opacity 0) fed the same sample buffers as the in-app
    /// layer; returned by `getDisplayLayer()` for external (PiP) consumption.
    private lazy var osDisplayLayer: AVSampleBufferDisplayLayer = {
        let layer = AVSampleBufferDisplayLayer()
        layer.videoGravity = .resizeAspectFill
        layer.opacity = 0
        layer.backgroundColor = UIColor.clear.cgColor
        return layer
    }()

    // MARK: - HLS player

    /// Single shared AVPlayer instance; its item is cleared on teardown/switch,
    /// the player object itself is reused for the lifetime of the view.
    private lazy var player: AVPlayer = AVPlayer()

    private lazy var playerLayer: AVPlayerLayer = {
        let layer = AVPlayerLayer(player: player)
        layer.videoGravity = .resizeAspectFill
        layer.backgroundColor = UIColor.clear.cgColor
        return layer
    }()

    // MARK: - Callbacks

    /// NOTE(review): "Hanlder" is a typo, but this is public API — renaming would
    /// break callers. Consider adding a correctly-spelled alias and deprecating this.
    public var rtcResultHanlder: ((ShopLiveRTCPlayerViewResult) -> ())?
    public var hlsResultHandler: ((ShopLiveCorePlayerSDK.ShopLiveHLSPlayerViewResult) -> ())?

    // MARK: - State

    /// Set via `onSetVideoOrientation(isLandscape:)`; feeds `calculateOsPipLayerSize()`.
    private var isVideoOrientationLandscape: Bool = false
    /// Which pipeline is currently live. Starts as WebRTC, matching the layers
    /// installed by `setupInitialStreamType()`.
    private var currentStreamType: ShopLiveStreamType = .WEBRTC

    // MARK: - Init

    override init(frame: CGRect) {
        super.init(frame: frame)
        self.clipsToBounds = true
        setupInitialStreamType()
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    /// Installs the WebRTC layers in their initial z-order: the hidden OS layer
    /// at the bottom, the visible in-app layer above it.
    private func setupInitialStreamType() {
        layer.insertSublayer(osDisplayLayer, at: 0)
        layer.addSublayer(inAppDisplayLayer)
    }

    // MARK: - Layout

    override public func layoutSubviews() {
        super.layoutSubviews()
        switch currentStreamType {
        case .HLS:
            playerLayer.frame = bounds
        case .WEBRTC:
            inAppDisplayLayer.frame = bounds
            calculateOsPipLayerSize()
        }
    }

    // MARK: - Stream switching

    /// Switches the active pipeline, tearing down the other one. Layer swaps are
    /// wrapped in a CATransaction with actions disabled so the switch is not
    /// implicitly animated. No-op if already on the requested type.
    public func switchView(to type: ShopLiveStreamType) {
        guard currentStreamType != type else { return }
        CATransaction.begin()
        CATransaction.setDisableActions(true)
        switch type {
        case .HLS:
            switchToHLS()
        case .WEBRTC:
            switchToRTC()
        }
        CATransaction.commit()
        currentStreamType = type
        setNeedsLayout()
    }

    /// Flushes and removes the WebRTC layers, then (re)attaches the HLS player layer.
    private func switchToHLS() {
        cleanupRTCLayers()
        if playerLayer.superlayer == nil {
            layer.addSublayer(playerLayer)
        }
        playerLayer.frame = bounds
        playerLayer.player = player
    }

    /// Stops HLS playback and restores the WebRTC layer stack (OS layer below,
    /// in-app layer on top).
    private func switchToRTC() {
        cleanupHLSLayer()
        if osDisplayLayer.superlayer == nil {
            layer.insertSublayer(osDisplayLayer, at: 0)
        }
        if inAppDisplayLayer.superlayer == nil {
            layer.addSublayer(inAppDisplayLayer)
        }
        inAppDisplayLayer.frame = bounds
        calculateOsPipLayerSize()
    }

    /// Flushes any displayed frames and detaches both WebRTC layers from the view.
    private func cleanupRTCLayers() {
        if #available(iOS 17.0, *) {
            inAppDisplayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true)
            osDisplayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true)
        } else {
            inAppDisplayLayer.flushAndRemoveImage()
            osDisplayLayer.flushAndRemoveImage()
        }
        inAppDisplayLayer.removeFromSuperlayer()
        osDisplayLayer.removeFromSuperlayer()
    }

    /// Pauses playback, drops the current item, and detaches the HLS layer.
    private func cleanupHLSLayer() {
        player.pause()
        player.replaceCurrentItem(with: nil)
        playerLayer.player = nil
        playerLayer.removeFromSuperlayer()
    }

    // MARK: - RTC Actions

    /// Entry point for all WebRTC-side commands.
    public func rtcAction(_ action: ShopLiveRTCPlayerViewAction) {
        switch action {
        case .feedSampleBuffer(let buffer):
            self.onFeedSampleBuffer(buffer: buffer)
        case .requestShowOrHideInAppDisplayLayer(let needToShow):
            self.onRequestShowOrHideInAppDisplayLayer(needToShow: needToShow)
        case .setVideoGravity(let videoGravity):
            self.onSetVideoGravity(gravity: videoGravity)
        case .flushBuffer:
            onFlushBuffer()
        case .tearDown:
            self.onRTCTearDown()
        }
    }

    /// Centers `osDisplayLayer` horizontally at the given explicit size.
    /// NOTE(review): never called from any code visible in this file — possible
    /// dead code; confirm before removing.
    private func onSetOsSampleDisplayLayerSize(size: CGSize) {
        let width = self.bounds.width
        let layerWidth = size.width
        let layerHeight = size.height
        let layerXPos = (width / 2) - (layerWidth / 2)
        osDisplayLayer.frame = .init(x: layerXPos, y: 0, width: layerWidth, height: layerHeight)
    }

    /// Enqueues an incoming decoded frame into both WebRTC layers; if the OS
    /// layer's renderer has failed, flush it instead of enqueueing.
    /// Fixed: now uses the `sampleBufferRenderer` API on iOS 17+, consistent with
    /// the flush/teardown paths in this class (the legacy `status`/`enqueue(_:)`
    /// members of `AVSampleBufferDisplayLayer` are deprecated in iOS 17).
    private func onFeedSampleBuffer(buffer: CMSampleBuffer?) {
        guard let buffer = buffer,
              currentStreamType == .WEBRTC else { return }
        if #available(iOS 17.0, *) {
            if osDisplayLayer.sampleBufferRenderer.status == .failed {
                osDisplayLayer.sampleBufferRenderer.flush()
            } else {
                osDisplayLayer.sampleBufferRenderer.enqueue(buffer)
            }
            inAppDisplayLayer.sampleBufferRenderer.enqueue(buffer)
        } else {
            if osDisplayLayer.status == .failed {
                osDisplayLayer.flush()
            } else {
                osDisplayLayer.enqueue(buffer)
            }
            inAppDisplayLayer.enqueue(buffer)
        }
    }

    /// Shows/hides the OS (PiP) layer on the main queue.
    /// NOTE(review): never called from any code visible in this file — possible
    /// dead code; confirm before removing.
    private func onRequestShowOrHideOsPipLayer(needToShow: Bool) {
        DispatchQueue.main.async { [weak self] in
            self?.osDisplayLayer.isHidden = !needToShow
        }
    }

    /// Shows/hides the visible in-app layer on the main queue.
    private func onRequestShowOrHideInAppDisplayLayer(needToShow: Bool) {
        DispatchQueue.main.async { [weak self] in
            self?.inAppDisplayLayer.isHidden = !needToShow
        }
    }

    /// Applies the same video gravity to both WebRTC layers.
    private func onSetVideoGravity(gravity: AVLayerVideoGravity) {
        osDisplayLayer.videoGravity = gravity
        inAppDisplayLayer.videoGravity = gravity
    }

    /// NOTE(review): never called from any code visible in this file — possible
    /// dead code; confirm before removing.
    private func onViewDidRotate() {
        calculateOsPipLayerSize()
    }

    /// NOTE(review): never called from any code visible in this file — possible
    /// dead code; confirm before removing.
    private func onSetVideoOrientation(isLandscape: Bool) {
        self.isVideoOrientationLandscape = isLandscape
        calculateOsPipLayerSize()
    }

    /// Flushes (without removing the displayed image) and redraws both WebRTC layers.
    private func onFlushBuffer() {
        guard currentStreamType == .WEBRTC else { return }
        if #available(iOS 17.0, *) {
            inAppDisplayLayer.sampleBufferRenderer.flush()
            osDisplayLayer.sampleBufferRenderer.flush()
        } else {
            inAppDisplayLayer.flush()
            osDisplayLayer.flush()
        }
        inAppDisplayLayer.setNeedsDisplay()
        osDisplayLayer.setNeedsDisplay()
    }

    /// Drops all buffered/displayed frames and hides both WebRTC layers.
    /// Unlike `cleanupRTCLayers()`, the layers stay in the layer tree.
    private func onRTCTearDown() {
        if #available(iOS 17.0, *) {
            inAppDisplayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true)
            osDisplayLayer.sampleBufferRenderer.flush(removingDisplayedImage: true)
        } else {
            inAppDisplayLayer.flushAndRemoveImage()
            osDisplayLayer.flushAndRemoveImage()
        }
        inAppDisplayLayer.isHidden = true
        osDisplayLayer.isHidden = true
    }

    /// Returns the hidden OS/PiP display layer.
    public func getDisplayLayer() -> AVSampleBufferDisplayLayer? {
        return self.osDisplayLayer
    }

    /// Returns the in-app layer's current video gravity.
    public func getInAppDisplayLayerVideoGravity() -> AVLayerVideoGravity? {
        return self.inAppDisplayLayer.videoGravity
    }

    // MARK: - HLS Actions

    /// Entry point for all HLS-side commands.
    public func hlsAction(_ action: ShopLiveHLSPlayerViewAction) {
        switch action {
        case .refreshLayer(let videoGravity):
            self.onRefreshLayer(videoGravity: videoGravity)
        case .setVideoGravity(let videoGravity):
            self.onSetVideoGravitiy(videoGravity: videoGravity)
        case .tearDown:
            self.onHLSTearDown()
        }
    }

    /// Reattaches the player to its layer and resizes it; no-op unless HLS is active.
    private func onRefreshLayer(videoGravity: AVLayerVideoGravity) {
        guard currentStreamType == .HLS else { return }
        playerLayer.videoGravity = videoGravity
        playerLayer.player = player
        playerLayer.frame = self.bounds
    }

    /// NOTE(review): "Gravitiy" is a typo, but the method is private and only
    /// dispatched from `hlsAction`, so it is kept to preserve a minimal diff.
    private func onSetVideoGravitiy(videoGravity: AVLayerVideoGravity) {
        playerLayer.videoGravity = videoGravity
    }

    /// Stops HLS playback and detaches the player from its layer (layer stays
    /// in the tree, unlike `cleanupHLSLayer()`).
    private func onHLSTearDown() {
        player.pause()
        player.replaceCurrentItem(with: nil)
        playerLayer.player = nil
    }

    /// Returns the shared AVPlayer instance.
    public func getAVPlayer() -> AVPlayer? {
        return self.player
    }

    /// Returns the HLS player layer.
    public func getPlayerLayer() -> AVPlayerLayer? {
        return self.playerLayer
    }

    /// NOTE(review): never called from any code visible in this file — possible
    /// dead code; confirm before removing.
    private func setLayout() {
        if self.playerLayer.superlayer == nil {
            self.layer.addSublayer(self.playerLayer)
        }
        self.setNeedsLayout()
    }
}
// MARK: - Private Helpers
extension IntegratedPlayerView {
    /// Sizes `osDisplayLayer` to a horizontally-centered 9:16 strip spanning the
    /// view's full height (width = height * 9/16, x centered, y = 0).
    ///
    /// FIXME(review): in the original, the landscape and portrait branches of
    /// `isVideoOrientationLandscape` computed byte-identical frames, so the flag
    /// had no effect here. The duplicate branch is collapsed below — behavior is
    /// unchanged — but confirm whether landscape was meant to use a different
    /// layout (e.g. full width with 16:9 height).
    private func calculateOsPipLayerSize() {
        let layerHeight = self.bounds.height
        let layerWidth = layerHeight * (9.0 / 16.0)
        let layerXPos = (self.bounds.width - layerWidth) / 2
        osDisplayLayer.frame = .init(x: layerXPos, y: 0, width: layerWidth, height: layerHeight)
    }
}