diff --git a/packages/react-native-sdk/__tests__/components/CallParticipantsGrid.test.tsx b/packages/react-native-sdk/__tests__/components/CallParticipantsGrid.test.tsx index 874c528946..8a8f837325 100644 --- a/packages/react-native-sdk/__tests__/components/CallParticipantsGrid.test.tsx +++ b/packages/react-native-sdk/__tests__/components/CallParticipantsGrid.test.tsx @@ -47,6 +47,11 @@ describe('CallParticipantsGrid', () => { }, ); + // Advance timers to allow RxJS debounceTime subscriptions to emit + await act(() => { + jest.advanceTimersByTime(300); + }); + expect( await screen.findByTestId(ComponentTestIds.CALL_PARTICIPANTS_GRID), ).toBeVisible(); @@ -104,6 +109,11 @@ describe('CallParticipantsGrid', () => { }, ); + // Advance timers to allow RxJS debounceTime subscriptions to emit + await act(() => { + jest.advanceTimersByTime(300); + }); + const visibleParticipantsItems = call.state.participants.map((p) => ({ key: p.sessionId, item: 'some-item', @@ -147,7 +157,7 @@ const simulateOnViewableItemsChanged = async ( viewableItems, }); // Advance pending timers to allow the FlatList to rerender - // This is needed because of useDebouncedValue we use in + // This is needed because of RxJS debounceTime we use in // forceUpdateValue to force rerender the FlatList jest.advanceTimersByTime(500); }); diff --git a/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureAvatarView.swift b/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureAvatarView.swift new file mode 100644 index 0000000000..e7b7b79531 --- /dev/null +++ b/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureAvatarView.swift @@ -0,0 +1,273 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import UIKit + +/// A view that displays an avatar placeholder when video is disabled in PiP mode. +/// Shows either a loaded image from URL, initials, or a default person icon. 
+final class PictureInPictureAvatarView: UIView { + + // MARK: - Properties + + /// The participant's name, used to generate initials + var participantName: String? { + didSet { + PictureInPictureLogger.log("AvatarView.participantName didSet: '\(participantName ?? "nil")'") + updateInitials() + } + } + + /// The URL string for the participant's profile image + var imageURL: String? { + didSet { + loadImage() + } + } + + /// Whether video is enabled - when true, the avatar should be hidden (alpha = 0) + /// Note: We use alpha instead of isHidden to match upstream SwiftUI behavior. + /// Using isHidden can cause layout issues because iOS may skip layoutSubviews for hidden views. + var isVideoEnabled: Bool = true { + didSet { + updateVisibility() + // When becoming visible (video disabled), refresh content to ensure initials are shown + // This is needed when the same avatarView instance is reused across PiP sessions + if !isVideoEnabled { + PictureInPictureLogger.log("AvatarView isVideoEnabled=false, refreshing content") + updateInitials() + } + } + } + + // MARK: - Private Properties + + private let containerView: UIView = { + let view = UIView() + view.translatesAutoresizingMaskIntoConstraints = false + view.backgroundColor = UIColor(red: 0.12, green: 0.13, blue: 0.15, alpha: 1.0) // Dark background + return view + }() + + private let avatarContainerView: UIView = { + let view = UIView() + view.translatesAutoresizingMaskIntoConstraints = false + view.backgroundColor = UIColor(red: 0.0, green: 0.47, blue: 1.0, alpha: 1.0) // Stream blue + view.clipsToBounds = true + return view + }() + + private let initialsLabel: UILabel = { + let label = UILabel() + label.translatesAutoresizingMaskIntoConstraints = false + label.textColor = .white + label.textAlignment = .center + label.font = UIFont.systemFont(ofSize: 32, weight: .semibold) + label.adjustsFontSizeToFitWidth = true + label.minimumScaleFactor = 0.5 + return label + }() + + private let imageView: UIImageView = { + 
let imageView = UIImageView() + imageView.translatesAutoresizingMaskIntoConstraints = false + imageView.contentMode = .scaleAspectFill + imageView.clipsToBounds = true + imageView.isHidden = true + return imageView + }() + + private let placeholderImageView: UIImageView = { + let imageView = UIImageView() + imageView.translatesAutoresizingMaskIntoConstraints = false + imageView.contentMode = .scaleAspectFit + imageView.tintColor = .white + // Use SF Symbol for person icon + if let personImage = UIImage(systemName: "person.fill") { + imageView.image = personImage + } + imageView.isHidden = true + return imageView + }() + + private var currentImageLoadTask: URLSessionDataTask? + private var avatarSizeConstraints: [NSLayoutConstraint] = [] + + // MARK: - Lifecycle + + override init(frame: CGRect) { + super.init(frame: frame) + setUp() + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + override func layoutSubviews() { + super.layoutSubviews() + PictureInPictureLogger.log("AvatarView layoutSubviews: bounds=\(bounds), isHidden=\(isHidden)") + updateAvatarSize() + } + + // MARK: - Private Helpers + + private func setUp() { + addSubview(containerView) + containerView.addSubview(avatarContainerView) + avatarContainerView.addSubview(initialsLabel) + avatarContainerView.addSubview(imageView) + avatarContainerView.addSubview(placeholderImageView) + + NSLayoutConstraint.activate([ + containerView.leadingAnchor.constraint(equalTo: leadingAnchor), + containerView.trailingAnchor.constraint(equalTo: trailingAnchor), + containerView.topAnchor.constraint(equalTo: topAnchor), + containerView.bottomAnchor.constraint(equalTo: bottomAnchor), + + avatarContainerView.centerXAnchor.constraint(equalTo: containerView.centerXAnchor), + avatarContainerView.centerYAnchor.constraint(equalTo: containerView.centerYAnchor), + + initialsLabel.leadingAnchor.constraint(equalTo: avatarContainerView.leadingAnchor, constant: 4), + 
initialsLabel.trailingAnchor.constraint(equalTo: avatarContainerView.trailingAnchor, constant: -4), + initialsLabel.topAnchor.constraint(equalTo: avatarContainerView.topAnchor, constant: 4), + initialsLabel.bottomAnchor.constraint(equalTo: avatarContainerView.bottomAnchor, constant: -4), + + imageView.leadingAnchor.constraint(equalTo: avatarContainerView.leadingAnchor), + imageView.trailingAnchor.constraint(equalTo: avatarContainerView.trailingAnchor), + imageView.topAnchor.constraint(equalTo: avatarContainerView.topAnchor), + imageView.bottomAnchor.constraint(equalTo: avatarContainerView.bottomAnchor), + + placeholderImageView.centerXAnchor.constraint(equalTo: avatarContainerView.centerXAnchor), + placeholderImageView.centerYAnchor.constraint(equalTo: avatarContainerView.centerYAnchor), + placeholderImageView.widthAnchor.constraint(equalTo: avatarContainerView.widthAnchor, multiplier: 0.5), + placeholderImageView.heightAnchor.constraint(equalTo: avatarContainerView.heightAnchor, multiplier: 0.5) + ]) + + updateAvatarSize() + updateVisibility() + // Ensure initial content state is correct (show placeholder when no name/image) + updateInitials() + } + + private func updateAvatarSize() { + // Remove old constraints + NSLayoutConstraint.deactivate(avatarSizeConstraints) + + // Avatar size should be about 40% of the smaller dimension + let minDimension = min(bounds.width, bounds.height) + let avatarSize = max(minDimension * 0.4, 60) // Minimum 60pt + + PictureInPictureLogger.log("AvatarView updateAvatarSize: bounds=\(bounds), minDimension=\(minDimension), avatarSize=\(avatarSize)") + + avatarSizeConstraints = [ + avatarContainerView.widthAnchor.constraint(equalToConstant: avatarSize), + avatarContainerView.heightAnchor.constraint(equalToConstant: avatarSize) + ] + NSLayoutConstraint.activate(avatarSizeConstraints) + + // Force immediate layout to apply the new constraints + // This is needed because constraints set during layoutSubviews + // won't be resolved until the 
next layout pass otherwise + containerView.setNeedsLayout() + containerView.layoutIfNeeded() + + // Update corner radius after layout is complete + avatarContainerView.layer.cornerRadius = avatarContainerView.bounds.width / 2 + + PictureInPictureLogger.log("AvatarView updateAvatarSize FINAL: avatarContainer.frame=\(avatarContainerView.frame)") + } + + private func updateVisibility() { + // Hide avatar when video is enabled using alpha (not isHidden) + // Using alpha instead of isHidden ensures layoutSubviews is always called, + // which is critical for proper constraint-based layout. This matches + // upstream SwiftUI's opacity-based visibility switching. + let newAlpha: CGFloat = isVideoEnabled ? 0 : 1 + PictureInPictureLogger.log("AvatarView updateVisibility: isVideoEnabled=\(isVideoEnabled), setting alpha=\(newAlpha)") + alpha = newAlpha + + // Force layout update when becoming visible to ensure proper sizing + if !isVideoEnabled { + PictureInPictureLogger.log("AvatarView updateVisibility: becoming visible, forcing layout") + setNeedsLayout() + layoutIfNeeded() + } + } + + private func updateInitials() { + guard let name = participantName, !name.isEmpty else { + PictureInPictureLogger.log("AvatarView updateInitials: no name, showing placeholder. avatarContainer.frame=\(avatarContainerView.frame)") + initialsLabel.text = nil + initialsLabel.isHidden = true + // Show placeholder when there's no image loaded + placeholderImageView.isHidden = imageView.image != nil + return + } + + let initials = generateInitials(from: name) + PictureInPictureLogger.log("AvatarView updateInitials: name=\(name), initials=\(initials), imageView.image=\(imageView.image != nil ? 
"loaded" : "nil"), avatarContainer.frame=\(avatarContainerView.frame)") + initialsLabel.text = initials + initialsLabel.isHidden = imageView.image != nil + placeholderImageView.isHidden = true + } + + private func generateInitials(from name: String) -> String { + let components = name.split(separator: " ") + if components.count >= 2 { + let first = components[0].prefix(1) + let last = components[1].prefix(1) + return "\(first)\(last)".uppercased() + } else if let first = components.first { + return String(first.prefix(2)).uppercased() + } + return "" + } + + private func loadImage() { + // Cancel any existing task + currentImageLoadTask?.cancel() + currentImageLoadTask = nil + + guard let urlString = imageURL, !urlString.isEmpty, let url = URL(string: urlString) else { + imageView.image = nil + imageView.isHidden = true + updateInitials() + return + } + + let requestURLString = urlString + + // Load image asynchronously + var requestTask: URLSessionDataTask? + let task = URLSession.shared.dataTask(with: url) { [weak self] data, _, error in + DispatchQueue.main.async { [weak self] in + guard let self = self else { return } + guard let requestTask else { return } + guard self.currentImageLoadTask === requestTask else { return } + defer { self.currentImageLoadTask = nil } + + // Ignore stale/cancelled responses so only the latest request can mutate UI. 
+ if let nsError = error as NSError?, nsError.code == NSURLErrorCancelled { + return + } + guard self.imageURL == requestURLString else { return } + + guard error == nil, let data = data, let image = UIImage(data: data) else { + self.imageView.image = nil + self.imageView.isHidden = true + self.updateInitials() + return + } + + self.imageView.image = image + self.imageView.isHidden = false + self.initialsLabel.isHidden = true + self.placeholderImageView.isHidden = true + } + } + requestTask = task + currentImageLoadTask = task + task.resume() + } +} diff --git a/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureConnectionQualityIndicator.swift b/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureConnectionQualityIndicator.swift new file mode 100644 index 0000000000..2efc01b045 --- /dev/null +++ b/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureConnectionQualityIndicator.swift @@ -0,0 +1,162 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import UIKit + +/// A view representing a connection quality indicator for Picture-in-Picture. +/// Displays three vertical bars that indicate connection quality levels: +/// - Excellent: All 3 bars green +/// - Good: 2 bars green, 1 bar gray +/// - Poor: 1 bar red, 2 bars gray +/// - Unknown: All bars hidden +/// This aligns with upstream stream-video-swift ConnectionQualityIndicator. 
+final class PictureInPictureConnectionQualityIndicator: UIView { + + // MARK: - Connection Quality Enum + + /// Connection quality levels matching the stream-video-swift/video-client enum + enum ConnectionQuality: Int { + case unspecified = 0 // Unknown + case poor = 1 + case good = 2 + case excellent = 3 + } + + // MARK: - Properties + + /// The current connection quality level + var connectionQuality: ConnectionQuality = .unspecified { + didSet { + updateIndicator() + } + } + + /// Size of the indicator view + private let indicatorSize: CGFloat = 24 + + /// Width of each bar + private let barWidth: CGFloat = 3 + + /// Spacing between bars + private let barSpacing: CGFloat = 2 + + // MARK: - Colors + + private let goodColor = UIColor(red: 0.2, green: 0.8, blue: 0.4, alpha: 1.0) // Green + private let badColor = UIColor(red: 0.9, green: 0.3, blue: 0.3, alpha: 1.0) // Red + private let inactiveColor = UIColor.white.withAlphaComponent(0.5) + + // MARK: - UI Components + + /// Background container with rounded corner + private lazy var containerView: UIView = { + let view = UIView() + view.translatesAutoresizingMaskIntoConstraints = false + view.backgroundColor = UIColor.black.withAlphaComponent(0.6) + // Apply rounded corner only to top-left + view.layer.cornerRadius = 8 + view.layer.maskedCorners = [.layerMinXMinYCorner] // top-left only + return view + }() + + /// Stack view containing the three bars + private lazy var barsStackView: UIStackView = { + let stack = UIStackView() + stack.translatesAutoresizingMaskIntoConstraints = false + stack.axis = .horizontal + stack.alignment = .bottom + stack.spacing = barSpacing + stack.distribution = .equalSpacing + return stack + }() + + /// First (shortest) bar + private lazy var bar1: UIView = { + createBar(height: barWidth * 2) + }() + + /// Second (medium) bar + private lazy var bar2: UIView = { + createBar(height: barWidth * 3) + }() + + /// Third (tallest) bar + private lazy var bar3: UIView = { + createBar(height: 
barWidth * 4) + }() + + // MARK: - Initialization + + override init(frame: CGRect) { + super.init(frame: frame) + setUp() + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + // MARK: - Private Methods + + private func setUp() { + isUserInteractionEnabled = false + isHidden = true // Hidden by default (unknown quality) + + addSubview(containerView) + containerView.addSubview(barsStackView) + + barsStackView.addArrangedSubview(bar1) + barsStackView.addArrangedSubview(bar2) + barsStackView.addArrangedSubview(bar3) + + NSLayoutConstraint.activate([ + containerView.trailingAnchor.constraint(equalTo: trailingAnchor), + containerView.bottomAnchor.constraint(equalTo: bottomAnchor), + containerView.widthAnchor.constraint(equalToConstant: indicatorSize), + containerView.heightAnchor.constraint(equalToConstant: indicatorSize), + + barsStackView.centerXAnchor.constraint(equalTo: containerView.centerXAnchor), + barsStackView.centerYAnchor.constraint(equalTo: containerView.centerYAnchor) + ]) + + updateIndicator() + } + + private func createBar(height: CGFloat) -> UIView { + let bar = UIView() + bar.translatesAutoresizingMaskIntoConstraints = false + bar.backgroundColor = inactiveColor + bar.layer.cornerRadius = 1 + bar.layer.masksToBounds = true + + NSLayoutConstraint.activate([ + bar.widthAnchor.constraint(equalToConstant: barWidth), + bar.heightAnchor.constraint(equalToConstant: height) + ]) + + return bar + } + + private func updateIndicator() { + switch connectionQuality { + case .excellent: + isHidden = false + bar1.backgroundColor = goodColor + bar2.backgroundColor = goodColor + bar3.backgroundColor = goodColor + case .good: + isHidden = false + bar1.backgroundColor = goodColor + bar2.backgroundColor = goodColor + bar3.backgroundColor = inactiveColor + case .poor: + isHidden = false + bar1.backgroundColor = badColor + bar2.backgroundColor = inactiveColor + bar3.backgroundColor = inactiveColor + case .unspecified: + 
isHidden = true + } + } +} diff --git a/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureContent.swift b/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureContent.swift new file mode 100644 index 0000000000..b2c2c2302f --- /dev/null +++ b/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureContent.swift @@ -0,0 +1,173 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// +// Adapted from stream-video-swift for React Native SDK +// Original: https://github.com/GetStream/stream-video-swift/blob/develop/Sources/StreamVideoSwiftUI/Utils/PictureInPicture/PictureInPictureContent.swift +// + +import Foundation + +/// Represents the content state for the Picture-in-Picture window. +/// +/// This enum defines the different states that the PiP window can display: +/// - `inactive`: No content is being shown (PiP is not active) +/// - `video`: Live video from a participant (camera or screen share) +/// - `avatar`: Participant avatar placeholder (when video is disabled) +/// - `screenSharing`: Screen share content with indicator overlay +/// - `reconnecting`: Connection recovery indicator +/// +/// The React Native SDK receives content state from the JavaScript layer through +/// the bridge, unlike the upstream Swift SDK which observes call state internally. +enum PictureInPictureContent: Equatable, CustomStringConvertible { + /// No content - PiP is inactive or transitioning + case inactive + + /// Video content from a participant + /// - Parameters: + /// - track: The WebRTC video track to render + /// - participantName: The participant's display name (for fallback) + /// - participantImageURL: URL to participant's profile image (for fallback) + case video(track: RTCVideoTrack?, participantName: String?, participantImageURL: String?) 
+ + /// Screen sharing content + /// - Parameters: + /// - track: The WebRTC video track containing screen share + /// - participantName: Name of the participant sharing their screen + case screenSharing(track: RTCVideoTrack?, participantName: String?) + + /// Avatar placeholder shown when video is disabled + /// - Parameters: + /// - participantName: The participant's display name (for initials) + /// - participantImageURL: URL to participant's profile image + case avatar(participantName: String?, participantImageURL: String?) + + /// Connection recovery indicator + case reconnecting + + // MARK: - CustomStringConvertible + + var description: String { + switch self { + case .inactive: + return ".inactive" + case let .video(track, name, _): + return ".video(track:\(track?.trackId ?? "nil"), name:\(name ?? "-"))" + case let .screenSharing(track, name): + return ".screenSharing(track:\(track?.trackId ?? "nil"), name:\(name ?? "-"))" + case let .avatar(name, _): + return ".avatar(name:\(name ?? "-"))" + case .reconnecting: + return ".reconnecting" + } + } + + // MARK: - Equatable + + static func == (lhs: PictureInPictureContent, rhs: PictureInPictureContent) -> Bool { + switch (lhs, rhs) { + case (.inactive, .inactive): + return true + case let (.video(lhsTrack, lhsName, lhsImage), .video(rhsTrack, rhsName, rhsImage)): + return isSameTrackInstance(lhsTrack, rhsTrack) + && lhsName == rhsName + && lhsImage == rhsImage + case let (.screenSharing(lhsTrack, lhsName), .screenSharing(rhsTrack, rhsName)): + return isSameTrackInstance(lhsTrack, rhsTrack) + && lhsName == rhsName + case let (.avatar(lhsName, lhsImage), .avatar(rhsName, rhsImage)): + return lhsName == rhsName + && lhsImage == rhsImage + case (.reconnecting, .reconnecting): + return true + default: + return false + } + } + + /// Track identity must be reference-based so reconnect-created tracks + /// with reused `trackId` still propagate through content updates. 
+ private static func isSameTrackInstance(_ lhs: RTCVideoTrack?, _ rhs: RTCVideoTrack?) -> Bool { + switch (lhs, rhs) { + case (nil, nil): + return true + case let (lhsTrack?, rhsTrack?): + return lhsTrack === rhsTrack + default: + return false + } + } + + // MARK: - Convenience Properties + + /// Returns the video track if this content has one, nil otherwise + var track: RTCVideoTrack? { + switch self { + case let .video(track, _, _): + return track + case let .screenSharing(track, _): + return track + case .inactive, .avatar, .reconnecting: + return nil + } + } + + /// Returns the participant name if available + var participantName: String? { + switch self { + case let .video(_, name, _): + return name + case let .screenSharing(_, name): + return name + case let .avatar(name, _): + return name + case .inactive, .reconnecting: + return nil + } + } + + /// Returns the participant image URL if available + var participantImageURL: String? { + switch self { + case let .video(_, _, imageURL): + return imageURL + case let .avatar(_, imageURL): + return imageURL + case .inactive, .screenSharing, .reconnecting: + return nil + } + } + + /// Whether this content represents an active video stream + var hasActiveVideo: Bool { + switch self { + case .video, .screenSharing: + return true + case .inactive, .avatar, .reconnecting: + return false + } + } + + /// Whether this content is screen sharing + var isScreenSharing: Bool { + if case .screenSharing = self { + return true + } + return false + } + + /// Whether this content shows an avatar + var isShowingAvatar: Bool { + if case .avatar = self { + return true + } + return false + } + + /// Whether this content shows the reconnection view + var isReconnecting: Bool { + if case .reconnecting = self { + return true + } + return false + } +} diff --git a/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureContentState.swift b/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureContentState.swift new file mode 
100644 index 0000000000..7676749b0f --- /dev/null +++ b/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureContentState.swift @@ -0,0 +1,123 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// +// Adapted from stream-video-swift PictureInPictureStore for React Native SDK +// The React Native SDK receives state from the JavaScript bridge rather than +// observing call state internally, so this is a simplified state container. +// + +import Combine +import Foundation + +/// Manages the content state for the Picture-in-Picture window. +/// +/// This class provides centralized state management for the PiP content view system. +/// Unlike the upstream `PictureInPictureStore` which uses a Flux-like action/dispatch pattern, +/// this implementation is optimized for the React Native bridge where state updates come +/// from the JavaScript layer. +/// +/// State changes are published via Combine to allow reactive updates in the view layer. +/// +/// Concurrency model: +/// - This state container is main-thread confined. +/// - `RTCVideoTrack` references are never sent across queues. +final class PictureInPictureContentState { + + /// A full state snapshot that can be applied atomically. + struct Snapshot { + var track: RTCVideoTrack? + var participantName: String? + var participantImageURL: String? + var isVideoEnabled: Bool + var isScreenSharing: Bool + var isReconnecting: Bool + } + + // MARK: - Published State + + /// The current content being displayed in the PiP window. + @Published private(set) var content: PictureInPictureContent = .inactive + + /// Publisher for observing content changes. + var contentPublisher: AnyPublisher { + $content.eraseToAnyPublisher() + } + + // MARK: - Private + + private var snapshot: Snapshot = makeDefaultSnapshot() + + // MARK: - Initialization + + init() {} + + // MARK: - State Update + + /// Applies all content inputs in one step to avoid parallel update paths. 
+ func apply(_ snapshot: Snapshot) { + ensureMainThread() + self.snapshot = snapshot + publishIfNeeded(for: snapshot) + } + + /// Resets all state to defaults. + /// Called when cleaning up after a call ends. + func reset() { + ensureMainThread() + snapshot = Self.makeDefaultSnapshot() + if content != .inactive { + content = .inactive + } + } + + /// Computes and publishes content based on the latest snapshot. + private func publishIfNeeded(for snapshot: Snapshot) { + let newContent: PictureInPictureContent + + // Priority order: reconnecting > screen sharing > avatar (video disabled) > video > avatar fallback + if snapshot.isReconnecting { + newContent = .reconnecting + } else if snapshot.isScreenSharing { + newContent = .screenSharing( + track: snapshot.track, + participantName: snapshot.participantName + ) + } else if !snapshot.isVideoEnabled { + newContent = .avatar( + participantName: snapshot.participantName, + participantImageURL: snapshot.participantImageURL + ) + } else if snapshot.isVideoEnabled, snapshot.track != nil { + newContent = .video( + track: snapshot.track, + participantName: snapshot.participantName, + participantImageURL: snapshot.participantImageURL + ) + } else { + newContent = .avatar( + participantName: snapshot.participantName, + participantImageURL: snapshot.participantImageURL + ) + } + + if content != newContent { + content = newContent + } + } + + /// PiP content state is expected to be mutated on the main thread only. 
+ private func ensureMainThread() { + dispatchPrecondition(condition: .onQueue(.main)) + } + + private static func makeDefaultSnapshot() -> Snapshot { + Snapshot( + track: nil, + participantName: nil, + participantImageURL: nil, + isVideoEnabled: true, + isScreenSharing: false, + isReconnecting: false + ) + } +} diff --git a/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureDelegateProxy.swift b/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureDelegateProxy.swift new file mode 100644 index 0000000000..35684de290 --- /dev/null +++ b/packages/react-native-sdk/ios/PictureInPicture/PictureInPictureDelegateProxy.swift @@ -0,0 +1,89 @@ +// +// Copyright © 2024 Stream.io Inc. All rights reserved. +// + +import AVKit +import Combine + +/// A wrapper around AVPictureInPictureControllerDelegate that publishes all +/// delegate method calls via a single Combine publisher. +/// +/// This proxy enables reactive handling of PiP lifecycle events and allows +/// multiple subscribers to observe PiP state changes through a unified interface. +final class PictureInPictureDelegateProxy: NSObject, AVPictureInPictureControllerDelegate { + + /// Enum representing each AVPictureInPictureControllerDelegate method call + /// with its respective associated values. 
+ enum Event: CustomStringConvertible { + case willStart(AVPictureInPictureController) + case didStart(AVPictureInPictureController) + case failedToStart(AVPictureInPictureController, Error) + case willStop(AVPictureInPictureController) + case didStop(AVPictureInPictureController) + case restoreUI(AVPictureInPictureController, (Bool) -> Void) + + var description: String { + switch self { + case .willStart: + return ".willStart" + case .didStart: + return ".didStart" + case let .failedToStart(_, error): + return ".failedToStart(error: \(error.localizedDescription))" + case .willStop: + return ".willStop" + case .didStop: + return ".didStop" + case .restoreUI: + return ".restoreUI" + } + } + } + + /// The Combine publisher that emits Picture-in-Picture delegate events. + var publisher: AnyPublisher { + eventSubject.eraseToAnyPublisher() + } + + private let eventSubject = PassthroughSubject() + + // MARK: - AVPictureInPictureControllerDelegate + + func pictureInPictureControllerWillStartPictureInPicture( + _ pictureInPictureController: AVPictureInPictureController + ) { + eventSubject.send(.willStart(pictureInPictureController)) + } + + func pictureInPictureControllerDidStartPictureInPicture( + _ pictureInPictureController: AVPictureInPictureController + ) { + eventSubject.send(.didStart(pictureInPictureController)) + } + + func pictureInPictureController( + _ pictureInPictureController: AVPictureInPictureController, + failedToStartPictureInPictureWithError error: Error + ) { + eventSubject.send(.failedToStart(pictureInPictureController, error)) + } + + func pictureInPictureControllerWillStopPictureInPicture( + _ pictureInPictureController: AVPictureInPictureController + ) { + eventSubject.send(.willStop(pictureInPictureController)) + } + + func pictureInPictureControllerDidStopPictureInPicture( + _ pictureInPictureController: AVPictureInPictureController + ) { + eventSubject.send(.didStop(pictureInPictureController)) + } + + func pictureInPictureController( + _ 
/// An adapter responsible for enforcing the stop of Picture-in-Picture
/// playback when the application returns to the foreground.
///
/// This adapter listens to application state changes and PiP activity to ensure
/// PiP is stopped when the app becomes active (foreground). This behavior matches
/// iOS user expectations where PiP should dismiss when returning to the app.
final class PictureInPictureEnforcedStopAdapter {

    private enum DisposableKey: String {
        case stopEnforceOperation
    }

    private enum ApplicationState: Equatable {
        case foreground
        case background
        case unknown
    }

    /// Refresh-rate-based timer interval used for enforcement attempts.
    private let refreshRate: TimeInterval

    /// Lifecycle subscriptions.
    /// NOTE(review): the mangled source showed `Set = []` — the generic
    /// parameter was stripped during extraction; restored here.
    private var cancellables: Set<AnyCancellable> = []

    /// Keyed operations that can be replaced/cancelled independently.
    private var operationCancellables: [String: AnyCancellable] = [:]

    /// Initializes the adapter with a Picture-in-Picture controller and
    /// starts observing application state and PiP activity to enforce stop.
    ///
    /// - Parameter pictureInPictureController: The PiP controller to manage.
    init(_ pictureInPictureController: StreamPictureInPictureControllerProtocol) {
        refreshRate = Self.makeRefreshRate()

        // Keep enforcement strictly state-driven: we only run the stop loop
        // while the app is foregrounded *and* PiP is still active.
        Publishers
            .CombineLatest(
                Self.makeApplicationStatePublisher(),
                pictureInPictureController
                    .isPictureInPictureActivePublisher
                    .removeDuplicates()
            )
            .receive(on: DispatchQueue.main)
            .sink { [weak self, weak pictureInPictureController] applicationState, isActive in
                self?.didUpdate(
                    applicationState: applicationState,
                    isPictureInPictureActive: isActive,
                    pictureInPictureController: pictureInPictureController
                )
            }
            .store(in: &cancellables)
    }

    deinit {
        cancellables.removeAll()
        removeAllOperations()
    }

    // MARK: - Private helpers

    /// Reacts to a combined (application state, PiP active) transition.
    /// The stop-enforcement loop runs only in the (foreground, active) state.
    private func didUpdate(
        applicationState: ApplicationState,
        isPictureInPictureActive: Bool,
        pictureInPictureController: StreamPictureInPictureControllerProtocol?
    ) {
        switch (applicationState, isPictureInPictureActive) {
        case (.foreground, true):
            // Foreground + active PiP is the only state where we enforce stop.
            startStopEnforcement(for: pictureInPictureController)
        default:
            // Any other state (background/inactive PiP) should tear down the loop.
            removeOperation(for: DisposableKey.stopEnforceOperation.rawValue)
        }
    }

    /// Starts (or restarts) the repeating stop-enforcement timer for the
    /// given controller. Passing `nil` tears the loop down instead.
    private func startStopEnforcement(
        for pictureInPictureController: StreamPictureInPictureControllerProtocol?
    ) {
        guard let pictureInPictureController else {
            removeOperation(for: DisposableKey.stopEnforceOperation.rawValue)
            return
        }

        let operation = Timer
            .publish(every: refreshRate, on: .main, in: .common)
            .autoconnect()
            .filter { _ in
                UIApplication.shared.applicationState == .active
            }
            .sink { [weak pictureInPictureController] _ in
                // Calling stop repeatedly at display cadence covers cases where
                // AVKit does not settle PiP shutdown on the first attempt.
                pictureInPictureController?.stopPictureInPicture()
            }

        store(operation, key: DisposableKey.stopEnforceOperation.rawValue)
    }

    /// Stores a cancellable under a key, cancelling any previous one first.
    private func store(_ operation: AnyCancellable, key: String) {
        // Keyed replacement ensures exactly one enforcement loop is active.
        removeOperation(for: key)
        operationCancellables[key] = operation
    }

    /// Cancels and removes the keyed operation, if any.
    private func removeOperation(for key: String) {
        operationCancellables[key]?.cancel()
        operationCancellables[key] = nil
    }

    /// Cancels and removes every keyed operation.
    private func removeAllOperations() {
        operationCancellables.values.forEach { $0.cancel() }
        operationCancellables.removeAll()
    }

    /// Builds a publisher of coarse application lifecycle states from
    /// NotificationCenter, seeded with the current state.
    /// NOTE(review): return type restored from stripped `AnyPublisher`
    /// to `AnyPublisher<ApplicationState, Never>`.
    private static func makeApplicationStatePublisher(
        notificationCenter: NotificationCenter = .default
    ) -> AnyPublisher<ApplicationState, Never> {
        let foreground = Publishers.Merge(
            notificationCenter
                .publisher(for: UIApplication.willEnterForegroundNotification)
                .map { _ in ApplicationState.foreground },
            notificationCenter
                .publisher(for: UIApplication.didBecomeActiveNotification)
                .map { _ in ApplicationState.foreground }
        )
        let background = notificationCenter
            .publisher(for: UIApplication.didEnterBackgroundNotification)
            .map { _ in ApplicationState.background }

        return Publishers.Merge(foreground, background)
            // Emit the current app state immediately so newly created adapters
            // do not wait for the next lifecycle notification.
            .prepend(currentApplicationState())
            .removeDuplicates()
            .eraseToAnyPublisher()
    }

    /// Maps UIKit's application state into the adapter's coarse state model.
    private static func currentApplicationState() -> ApplicationState {
        switch UIApplication.shared.applicationState {
        case .active:
            return .foreground
        case .background:
            return .background
        case .inactive:
            return .unknown
        @unknown default:
            return .unknown
        }
    }

    /// Derives the enforcement timer interval from the display refresh rate.
    private static func makeRefreshRate() -> TimeInterval {
        // Keep cadence aligned to the device's display refresh rate while
        // enforcing a practical minimum (30fps) for older/limited devices.
        let maximumFramesPerSecond = max(30, UIScreen.main.maximumFramesPerSecond)
        return 1.0 / Double(maximumFramesPerSecond)
    }
}

/// Centralized logger for Picture in Picture diagnostics.
///
/// Logging is debug-only to avoid production noise and overhead.
enum PictureInPictureLogger {
    static func log(_ message: @autoclosure () -> String) {
        #if DEBUG
        NSLog("PiP - %@", message())
        #endif
    }
}
/// A view that displays participant information overlay in Picture-in-Picture mode.
/// Shows participant name, pin indicator, sound indicator, and video paused indicator
/// at the bottom-left of the PiP window.
/// This aligns with upstream stream-video-swift ParticipantInfoView.
final class PictureInPictureParticipantOverlayView: UIView {

    // MARK: - Properties

    /// The participant's name to display.
    var participantName: String? {
        didSet {
            nameLabel.text = participantName
            updateVisibility()
        }
    }

    /// Whether the participant is pinned.
    var isPinned: Bool = false {
        didSet { pinIconView.isHidden = !isPinned }
    }

    /// Whether the participant has audio enabled (not muted).
    var hasAudio: Bool = true {
        didSet { updateSoundIndicator() }
    }

    /// Whether the video track is paused/disabled.
    var isTrackPaused: Bool = false {
        didSet { updateVideoPausedIndicator() }
    }

    /// Controls whether the overlay is shown.
    var isOverlayEnabled: Bool = true {
        didSet { updateVisibility() }
    }

    // MARK: - UI Components

    /// Container for the bottom info bar with gradient background.
    private lazy var containerView: UIView = {
        let view = UIView()
        view.translatesAutoresizingMaskIntoConstraints = false
        return view
    }()

    /// Gradient layer for the bottom fade effect.
    private lazy var gradientLayer: CAGradientLayer = {
        let layer = CAGradientLayer()
        layer.colors = [
            UIColor.clear.cgColor,
            UIColor.black.withAlphaComponent(0.6).cgColor
        ]
        layer.locations = [0.0, 1.0]
        return layer
    }()

    /// Horizontal container for the content (name + indicators).
    private lazy var contentStackView: UIStackView = {
        let stack = UIStackView()
        stack.translatesAutoresizingMaskIntoConstraints = false
        stack.axis = .horizontal
        stack.spacing = 4
        stack.alignment = .center
        return stack
    }()

    /// Pin indicator icon (shown when participant is pinned); hidden by default.
    private lazy var pinIconView: UIImageView = makeIndicatorIcon(
        systemName: "pin.fill",
        startsHidden: true
    )

    /// Label showing participant name.
    private lazy var nameLabel: UILabel = {
        let label = UILabel()
        label.translatesAutoresizingMaskIntoConstraints = false
        label.font = .systemFont(ofSize: 11, weight: .medium)
        label.textColor = .white
        label.lineBreakMode = .byTruncatingTail
        // The name is the only compressible element: it truncates so the
        // fixed-size icons keep their space.
        label.setContentHuggingPriority(.defaultLow, for: .horizontal)
        label.setContentCompressionResistancePriority(.defaultLow, for: .horizontal)
        return label
    }()

    /// Video paused indicator icon (wifi.slash as in upstream); hidden by default.
    private lazy var videoPausedIconView: UIImageView = makeIndicatorIcon(
        systemName: "wifi.slash",
        startsHidden: true
    )

    /// Sound indicator icon (microphone on/off); image assigned by
    /// `updateSoundIndicator()`.
    private lazy var soundIndicatorView: UIImageView = makeIndicatorIcon(
        systemName: nil,
        startsHidden: false
    )

    // MARK: - Initialization

    override init(frame: CGRect) {
        super.init(frame: frame)
        setUp()
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    override func layoutSubviews() {
        super.layoutSubviews()
        // Update gradient frame when view bounds change; disable implicit
        // animations so the gradient does not lag behind layout.
        CATransaction.begin()
        CATransaction.setDisableActions(true)
        gradientLayer.frame = containerView.bounds
        CATransaction.commit()
    }

    // MARK: - Private Methods

    /// Builds a 10pt SF Symbol icon configured like the other indicators.
    /// - Parameters:
    ///   - systemName: SF Symbol name, or `nil` to leave the image unset.
    ///   - startsHidden: Whether the icon starts hidden.
    private func makeIndicatorIcon(systemName: String?, startsHidden: Bool) -> UIImageView {
        let imageView = UIImageView()
        imageView.translatesAutoresizingMaskIntoConstraints = false
        imageView.contentMode = .scaleAspectFit
        imageView.tintColor = .white
        if let systemName {
            let config = UIImage.SymbolConfiguration(pointSize: 10, weight: .medium)
            imageView.image = UIImage(systemName: systemName, withConfiguration: config)
        }
        imageView.isHidden = startsHidden
        // Icons never compress or stretch; the name label yields instead.
        imageView.setContentHuggingPriority(.required, for: .horizontal)
        imageView.setContentCompressionResistancePriority(.required, for: .horizontal)
        return imageView
    }

    /// One-time view hierarchy and constraint setup.
    private func setUp() {
        isUserInteractionEnabled = false
        isHidden = true // Hidden by default until participant info is set

        addSubview(containerView)
        containerView.layer.insertSublayer(gradientLayer, at: 0)
        containerView.addSubview(contentStackView)

        // Order matters: pin, name, paused indicator, sound indicator.
        [pinIconView, nameLabel, videoPausedIconView, soundIndicatorView]
            .forEach(contentStackView.addArrangedSubview)

        NSLayoutConstraint.activate([
            // Container positioned at the bottom
            containerView.leadingAnchor.constraint(equalTo: leadingAnchor),
            containerView.trailingAnchor.constraint(equalTo: trailingAnchor),
            containerView.bottomAnchor.constraint(equalTo: bottomAnchor),
            containerView.heightAnchor.constraint(equalToConstant: 28),

            // Content stack with padding
            contentStackView.leadingAnchor.constraint(equalTo: containerView.leadingAnchor, constant: 8),
            contentStackView.trailingAnchor.constraint(lessThanOrEqualTo: containerView.trailingAnchor, constant: -8),
            contentStackView.bottomAnchor.constraint(equalTo: containerView.bottomAnchor, constant: -6),

            // Icon sizes
            pinIconView.widthAnchor.constraint(equalToConstant: 12),
            pinIconView.heightAnchor.constraint(equalToConstant: 12),
            videoPausedIconView.widthAnchor.constraint(equalToConstant: 12),
            videoPausedIconView.heightAnchor.constraint(equalToConstant: 12),
            soundIndicatorView.widthAnchor.constraint(equalToConstant: 12),
            soundIndicatorView.heightAnchor.constraint(equalToConstant: 12)
        ])

        // Initialize indicators
        updateSoundIndicator()
        updateVideoPausedIndicator()
    }

    /// Shows the overlay only when enabled and a non-empty name is present.
    private func updateVisibility() {
        let hasName = participantName != nil && !(participantName?.isEmpty ?? true)
        isHidden = !isOverlayEnabled || !hasName
    }

    /// Swaps the microphone icon and dims it slightly when muted.
    private func updateSoundIndicator() {
        let config = UIImage.SymbolConfiguration(pointSize: 10, weight: .medium)
        let symbolName = hasAudio ? "mic.fill" : "mic.slash.fill"
        soundIndicatorView.image = UIImage(systemName: symbolName, withConfiguration: config)
        soundIndicatorView.tintColor = hasAudio
            ? .white
            : UIColor(white: 0.7, alpha: 1.0) // Slightly dimmed when muted
    }

    /// Shows the wifi.slash icon only while the track is paused.
    private func updateVideoPausedIndicator() {
        videoPausedIconView.isHidden = !isTrackPaused
    }
}
/// A view that displays a reconnection indicator when the call connection is being recovered.
/// Shows three pulsing dots with a "Reconnecting" message, matching upstream CallingIndicator style.
final class PictureInPictureReconnectionView: UIView {

    // MARK: - Properties

    /// Whether the view should be visible (when reconnecting).
    var isReconnecting: Bool = false {
        didSet { updateVisibility() }
    }

    // MARK: - Private Properties

    private let containerView: UIView = {
        let view = UIView()
        view.translatesAutoresizingMaskIntoConstraints = false
        view.backgroundColor = UIColor(red: 0.12, green: 0.13, blue: 0.15, alpha: 0.85)
        return view
    }()

    private let contentStackView: UIStackView = {
        let stack = UIStackView()
        stack.translatesAutoresizingMaskIntoConstraints = false
        stack.axis = .vertical
        stack.alignment = .center
        stack.spacing = 8
        return stack
    }()

    private let messageLabel: UILabel = {
        let label = UILabel()
        label.translatesAutoresizingMaskIntoConstraints = false
        label.text = "Reconnecting"
        label.textColor = .white
        label.textAlignment = .center
        label.font = UIFont.systemFont(ofSize: 16, weight: .medium)
        label.accessibilityIdentifier = "reconnectingMessage"
        return label
    }()

    /// Three dots indicator matching upstream CallingIndicator style.
    private let dotsStackView: UIStackView = {
        let stack = UIStackView()
        stack.translatesAutoresizingMaskIntoConstraints = false
        stack.axis = .horizontal
        stack.alignment = .center
        stack.spacing = 2 // Matches upstream
        stack.accessibilityIdentifier = "callingIndicator"
        return stack
    }()

    private let dotSize: CGFloat = 4 // Matches upstream
    private var dots: [UIView] = []

    // MARK: - Lifecycle

    override init(frame: CGRect) {
        super.init(frame: frame)
        setUp()
    }

    required init?(coder: NSCoder) {
        fatalError("init(coder:) has not been implemented")
    }

    deinit {
        stopAnimation()
    }

    // MARK: - Private Helpers

    /// Builds one invisible white dot of `dotSize` diameter.
    private func createDot() -> UIView {
        let dot = UIView()
        dot.translatesAutoresizingMaskIntoConstraints = false
        dot.backgroundColor = .white
        dot.layer.cornerRadius = dotSize / 2
        dot.alpha = 0 // Start invisible (matches upstream)
        NSLayoutConstraint.activate([
            dot.widthAnchor.constraint(equalToConstant: dotSize),
            dot.heightAnchor.constraint(equalToConstant: dotSize)
        ])
        return dot
    }

    /// One-time view hierarchy and constraint setup.
    private func setUp() {
        addSubview(containerView)
        containerView.addSubview(contentStackView)

        // Order matches upstream: text first, then dots indicator
        contentStackView.addArrangedSubview(messageLabel)
        contentStackView.addArrangedSubview(dotsStackView)

        // Add three dots (matches upstream)
        dots = (0..<3).map { _ in createDot() }
        dots.forEach(dotsStackView.addArrangedSubview)

        NSLayoutConstraint.activate([
            containerView.leadingAnchor.constraint(equalTo: leadingAnchor),
            containerView.trailingAnchor.constraint(equalTo: trailingAnchor),
            containerView.topAnchor.constraint(equalTo: topAnchor),
            containerView.bottomAnchor.constraint(equalTo: bottomAnchor),

            contentStackView.centerXAnchor.constraint(equalTo: containerView.centerXAnchor),
            contentStackView.centerYAnchor.constraint(equalTo: containerView.centerYAnchor),
            contentStackView.leadingAnchor.constraint(greaterThanOrEqualTo: containerView.leadingAnchor, constant: 16),
            contentStackView.trailingAnchor.constraint(lessThanOrEqualTo: containerView.trailingAnchor, constant: -16)
        ])

        // Initially hidden
        updateVisibility()
    }

    override func didMoveToWindow() {
        super.didMoveToWindow()
        // Restart animation when view is added to window (animations are removed when view leaves window)
        guard window != nil, isReconnecting, !isHidden else { return }
        startAnimation()
    }

    /// Syncs hidden state with `isReconnecting` and (re)starts the animation.
    private func updateVisibility() {
        isHidden = !isReconnecting
        isReconnecting ? startAnimation() : stopAnimation()
    }

    // MARK: - Animation (matches upstream CallingIndicator)

    /// Starts the pulsing animation matching upstream exactly:
    /// - All dots animate from alpha 0 → 1
    /// - Same 0.2s delay for all dots
    /// - 1 second duration
    /// - Different easing per dot: easeOut, easeInOut, easeIn
    /// - Repeat forever with autoreverse
    private func startAnimation() {
        // Only animate if we're in a window
        guard window != nil else {
            PictureInPictureLogger.log("ReconnectionView: startAnimation called but not in window yet")
            return
        }

        PictureInPictureLogger.log("ReconnectionView: starting dot animation with CABasicAnimation")

        // Stop any existing animations first
        stopAnimation()

        // Use CABasicAnimation for better compatibility with PiP.
        // Matches upstream: easeOut, easeInOut, easeIn timing functions.
        let timingFunctions: [CAMediaTimingFunction] = [
            CAMediaTimingFunction(name: .easeOut),
            CAMediaTimingFunction(name: .easeInEaseOut),
            CAMediaTimingFunction(name: .easeIn)
        ]

        for (dot, timingFunction) in zip(dots, timingFunctions) {
            let animation = CABasicAnimation(keyPath: "opacity")
            animation.fromValue = 0.0
            animation.toValue = 1.0
            animation.duration = 1.0
            animation.beginTime = CACurrentMediaTime() + 0.2 // 0.2s delay
            animation.timingFunction = timingFunction
            animation.autoreverses = true
            animation.repeatCount = .infinity
            animation.fillMode = .forwards
            animation.isRemovedOnCompletion = false

            dot.layer.add(animation, forKey: "pulseAnimation")
            dot.alpha = 0 // Set initial state
        }
    }

    /// Removes all dot animations and resets dots to invisible.
    private func stopAnimation() {
        for dot in dots {
            dot.layer.removeAllAnimations()
            dot.alpha = 0
        }
    }
}
protocol StreamAVPictureInPictureViewControlling: AnyObject { - + /// The closure to call whenever the picture-in-picture window size changes. var onSizeUpdate: ((CGSize) -> Void)? { get set } - + /// The track that will be rendered on picture-in-picture window. var track: RTCVideoTrack? { get set } - + /// The preferred size for the picture-in-picture window. /// - Important: This should **always** be greater to ``CGSize.zero``. If not, iOS throws /// a cryptic error with content `PGPegasus code:-1003` @@ -24,17 +24,65 @@ protocol StreamAVPictureInPictureViewControlling: AnyObject { /// The layer that renders the incoming frames from WebRTC. var displayLayer: CALayer { get } + + // MARK: - Avatar Placeholder Properties + + /// The participant's name for the avatar placeholder + var participantName: String? { get set } + + /// The URL string for the participant's profile image + var participantImageURL: String? { get set } + + /// Whether video is enabled - when false, shows avatar placeholder + var isVideoEnabled: Bool { get set } + + // MARK: - Reconnection Properties + + /// Whether the call is reconnecting - when true, shows reconnection view + var isReconnecting: Bool { get set } + + // MARK: - Screen Sharing Properties + + /// Whether screen sharing is active (used for content state tracking) + var isScreenSharing: Bool { get set } + + // MARK: - Participant Overlay Properties + + /// Whether the participant has audio enabled (shown in participant overlay) + var hasAudio: Bool { get set } + + /// Whether the video track is paused (shown in participant overlay) + var isTrackPaused: Bool { get set } + + /// Whether the participant is pinned (shown in participant overlay) + var isPinned: Bool { get set } + + /// Whether the participant is currently speaking (shows border highlight) + var isSpeaking: Bool { get set } + + /// The connection quality level (0: unknown, 1: poor, 2: good, 3: excellent) + var connectionQuality: Int { get set } + + // MARK: - Content 
State System + + /// The content state manager for unified state handling. + /// When set, the view controller subscribes to content changes automatically. + var contentState: PictureInPictureContentState? { get set } + + /// The current content being displayed. + /// Can be set directly for one-off updates or managed via contentState for reactive updates. + var content: PictureInPictureContent { get set } } @available(iOS 15.0, *) final class StreamAVPictureInPictureVideoCallViewController: AVPictureInPictureVideoCallViewController, StreamAVPictureInPictureViewControlling { - + private let contentView: StreamPictureInPictureVideoRenderer = .init(windowSizePolicy: StreamPictureInPictureAdaptiveWindowSizePolicy()) - + var onSizeUpdate: ((CGSize) -> Void)? - + var track: RTCVideoTrack? { get { contentView.track } set { contentView.track = newValue } @@ -46,7 +94,77 @@ final class StreamAVPictureInPictureVideoCallViewController: AVPictureInPictureV } var displayLayer: CALayer { contentView.displayLayer } - + + // MARK: - Avatar Placeholder Properties + + var participantName: String? { + get { contentView.participantName } + set { contentView.participantName = newValue } + } + + var participantImageURL: String? 
{ + get { contentView.participantImageURL } + set { contentView.participantImageURL = newValue } + } + + var isVideoEnabled: Bool { + get { contentView.isVideoEnabled } + set { contentView.isVideoEnabled = newValue } + } + + // MARK: - Reconnection Properties + + var isReconnecting: Bool { + get { contentView.isReconnecting } + set { contentView.isReconnecting = newValue } + } + + // MARK: - Screen Sharing Properties + + var isScreenSharing: Bool { + get { contentView.isScreenSharing } + set { contentView.isScreenSharing = newValue } + } + + // MARK: - Participant Overlay Properties + + var hasAudio: Bool { + get { contentView.hasAudio } + set { contentView.hasAudio = newValue } + } + + var isTrackPaused: Bool { + get { contentView.isTrackPaused } + set { contentView.isTrackPaused = newValue } + } + + var isPinned: Bool { + get { contentView.isPinned } + set { contentView.isPinned = newValue } + } + + var isSpeaking: Bool { + get { contentView.isSpeaking } + set { contentView.isSpeaking = newValue } + } + + var connectionQuality: Int { + get { contentView.connectionQuality } + set { contentView.connectionQuality = newValue } + } + + // MARK: - Content State System + + var contentState: PictureInPictureContentState? 
{ + get { contentView.contentState } + set { contentView.contentState = newValue } + } + + var content: PictureInPictureContent { + get { contentView.content } + set { contentView.content = newValue } + } + // MARK: - Lifecycle @available(*, unavailable) diff --git a/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureController.swift b/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureController.swift index df6dc468b9..038bb1ad99 100644 --- a/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureController.swift +++ b/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureController.swift @@ -7,31 +7,36 @@ import Combine import Foundation /// A controller class for picture-in-picture whenever that is possible. -@objc final class StreamPictureInPictureController: NSObject, AVPictureInPictureControllerDelegate { - +/// +/// This controller manages the Picture-in-Picture window state and handles transitions +/// between foreground and background states. It uses the `PictureInPictureContentState` +/// for centralized state management and a delegate proxy pattern to enable reactive +/// handling of PiP lifecycle events. +@objc final class StreamPictureInPictureController: NSObject { + // MARK: - Properties - + /// The RTCVideoTrack for which the picture-in-picture session is created. @objc public var track: RTCVideoTrack? { didSet { didUpdate(track) // Called when the `track` property changes } } - + /// The UIView that contains the video content. @objc public var sourceView: UIView? { didSet { didUpdate(sourceView) // Called when the `sourceView` property changes } } - + /// A closure called when the picture-in-picture view's size changes. public var onSizeUpdate: ((CGSize) -> Void)? { didSet { contentViewController?.onSizeUpdate = onSizeUpdate // Updates the onSizeUpdate closure of the content view controller } } - + /// A closure called when the picture-in-picture state changes. 
public var onPiPStateChange: ((Bool) -> Void)? @@ -44,28 +49,118 @@ import Foundation /// A boolean value indicating whether the picture-in-picture session should start automatically when the app enters background. public var canStartPictureInPictureAutomaticallyFromInline: Bool - + + // MARK: - Content State Properties + // These properties update the centralized content state, which manages view switching + + /// The participant's name for the avatar placeholder + @objc public var participantName: String? { + didSet { + syncContentStateIfNeeded() + } + } + + /// The URL string for the participant's profile image + @objc public var participantImageURL: String? { + didSet { + syncContentStateIfNeeded() + } + } + + /// Whether video is enabled - when false, shows avatar placeholder + @objc public var isVideoEnabled: Bool = true { + didSet { + syncContentStateIfNeeded() + } + } + + /// Whether the call is reconnecting - when true, shows reconnection view + @objc public var isReconnecting: Bool = false { + didSet { + syncContentStateIfNeeded() + } + } + + /// Whether screen sharing is active (used for content state tracking) + @objc public var isScreenSharing: Bool = false { + didSet { + syncContentStateIfNeeded() + } + } + + /// Whether the participant has audio enabled (shown in participant overlay) + @objc public var hasAudio: Bool = true { + didSet { + contentViewController?.hasAudio = hasAudio + } + } + + /// Whether the video track is paused (shown in participant overlay) + @objc public var isTrackPaused: Bool = false { + didSet { + contentViewController?.isTrackPaused = isTrackPaused + } + } + + /// Whether the participant is pinned (shown in participant overlay) + @objc public var isPinned: Bool = false { + didSet { + contentViewController?.isPinned = isPinned + } + } + + /// Whether the participant is currently speaking (shows border highlight) + @objc public var isSpeaking: Bool = false { + didSet { + contentViewController?.isSpeaking = isSpeaking + } + } + 
+ /// The connection quality level (0: unknown, 1: poor, 2: good, 3: excellent) + @objc public var connectionQuality: Int = 0 { + didSet { + contentViewController?.connectionQuality = connectionQuality + } + } + // MARK: - Private Properties - + /// The AVPictureInPictureController object. private var pictureInPictureController: AVPictureInPictureController? - + /// The StreamAVPictureInPictureViewControlling object that manages the picture-in-picture view. private var contentViewController: StreamAVPictureInPictureViewControlling? - + + /// Centralized content state manager for unified state handling. + /// This manages the content switching between video, avatar, reconnection, and screen share views. + private let contentState = PictureInPictureContentState() + /// A set of `AnyCancellable` objects used to manage subscriptions. private var cancellableBag: Set = [] - - /// A `AnyCancellable` object used to ensure that the active track is enabled while in picture-in-picture - /// mode. - private var ensureActiveTrackIsEnabledCancellable: AnyCancellable? - + + /// Delegate proxy that publishes PiP lifecycle events via Combine. + private let delegateProxy = PictureInPictureDelegateProxy() + + /// Adapter responsible for enforcing the stop of PiP when the app returns to foreground. + private var enforcedStopAdapter: PictureInPictureEnforcedStopAdapter? + /// A `StreamPictureInPictureTrackStateAdapter` object that manages the state of the /// active track. private let trackStateAdapter: StreamPictureInPictureTrackStateAdapter = .init() + + /// When true, multiple content fields are being updated as one transition. + private var isApplyingContentSnapshot = false + // MARK: - Content State Access + + /// Returns the current content being displayed in the PiP window. + /// This is useful for debugging and logging purposes. 
+ var currentContent: PictureInPictureContent { + contentState.content + } + // MARK: - Lifecycle - + /// Initializes the controller and creates the content view /// /// - Parameter canStartPictureInPictureAutomaticallyFromInline A boolean value @@ -77,7 +172,7 @@ import Foundation guard AVPictureInPictureController.isPictureInPictureSupported() else { return nil } - + let contentViewController: StreamAVPictureInPictureViewControlling? = { if #available(iOS 15.0, *) { return StreamAVPictureInPictureVideoCallViewController() @@ -85,60 +180,121 @@ import Foundation return nil } }() - // contentViewController?.preferredContentSize = .init(width: 400, height: 320) + // Set a default preferred content size to avoid iOS PGPegasus code:-1003 error + // This will be updated later when track dimensions become available + contentViewController?.preferredContentSize = .init(width: 640, height: 480) self.contentViewController = contentViewController self.contentViewController?.isMirrored = isMirrored self.canStartPictureInPictureAutomaticallyFromInline = canStartPictureInPictureAutomaticallyFromInline super.init() + + // Wire up the content state to the view controller for reactive updates (US-008) + // This enables the unified content view system where contentState changes + // automatically drive view switching in the renderer + contentViewController?.contentState = contentState + syncContentState() + + // Subscribe to delegate proxy events for reactive PiP state handling + setupDelegateProxySubscriptions() + + // Subscribe to content state changes for logging + setupContentStateSubscriptions() } - - func setPreferredContentSize(_ size: CGSize) { - contentViewController?.preferredContentSize = size - } - - // MARK: - AVPictureInPictureControllerDelegate - - func pictureInPictureController( - _ pictureInPictureController: AVPictureInPictureController, - restoreUserInterfaceForPictureInPictureStopWithCompletionHandler completionHandler: @escaping (Bool) -> Void - ) { - 
completionHandler(true) - } - - public func pictureInPictureControllerWillStartPictureInPicture( - _ pictureInPictureController: AVPictureInPictureController - ) { - } - - public func pictureInPictureControllerDidStartPictureInPicture( - _ pictureInPictureController: AVPictureInPictureController - ) { - onPiPStateChange?(true) + + // MARK: - Private Setup + + /// Sets up subscriptions to the delegate proxy's event publisher. + private func setupDelegateProxySubscriptions() { + delegateProxy.publisher + .sink { [weak self] event in + self?.handleDelegateEvent(event) + } + .store(in: &cancellableBag) } - - public func pictureInPictureController( - _ pictureInPictureController: AVPictureInPictureController, - failedToStartPictureInPictureWithError error: Error - ) { - NSLog("PiP - failedToStartPictureInPictureWithError:\(error)") + + /// Sets up subscriptions to content state changes for logging and debugging. + private func setupContentStateSubscriptions() { + contentState.contentPublisher + .removeDuplicates() + .sink { content in + PictureInPictureLogger.log("Content state changed to: \(content)") + } + .store(in: &cancellableBag) } - - public func pictureInPictureControllerWillStopPictureInPicture( - _ pictureInPictureController: AVPictureInPictureController - ) { + + /// Handles events from the delegate proxy. 
+ private func handleDelegateEvent(_ event: PictureInPictureDelegateProxy.Event) { + switch event { + case .didStart: + onPiPStateChange?(true) + case .didStop: + onPiPStateChange?(false) + case let .failedToStart(_, error): + PictureInPictureLogger.log("failedToStartPictureInPictureWithError: \(error.localizedDescription)") + // Notify JS that PiP failed to start so it can update its state accordingly + onPiPStateChange?(false) + case let .restoreUI(_, completionHandler): + completionHandler(true) + case .willStart, .willStop: + // No action needed for will start/stop events + break + } } - public func pictureInPictureControllerDidStopPictureInPicture( - _ pictureInPictureController: AVPictureInPictureController - ) { - onPiPStateChange?(false) + func setPreferredContentSize(_ size: CGSize) { + // Guard against setting zero size to avoid iOS PGPegasus code:-1003 error + guard size.width > 0, size.height > 0 else { + PictureInPictureLogger.log("Ignoring setPreferredContentSize with zero dimensions: \(size)") + return + } + contentViewController?.preferredContentSize = size } - + // MARK: - Private helpers - + private func didUpdate(_ track: RTCVideoTrack?) { - contentViewController?.track = track trackStateAdapter.activeTrack = track + syncContentStateIfNeeded() + } + + private func syncContentStateIfNeeded() { + guard !isApplyingContentSnapshot else { return } + syncContentState() + } + + private func syncContentState() { + let snapshot = PictureInPictureContentState.Snapshot( + track: track, + participantName: participantName, + participantImageURL: participantImageURL, + isVideoEnabled: isVideoEnabled, + isScreenSharing: isScreenSharing, + isReconnecting: isReconnecting + ) + contentState.apply(snapshot) + } + + /// Applies all content-driving fields as a single state transition. 
+ func applyContentSnapshot( + track: RTCVideoTrack?, + participantName: String?, + participantImageURL: String?, + isVideoEnabled: Bool, + isScreenSharing: Bool, + isReconnecting: Bool + ) { + isApplyingContentSnapshot = true + defer { + isApplyingContentSnapshot = false + syncContentState() + } + + self.track = track + self.participantName = participantName + self.participantImageURL = participantImageURL + self.isVideoEnabled = isVideoEnabled + self.isScreenSharing = isScreenSharing + self.isReconnecting = isReconnecting } @objc private func didUpdate(_ sourceView: UIView?) { @@ -149,7 +305,7 @@ import Foundation pictureInPictureController? .publisher(for: \.isPictureInPicturePossible) - .sink { NSLog("PiP - isPictureInPicturePossible:\($0)") } + .sink { PictureInPictureLogger.log("isPictureInPicturePossible:\($0)") } .store(in: &cancellableBag) pictureInPictureController? @@ -169,6 +325,19 @@ import Foundation } @objc func cleanup() { + // Cancel all Combine subscriptions + cancellableBag.removeAll() + + // Reset the content state to inactive + contentState.reset() + + // Disable the track state adapter to stop its timer + trackStateAdapter.isEnabled = false + trackStateAdapter.activeTrack = nil + + // Release the enforced stop adapter + enforcedStopAdapter = nil + sourceView = nil contentViewController?.track = nil contentViewController = nil @@ -177,7 +346,6 @@ import Foundation } pictureInPictureController?.delegate = nil pictureInPictureController = nil - } private func makePictureInPictureController(with sourceView: UIView) { @@ -190,13 +358,19 @@ import Foundation ) ) } - + if #available(iOS 14.2, *) { pictureInPictureController? 
.canStartPictureInPictureAutomaticallyFromInline = canStartPictureInPictureAutomaticallyFromInline
         }
-
-        pictureInPictureController?.delegate = self
+
+        // Use the delegate proxy for reactive event handling
+        pictureInPictureController?.delegate = delegateProxy
+
+        // Create the enforced stop adapter to handle app foreground transitions
+        if let pipController = pictureInPictureController {
+            enforcedStopAdapter = PictureInPictureEnforcedStopAdapter(pipController)
+        }
     }
 
     private func didUpdatePictureInPictureActiveState(_ isActive: Bool) {
diff --git a/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureControllerProtocol.swift b/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureControllerProtocol.swift
new file mode 100644
index 0000000000..6e28daea5d
--- /dev/null
+++ b/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureControllerProtocol.swift
@@ -0,0 +1,30 @@
+//
+// Copyright © 2024 Stream.io Inc. All rights reserved.
+//
+
+import AVKit
+import Combine
+import Foundation
+
+/// Protocol defining the interface for Picture-in-Picture controller functionality.
+///
+/// This abstraction allows for easier testing and decouples components from the
+/// concrete `AVPictureInPictureController` implementation.
+protocol StreamPictureInPictureControllerProtocol: AnyObject {
+    /// Publisher that emits whenever the Picture-in-Picture active state changes.
+    /// Consumers should rely on this stream instead of synchronous snapshots so
+    /// lifecycle adapters can react to state transitions deterministically.
+    var isPictureInPictureActivePublisher: AnyPublisher<Bool, Never> { get }
+
+    /// Stops the Picture-in-Picture playback if it is currently active.
+    func stopPictureInPicture()
+}
+
+/// Extends `AVPictureInPictureController` to conform to `StreamPictureInPictureControllerProtocol`.
+///
+/// This extension provides a Combine publisher for observing the `isPictureInPictureActive` property.
+extension AVPictureInPictureController: StreamPictureInPictureControllerProtocol {
+    var isPictureInPictureActivePublisher: AnyPublisher<Bool, Never> {
+        publisher(for: \.isPictureInPictureActive).eraseToAnyPublisher()
+    }
+}
diff --git a/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureVideoRenderer.swift b/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureVideoRenderer.swift
index 3dfcb42493..c0a9dbbd36 100644
--- a/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureVideoRenderer.swift
+++ b/packages/react-native-sdk/ios/PictureInPicture/StreamPictureInPictureVideoRenderer.swift
@@ -7,8 +7,42 @@ import Foundation
 import UIKit
 
 /// A view that can be used to render an instance of `RTCVideoTrack`
+///
+/// This view manages the display of different content types in the PiP window:
+/// - Video content from a participant's camera or screen share
+/// - Avatar placeholder when video is disabled
+/// - Screen sharing indicator overlay
+/// - Reconnection view during connection recovery
+///
+/// The content can be managed either through individual properties (legacy approach)
+/// or through the unified `content` property using `PictureInPictureContent` enum.
 final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
-
+
+    // MARK: - Content State (New unified approach)
+
+    /// The current content being displayed, using the unified content enum.
+    /// Setting this property automatically updates all overlay views and the video track.
+    var content: PictureInPictureContent = .inactive {
+        didSet {
+            guard content != oldValue else { return }
+            applyContent(content)
+        }
+    }
+
+    /// The content state manager for reactive state updates.
+    /// When set, the renderer subscribes to content changes automatically.
+    var contentState: PictureInPictureContentState? {
+        didSet {
+            subscribeToContentState()
+        }
+    }
+
+    /// Cancellable for content state subscription
+    private var contentStateCancellable: AnyCancellable?
+ private var isApplyingContentBatch = false + + // MARK: - Individual Properties (Legacy approach - still supported) + /// The rendering track. var track: RTCVideoTrack? { didSet { @@ -16,11 +50,17 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer { // - stopFrameStreaming for the old track // - startFrameStreaming for the new track and only if we are already // in picture-in-picture. - guard oldValue != track else { return } + PictureInPictureLogger.log("Renderer: track changed from \(oldValue?.trackId ?? "nil") to \(track?.trackId ?? "nil")") + guard !isSameTrackInstance(oldValue, track) else { return } + trackSize = .zero prepareForTrackRendering(oldValue) + if !isApplyingContentBatch { + // Track changes coming from non-content flows should still refresh overlays immediately. + updateOverlayVisibility() + } } } - + /// The layer that renders the track's frames. var displayLayer: CALayer { contentView.layer } @@ -36,7 +76,90 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer { /// A policy defining how the Picture in Picture window should be resized in order to better fit /// the rendering frame size. var pictureInPictureWindowSizePolicy: PictureInPictureWindowSizePolicy - + + // MARK: - Avatar Placeholder Properties + + /// The participant's name for the avatar and overlay + var participantName: String? { + didSet { + PictureInPictureLogger.log("Renderer.participantName didSet: '\(participantName ?? "nil")', forwarding to avatarView") + avatarView.participantName = participantName + participantOverlayView.participantName = participantName + } + } + + /// The URL string for the participant's profile image + var participantImageURL: String? 
{ + didSet { + avatarView.imageURL = participantImageURL + } + } + + /// Whether video is enabled - when false, shows avatar placeholder + var isVideoEnabled: Bool = true { + didSet { + PictureInPictureLogger.log("Renderer: isVideoEnabled changed from \(oldValue) to \(isVideoEnabled), avatarView.participantName='\(avatarView.participantName ?? "nil")'") + if !isApplyingContentBatch { + updateOverlayVisibility() + } + } + } + + /// Whether the call is reconnecting - when true, shows reconnection view + var isReconnecting: Bool = false { + didSet { + reconnectionView.isReconnecting = isReconnecting + if !isApplyingContentBatch { + updateOverlayVisibility() + } + } + } + + /// Whether screen sharing is active (used for content state tracking) + var isScreenSharing: Bool = false + + /// Whether the participant has audio enabled (shown in participant overlay) + var hasAudio: Bool = true { + didSet { + participantOverlayView.hasAudio = hasAudio + } + } + + /// Whether the video track is paused (shown in participant overlay) + var isTrackPaused: Bool = false { + didSet { + participantOverlayView.isTrackPaused = isTrackPaused + } + } + + /// Whether the participant is pinned (shown in participant overlay) + var isPinned: Bool = false { + didSet { + participantOverlayView.isPinned = isPinned + } + } + + /// Whether the participant is currently speaking (shows border highlight) + var isSpeaking: Bool = false { + didSet { + updateSpeakingIndicator() + } + } + + /// The connection quality level (0: unknown, 1: poor, 2: good, 3: excellent) + var connectionQuality: Int = 0 { + didSet { + connectionQualityIndicator.connectionQuality = PictureInPictureConnectionQualityIndicator.ConnectionQuality(rawValue: connectionQuality) ?? 
.unspecified
+        }
+    }
+
+    /// Whether the participant overlay is enabled
+    var isParticipantOverlayEnabled: Bool = true {
+        didSet {
+            participantOverlayView.isOverlayEnabled = isParticipantOverlayEnabled
+        }
+    }
+
     /// The publisher which is used to streamline the frames received from the track.
     private let bufferPublisher: PassthroughSubject<CMSampleBuffer, Never> = .init()
 
@@ -68,7 +191,7 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer {
             didUpdateTrackSize()
         }
     }
-
+
     /// A property that defines if the RTCVideoFrame instances that will be rendered need to be resized
     /// to fid the view's contentSize.
     private var requiresResize = false {
@@ -95,7 +218,59 @@
     /// A size ratio threshold used to determine if skipping frames is required.
     private let sizeRatioThreshold: CGFloat = 15
-
+
+    /// The avatar view shown when video is disabled
+    /// Note: Uses alpha=0 for visibility instead of isHidden to match upstream SwiftUI behavior
+    /// and ensure layoutSubviews is always called for proper constraint layout.
+ private lazy var avatarView: PictureInPictureAvatarView = { + let view = PictureInPictureAvatarView() + view.translatesAutoresizingMaskIntoConstraints = false + view.alpha = 0 // Initially invisible (video enabled by default) + return view + }() + + /// The reconnection view shown when connection is being recovered + private lazy var reconnectionView: PictureInPictureReconnectionView = { + let view = PictureInPictureReconnectionView() + view.translatesAutoresizingMaskIntoConstraints = false + view.isHidden = true // Initially hidden (not reconnecting by default) + return view + }() + + + /// The participant overlay view showing name and mute status + private lazy var participantOverlayView: PictureInPictureParticipantOverlayView = { + let view = PictureInPictureParticipantOverlayView() + view.translatesAutoresizingMaskIntoConstraints = false + return view + }() + + /// Connection quality indicator view (bottom-right) + private lazy var connectionQualityIndicator: PictureInPictureConnectionQualityIndicator = { + let view = PictureInPictureConnectionQualityIndicator() + view.translatesAutoresizingMaskIntoConstraints = false + return view + }() + + /// Speaking indicator border layer + private lazy var speakingBorderLayer: CAShapeLayer = { + let layer = CAShapeLayer() + layer.fillColor = UIColor.clear.cgColor + layer.strokeColor = UIColor(red: 0.0, green: 0.8, blue: 0.6, alpha: 1.0).cgColor // Teal green + layer.lineWidth = 2 + layer.isHidden = true + return layer + }() + + /// The speaking indicator corner radius (matches upstream) + private var speakingCornerRadius: CGFloat { + if #available(iOS 26.0, *) { + return 32 + } else { + return 16 + } + } + // MARK: - Lifecycle @available(*, unavailable) @@ -112,15 +287,28 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer { // Depending on the window we are moving we either start or stop // streaming frames from the track. 
if newWindow != nil { + PictureInPictureLogger.log("Renderer: willMove(toWindow:) - added to window, track=\(track?.trackId ?? "nil"), isVideoEnabled=\(isVideoEnabled)") + trackSize = .zero + updateOverlayVisibility() startFrameStreaming(for: track, on: newWindow) } else { + PictureInPictureLogger.log("Renderer: willMove(toWindow:) - removed from window") stopFrameStreaming(for: track) + trackSize = .zero + updateOverlayVisibility() } } override func layoutSubviews() { super.layoutSubviews() contentSize = frame.size + + // Update speaking border frame + CATransaction.begin() + CATransaction.setDisableActions(true) + speakingBorderLayer.frame = bounds + speakingBorderLayer.path = UIBezierPath(roundedRect: bounds.insetBy(dx: 1, dy: 1), cornerRadius: speakingCornerRadius).cgPath + CATransaction.commit() } // MARK: - Rendering lifecycle @@ -134,7 +322,12 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer { guard let frame = frame else { return } - + + // Ignore empty frames + if frame.width <= 0 || frame.height <= 0 { + return + } + // Update the trackSize and re-calculate rendering properties if the size // has changed. trackSize = .init(width: Int(frame.width), height: Int(frame.height)) @@ -159,15 +352,96 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer { /// Set up the view's hierarchy. 
private func setUp() { + // Add speaking border layer first (behind everything else) + layer.addSublayer(speakingBorderLayer) + addSubview(contentView) + addSubview(avatarView) + addSubview(reconnectionView) + addSubview(participantOverlayView) + addSubview(connectionQualityIndicator) + NSLayoutConstraint.activate([ contentView.leadingAnchor.constraint(equalTo: leadingAnchor), contentView.trailingAnchor.constraint(equalTo: trailingAnchor), contentView.topAnchor.constraint(equalTo: topAnchor), - contentView.bottomAnchor.constraint(equalTo: bottomAnchor) + contentView.bottomAnchor.constraint(equalTo: bottomAnchor), + + avatarView.leadingAnchor.constraint(equalTo: leadingAnchor), + avatarView.trailingAnchor.constraint(equalTo: trailingAnchor), + avatarView.topAnchor.constraint(equalTo: topAnchor), + avatarView.bottomAnchor.constraint(equalTo: bottomAnchor), + + reconnectionView.leadingAnchor.constraint(equalTo: leadingAnchor), + reconnectionView.trailingAnchor.constraint(equalTo: trailingAnchor), + reconnectionView.topAnchor.constraint(equalTo: topAnchor), + reconnectionView.bottomAnchor.constraint(equalTo: bottomAnchor), + + // Participant overlay positioned at bottom + participantOverlayView.leadingAnchor.constraint(equalTo: leadingAnchor), + participantOverlayView.trailingAnchor.constraint(equalTo: trailingAnchor), + participantOverlayView.topAnchor.constraint(equalTo: topAnchor), + participantOverlayView.bottomAnchor.constraint(equalTo: bottomAnchor), + + // Connection quality indicator at bottom-right + connectionQualityIndicator.trailingAnchor.constraint(equalTo: trailingAnchor), + connectionQualityIndicator.bottomAnchor.constraint(equalTo: bottomAnchor), + connectionQualityIndicator.widthAnchor.constraint(equalToConstant: 28), + connectionQualityIndicator.heightAnchor.constraint(equalToConstant: 28) ]) } - + + /// Updates the visibility of overlay views based on current state. 
+ /// Priority: reconnection view > avatar view > video content + /// + /// The avatar view is shown when: + /// - Video is explicitly disabled (isVideoEnabled = false), OR + /// - Track is nil + /// + /// IMPORTANT: Participant overlay (name, mic, connection quality) is shown on top of BOTH + /// video AND avatar views, matching the upstream stream-video-swift implementation. + /// The overlay is only hidden during reconnection. + private func updateOverlayVisibility() { + // Reconnection view takes highest priority + if isReconnecting { + PictureInPictureLogger.log("updateOverlayVisibility: isReconnecting=true, hiding avatar, showing reconnection") + reconnectionView.isHidden = false + avatarView.alpha = 0 + avatarView.isVideoEnabled = true + // Hide participant overlay ONLY during reconnection (matches upstream) + participantOverlayView.isOverlayEnabled = false + } else { + reconnectionView.isHidden = true + // Avatar view shows when video is disabled OR when we don't have a track + let shouldShowVideo = isVideoEnabled && track != nil + let shouldShowAvatar = !shouldShowVideo + PictureInPictureLogger.log("updateOverlayVisibility: isVideoEnabled=\(isVideoEnabled), track=\(track?.trackId ?? "nil"), shouldShowAvatar=\(shouldShowAvatar)") + + // Update avatar visibility - setting isVideoEnabled triggers internal layout + avatarView.isVideoEnabled = !shouldShowAvatar + avatarView.alpha = shouldShowAvatar ? 1 : 0 + + // Force layout when avatar becomes visible to ensure proper sizing + if shouldShowAvatar { + PictureInPictureLogger.log("updateOverlayVisibility: showing avatar, forcing layout. participantName=\(participantName ?? "nil"), avatarView.participantName='\(avatarView.participantName ?? 
"nil")'") + avatarView.setNeedsLayout() + avatarView.layoutIfNeeded() + } + + // Participant overlay shows on BOTH video and avatar (matches upstream) + // Only hide during reconnection + participantOverlayView.isOverlayEnabled = true + } + } + + /// Updates the speaking indicator border visibility based on isSpeaking state. + /// The border is shown when the participant is speaking, on BOTH video and avatar views + /// (matching upstream behavior). Only hidden during reconnection. + private func updateSpeakingIndicator() { + let shouldShowBorder = isSpeaking && !isReconnecting + speakingBorderLayer.isHidden = !shouldShowBorder + } + /// A method used to process the frame's buffer and enqueue on the rendering view. private func process(_ buffer: CMSampleBuffer) { guard @@ -198,14 +472,14 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer { on window: UIWindow? ) { guard window != nil, let track else { return } - + bufferUpdatesCancellable = bufferPublisher .receive(on: DispatchQueue.main) .sink { [weak self] in self?.process($0) } - + track.add(self) } - + /// A method that stops the frame consumption from the track. Used automatically when the rendering /// view move's away from the window or when the track changes. private func stopFrameStreaming(for track: RTCVideoTrack?) { @@ -257,4 +531,102 @@ final class StreamPictureInPictureVideoRenderer: UIView, RTCVideoRenderer { requiresResize = false startFrameStreaming(for: track, on: window) } + + private func isSameTrackInstance(_ lhs: RTCVideoTrack?, _ rhs: RTCVideoTrack?) -> Bool { + switch (lhs, rhs) { + case (nil, nil): + return true + case let (lhsTrack?, rhsTrack?): + return lhsTrack === rhsTrack + default: + return false + } + } + + // MARK: - Content State System + + /// Subscribes to the content state manager for reactive updates. 
+ private func subscribeToContentState() { + contentStateCancellable?.cancel() + contentStateCancellable = nil + + guard let contentState = contentState else { return } + + contentStateCancellable = contentState.contentPublisher + .receive(on: DispatchQueue.main) + .sink { [weak self] newContent in + self?.content = newContent + } + } + + /// Applies the given content state to update all view components. + /// This method synchronizes the unified content enum with the individual properties + /// for backward compatibility while providing a cleaner API. + private func applyContent(_ content: PictureInPictureContent) { + isApplyingContentBatch = true + defer { + isApplyingContentBatch = false + updateOverlayVisibility() + } + + switch content { + case .inactive: + // Clear everything + track = nil + participantName = nil + participantImageURL = nil + isVideoEnabled = true + isReconnecting = false + isScreenSharing = false + + case let .video(newTrack, name, imageURL): + // Show video content + track = newTrack + participantName = name + participantImageURL = imageURL + isVideoEnabled = true + isReconnecting = false + isScreenSharing = false + + case let .screenSharing(newTrack, name): + // Show screen sharing content with indicator + track = newTrack + participantName = name + participantImageURL = nil + isVideoEnabled = true + isReconnecting = false + isScreenSharing = true + + case let .avatar(name, imageURL): + // Show avatar placeholder (video disabled) + // Keep existing track for potential quick re-enable + participantName = name + participantImageURL = imageURL + isVideoEnabled = false + isReconnecting = false + isScreenSharing = false + + case .reconnecting: + // Show reconnection view + // Keep existing track and participant info for recovery + isReconnecting = true + isScreenSharing = false + } + } + + /// Returns the current content as a `PictureInPictureContent` enum value. + /// This is useful for reading the current state in a unified way. 
+ func getCurrentContent() -> PictureInPictureContent { + if isReconnecting { + return .reconnecting + } else if !isVideoEnabled { + return .avatar(participantName: participantName, participantImageURL: participantImageURL) + } else if isScreenSharing { + return .screenSharing(track: track, participantName: participantName) + } else if track != nil { + return .video(track: track, participantName: participantName, participantImageURL: participantImageURL) + } else { + return .avatar(participantName: participantName, participantImageURL: participantImageURL) + } + } } diff --git a/packages/react-native-sdk/ios/RTCViewPip.swift b/packages/react-native-sdk/ios/RTCViewPip.swift index 58a046cb0b..f515fd04c5 100644 --- a/packages/react-native-sdk/ios/RTCViewPip.swift +++ b/packages/react-native-sdk/ios/RTCViewPip.swift @@ -10,11 +10,85 @@ import React @objc(RTCViewPip) class RTCViewPip: UIView { - - private var pictureInPictureController = StreamPictureInPictureController() + + private var pictureInPictureController: StreamPictureInPictureController? = StreamPictureInPictureController() private var webRtcModule: WebRTCModule? - + @objc var onPiPChange: RCTBubblingEventBlock? + + // MARK: - Avatar Placeholder Properties + + /// The participant's name for the avatar placeholder + @objc public var participantName: NSString? = nil { + didSet { + PictureInPictureLogger.log("RTCViewPip.participantName didSet: \(participantName as String? ?? "nil"), controller exists: \(pictureInPictureController != nil)") + pictureInPictureController?.participantName = participantName as String? + } + } + + /// The URL string for the participant's profile image + @objc public var participantImageURL: NSString? = nil { + didSet { + PictureInPictureLogger.log("RTCViewPip.participantImageURL didSet: \(participantImageURL as String? ?? "nil"), controller exists: \(pictureInPictureController != nil)") + pictureInPictureController?.participantImageURL = participantImageURL as String? 
+ } + } + + // MARK: - Reconnection Properties + + /// Whether the call is reconnecting - when true, shows reconnection view + @objc public var isReconnecting: Bool = false { + didSet { + pictureInPictureController?.isReconnecting = isReconnecting + } + } + + // MARK: - Screen Sharing Properties + + /// Whether screen sharing is active (used for content state tracking) + @objc public var isScreenSharing: Bool = false { + didSet { + pictureInPictureController?.isScreenSharing = isScreenSharing + } + } + + // MARK: - Participant Overlay Properties + + /// Whether the participant has audio enabled (shown in participant overlay) + @objc public var hasAudio: Bool = true { + didSet { + pictureInPictureController?.hasAudio = hasAudio + } + } + + /// Whether the video track is paused (shown in participant overlay) + @objc public var isTrackPaused: Bool = false { + didSet { + pictureInPictureController?.isTrackPaused = isTrackPaused + } + } + + /// Whether the participant is pinned (shown in participant overlay) + @objc public var isPinned: Bool = false { + didSet { + pictureInPictureController?.isPinned = isPinned + } + } + + /// Whether the participant is currently speaking (shows border highlight) + @objc public var isSpeaking: Bool = false { + didSet { + pictureInPictureController?.isSpeaking = isSpeaking + } + } + + /// The connection quality level (0: unknown, 1: poor, 2: good, 3: excellent) + @objc public var connectionQuality: Int = 0 { + didSet { + pictureInPictureController?.connectionQuality = connectionQuality + } + } + @objc public var mirror: Bool = false { didSet { self.pictureInPictureController?.isMirrored = mirror @@ -38,27 +112,36 @@ class RTCViewPip: UIView { didSet { // https://github.com/react-native-webrtc/react-native-webrtc/blob/8dfc9c394b4bf627c0214255466ebd3b160ca563/ios/RCTWebRTC/RTCVideoViewManager.m#L405-L418 guard let streamURLString = streamURL as String? 
else { - NSLog("PiP - No streamURL set") + PictureInPictureLogger.log("No streamURL set, clearing track") + DispatchQueue.main.async { + self.applyTrackStateToController(track: nil, isVideoEnabled: false) + } return } - + guard let stream = self.webRtcModule?.stream(forReactTag: streamURLString) else { - NSLog("PiP - No stream for streamURL: -\(streamURLString)") + PictureInPictureLogger.log("No stream for streamURL: -\(streamURLString), clearing track") + DispatchQueue.main.async { + self.applyTrackStateToController(track: nil, isVideoEnabled: false) + } return } - + guard let videoTrack = stream.videoTracks.first else { - NSLog("PiP - No video track for streamURL: -\(streamURLString)") + PictureInPictureLogger.log("No video track for streamURL: -\(streamURLString), clearing track") + DispatchQueue.main.async { + self.applyTrackStateToController(track: nil, isVideoEnabled: false) + } return } - if (self.pictureInPictureController?.track == videoTrack) { - NSLog("PiP - Skipping video track for streamURL: -\(streamURLString)") + if isSameTrackInstance(self.pictureInPictureController?.track, videoTrack) { + PictureInPictureLogger.log("Skipping video track for streamURL: -\(streamURLString)") return } - + DispatchQueue.main.async { - NSLog("PiP - Setting video track for streamURL: -\(streamURLString) trackId: \(videoTrack.trackId)") - self.pictureInPictureController?.track = videoTrack + PictureInPictureLogger.log("Setting video track for streamURL: -\(streamURLString) trackId: \(videoTrack.trackId)") + self.applyTrackStateToController(track: videoTrack, isVideoEnabled: true) } } } @@ -70,30 +153,40 @@ class RTCViewPip: UIView { @objc func onCallClosed() { - NSLog("PiP - pictureInPictureController cleanup called") + PictureInPictureLogger.log("pictureInPictureController cleanup called") self.pictureInPictureController?.cleanup() self.pictureInPictureController = nil } @objc func setPreferredContentSize(_ size: CGSize) { - NSLog("PiP - RTCViewPip 
setPreferredContentSize \(size)") + PictureInPictureLogger.log("RTCViewPip setPreferredContentSize \(size)") self.pictureInPictureController?.setPreferredContentSize(size) } override func didMoveToSuperview() { super.didMoveToSuperview() if self.superview == nil { - NSLog("PiP - RTCViewPip has been removed from its superview.") + PictureInPictureLogger.log("RTCViewPip has been removed from its superview.") NotificationCenter.default.removeObserver(self) DispatchQueue.main.async { - NSLog("PiP - onCallClosed called due to view detaching") + PictureInPictureLogger.log("onCallClosed called due to view detaching") self.onCallClosed() } } else { - NSLog("PiP - RTCViewPip has been added to a superview.") + PictureInPictureLogger.log("RTCViewPip has been added to a superview.") setupNotificationObserver() DispatchQueue.main.async { + // Recreate controller if it was previously cleaned up + // This allows PiP to work again for subsequent calls + let wasNil = self.pictureInPictureController == nil + if wasNil { + PictureInPictureLogger.log("Recreating pictureInPictureController for new session") + self.pictureInPictureController = StreamPictureInPictureController() + // Re-apply all current properties to the new controller + // This is necessary because React Native may have set props while controller was nil + self.applyCurrentPropertiesToController() + } self.pictureInPictureController?.sourceView = self self.pictureInPictureController?.isMirrored = self.mirror // Set up PiP state change callback @@ -103,20 +196,93 @@ class RTCViewPip: UIView { if let reactTag = self.reactTag, let bridge = self.webRtcModule?.bridge { if let manager = bridge.module(for: RTCViewPipManager.self) as? 
RTCViewPipManager, let size = manager.getCachedSize(for: reactTag) { - NSLog("PiP - Applying cached size \(size) for reactTag \(reactTag)") + PictureInPictureLogger.log("Applying cached size \(size) for reactTag \(reactTag)") self.setPreferredContentSize(size) } } } } } + + /// Re-applies all current property values to the controller. + /// This is needed after controller recreation because didSet doesn't fire + /// when the property values haven't changed on the React Native side. + /// + /// NOTE: This reads from RTCViewPip's own properties (self.participantName, etc.) + /// which retain their values even after controller cleanup. + private func applyCurrentPropertiesToController() { + guard let controller = pictureInPictureController else { + PictureInPictureLogger.log("applyCurrentPropertiesToController: controller is nil, skipping") + return + } + + PictureInPictureLogger.log("applyCurrentPropertiesToController STARTING:") + PictureInPictureLogger.log(" participantName: '\(participantName as String? ?? "nil")'") + PictureInPictureLogger.log(" participantImageURL: '\(participantImageURL as String? ?? "nil")'") + PictureInPictureLogger.log(" streamURL: '\(streamURL as String? ?? "nil")'") + + let resolvedTrack: RTCVideoTrack? + let isVideoEnabled: Bool + if let streamURLString = streamURL as String?, + let stream = webRtcModule?.stream(forReactTag: streamURLString), + let videoTrack = stream.videoTracks.first { + PictureInPictureLogger.log("Re-applying track from streamURL: \(streamURLString), trackId: \(videoTrack.trackId)") + resolvedTrack = videoTrack + isVideoEnabled = true + } else { + // No stream URL or no track means video is disabled - show avatar + PictureInPictureLogger.log("No valid stream/track, setting isVideoEnabled=false for avatar") + resolvedTrack = nil + isVideoEnabled = false + } + + // Keep PiP content transitions store-driven with one snapshot update. 
+ controller.applyContentSnapshot( + track: resolvedTrack, + participantName: participantName as String?, + participantImageURL: participantImageURL as String?, + isVideoEnabled: isVideoEnabled, + isScreenSharing: isScreenSharing, + isReconnecting: isReconnecting + ) + + controller.hasAudio = hasAudio + controller.isTrackPaused = isTrackPaused + controller.isPinned = isPinned + controller.isSpeaking = isSpeaking + controller.connectionQuality = connectionQuality + PictureInPictureLogger.log("applyCurrentPropertiesToController COMPLETED") + } + + /// Applies track/video availability without splitting a single change into multiple setters. + private func applyTrackStateToController(track: RTCVideoTrack?, isVideoEnabled: Bool) { + pictureInPictureController?.applyContentSnapshot( + track: track, + participantName: participantName as String?, + participantImageURL: participantImageURL as String?, + isVideoEnabled: isVideoEnabled, + isScreenSharing: isScreenSharing, + isReconnecting: isReconnecting + ) + } + + private func isSameTrackInstance(_ lhs: RTCVideoTrack?, _ rhs: RTCVideoTrack?) 
-> Bool { + switch (lhs, rhs) { + case (nil, nil): + return true + case let (lhsTrack?, rhsTrack?): + return lhsTrack === rhsTrack + default: + return false + } + } private func sendPiPChangeEvent(isActive: Bool) { guard let onPiPChange = onPiPChange else { return } - - NSLog("PiP - Sending PiP state change event: \(isActive)") + + PictureInPictureLogger.log("Sending PiP state change event: \(isActive)") onPiPChange(["active": isActive]) } } diff --git a/packages/react-native-sdk/ios/RTCViewPipManager.mm b/packages/react-native-sdk/ios/RTCViewPipManager.mm index 414c1bcddf..86d122d5e6 100644 --- a/packages/react-native-sdk/ios/RTCViewPipManager.mm +++ b/packages/react-native-sdk/ios/RTCViewPipManager.mm @@ -13,6 +13,15 @@ @interface RCT_EXTERN_MODULE(RTCViewPipManager, RCTViewManager) RCT_EXPORT_VIEW_PROPERTY(streamURL, NSString) RCT_EXPORT_VIEW_PROPERTY(mirror, BOOL) RCT_EXPORT_VIEW_PROPERTY(onPiPChange, RCTBubblingEventBlock) +RCT_EXPORT_VIEW_PROPERTY(participantName, NSString) +RCT_EXPORT_VIEW_PROPERTY(participantImageURL, NSString) +RCT_EXPORT_VIEW_PROPERTY(isReconnecting, BOOL) +RCT_EXPORT_VIEW_PROPERTY(isScreenSharing, BOOL) +RCT_EXPORT_VIEW_PROPERTY(hasAudio, BOOL) +RCT_EXPORT_VIEW_PROPERTY(isTrackPaused, BOOL) +RCT_EXPORT_VIEW_PROPERTY(isPinned, BOOL) +RCT_EXPORT_VIEW_PROPERTY(isSpeaking, BOOL) +RCT_EXPORT_VIEW_PROPERTY(connectionQuality, NSInteger) RCT_EXTERN_METHOD(onCallClosed:(nonnull NSNumber*) reactTag) RCT_EXTERN_METHOD(setPreferredContentSize:(nonnull NSNumber *)reactTag width:(CGFloat)w height:(CGFloat)h); diff --git a/packages/react-native-sdk/ios/RTCViewPipManager.swift b/packages/react-native-sdk/ios/RTCViewPipManager.swift index 4c7d55f1c8..59028b5d22 100644 --- a/packages/react-native-sdk/ios/RTCViewPipManager.swift +++ b/packages/react-native-sdk/ios/RTCViewPipManager.swift @@ -32,7 +32,7 @@ class RTCViewPipManager: RCTViewManager { pipView.onCallClosed() } } else { - NSLog("PiP - onCallClosed cant be called, Invalid view returned from 
registry, expecting RTCViewPip") + PictureInPictureLogger.log("onCallClosed cant be called, Invalid view returned from registry, expecting RTCViewPip") } }) } @@ -51,7 +51,7 @@ class RTCViewPipManager: RCTViewManager { } else { // If the view is not found, cache the size. // this happens when this method is called before the view can attach react super view - NSLog("PiP - View not found for reactTag \(reactTag), caching size.") + PictureInPictureLogger.log("View not found for reactTag \(reactTag), caching size.") self.cachedSizes[reactTag] = size } }) @@ -60,7 +60,7 @@ class RTCViewPipManager: RCTViewManager { func getCachedSize(for reactTag: NSNumber) -> CGSize? { let size = self.cachedSizes.removeValue(forKey: reactTag) if size != nil { - NSLog("PiP - Found and removed cached size for reactTag \(reactTag).") + PictureInPictureLogger.log("Found and removed cached size for reactTag \(reactTag).") } return size } diff --git a/packages/react-native-sdk/src/components/Call/CallContent/CallContent.tsx b/packages/react-native-sdk/src/components/Call/CallContent/CallContent.tsx index c49703fd7b..d9423a0123 100644 --- a/packages/react-native-sdk/src/components/Call/CallContent/CallContent.tsx +++ b/packages/react-native-sdk/src/components/Call/CallContent/CallContent.tsx @@ -21,11 +21,12 @@ import { useCall, useCallStateHooks } from '@stream-io/video-react-bindings'; import { CallingState, type StreamReaction, + type StreamVideoParticipant, videoLoggerSystem, } from '@stream-io/video-client'; +import { debounceTime } from 'rxjs'; import { Z_INDEX } from '../../../constants'; -import { useDebouncedValue } from '../../../utils/hooks'; import { FloatingParticipantView as DefaultFloatingParticipantView, type FloatingParticipantViewProps, @@ -134,16 +135,23 @@ export const CallContent = ({ theme: { callContent }, } = useTheme(); const call = useCall(); - const { - useHasOngoingScreenShare, - useRemoteParticipants, - useLocalParticipant, - } = useCallStateHooks(); + const { 
useHasOngoingScreenShare, useLocalParticipant } = useCallStateHooks(); useAutoEnterPiPEffect(disablePictureInPicture); - const _remoteParticipants = useRemoteParticipants(); - const remoteParticipants = useDebouncedValue(_remoteParticipants, 300); // we debounce the remote participants to avoid unnecessary rerenders that happen when participant tracks are all subscribed simultaneously + const [remoteParticipants, setRemoteParticipants] = useState< + StreamVideoParticipant[] + >(() => call?.state.remoteParticipants ?? []); + useEffect(() => { + if (!call) { + setRemoteParticipants([]); + return; + } + const sub = call.state.remoteParticipants$ + .pipe(debounceTime(300)) + .subscribe(setRemoteParticipants); + return () => sub.unsubscribe(); + }, [call]); const localParticipant = useLocalParticipant(); const isInPiPMode = useIsInPiPMode(); const hasScreenShare = useHasOngoingScreenShare(); diff --git a/packages/react-native-sdk/src/components/Call/CallContent/RTCViewPipIOS.tsx b/packages/react-native-sdk/src/components/Call/CallContent/RTCViewPipIOS.tsx index 2d2083b0ea..d2b5933d16 100644 --- a/packages/react-native-sdk/src/components/Call/CallContent/RTCViewPipIOS.tsx +++ b/packages/react-native-sdk/src/components/Call/CallContent/RTCViewPipIOS.tsx @@ -1,21 +1,25 @@ import { CallingState, + SfuModels, + hasAudio, + hasPausedTrack, hasScreenShare, - speakerLayoutSortPreset, type StreamVideoParticipant, videoLoggerSystem, type VideoTrackType, + hasVideo, + isPinned, } from '@stream-io/video-client'; import { useCall, useCallStateHooks } from '@stream-io/video-react-bindings'; import type { MediaStream } from '@stream-io/react-native-webrtc'; -import React, { useEffect, useMemo, useCallback } from 'react'; +import React, { useEffect, useCallback, useState } from 'react'; import { findNodeHandle } from 'react-native'; import { onNativeCallClosed, onNativeDimensionsUpdated, RTCViewPipNative, } from './RTCViewPipNative'; -import { useDebouncedValue } from 
'../../../utils/hooks'; +import { debounceTime } from 'rxjs'; import { shouldDisableIOSLocalVideoOnBackgroundRef } from '../../../utils/internal/shouldDisableIOSLocalVideoOnBackground'; import { useTrackDimensions } from '../../../hooks/useTrackDimensions'; import { isInPiPMode$ } from '../../../utils/internal/rxSubjects'; @@ -40,12 +44,26 @@ export const RTCViewPipIOS = React.memo((props: Props) => { onPiPChange, } = props; const call = useCall(); - const { useParticipants, useCameraState } = useCallStateHooks(); - const _allParticipants = useParticipants({ - sortBy: speakerLayoutSortPreset, - }); + const { useCameraState, useCallCallingState } = useCallStateHooks(); + const callingState = useCallCallingState(); const { direction } = useCameraState(); - const allParticipants = useDebouncedValue(_allParticipants, 300); // we debounce the participants to avoid unnecessary rerenders that happen when participant tracks are all subscribed simultaneously + + const [allParticipants, setAllParticipants] = useState< + StreamVideoParticipant[] + >(() => call?.state.participants ?? []); + + // we debounce the participants to avoid unnecessary rerenders + // that happen when participant tracks are all subscribed simultaneously + useEffect(() => { + if (!call) { + setAllParticipants([]); + return; + } + const subscription = call.state.participants$ + .pipe(debounceTime(300)) + .subscribe(setAllParticipants); + return () => subscription.unsubscribe(); + }, [call]); const [dominantSpeaker, dominantSpeaker2] = allParticipants.filter( (participant) =>
mirrorOverride : !!participantInSpotlight?.isLocalParticipant && direction === 'front'; - const streamURL = useMemo(() => { - if (!videoStreamToRender) { - return undefined; - } - return videoStreamToRender?.toURL(); - }, [videoStreamToRender]); - const handlePiPChange = (event: { nativeEvent: { active: boolean } }) => { isInPiPMode$.next(event.nativeEvent.active); onPiPChange?.(event.nativeEvent.active); }; + // Get participant info for avatar placeholder + const participantName = participantInSpotlight?.name || undefined; + const participantImageURL = participantInSpotlight?.image || undefined; + + // Determine if the call is reconnecting or offline + const isReconnecting = + callingState === CallingState.MIGRATING || + callingState === CallingState.RECONNECTING || + callingState === CallingState.RECONNECTING_FAILED || + callingState === CallingState.OFFLINE; + + // Determine if the participant has audio enabled + const participantHasAudio = participantInSpotlight + ? hasAudio(participantInSpotlight) + : true; + + // Determine if the video track is paused + const trackType: VideoTrackType = isScreenSharing + ? 'screenShareTrack' + : 'videoTrack'; + + const isVideoTrackPaused = participantInSpotlight + ? hasPausedTrack(participantInSpotlight, trackType) + : false; + + // Determine if the participant is pinned + const participantIsPinned = participantInSpotlight + ? isPinned(participantInSpotlight) + : false; + + // Determine if the participant is speaking + const participantIsSpeaking = participantInSpotlight?.isSpeaking ?? false; + + // Get connection quality (convert enum to number: UNSPECIFIED=0, POOR=1, GOOD=2, EXCELLENT=3) + const participantConnectionQuality = + participantInSpotlight?.connectionQuality ?? 
+ SfuModels.ConnectionQuality.UNSPECIFIED; + return ( <> { mirror={mirror} ref={nativeRef} onPiPChange={handlePiPChange} + participantName={participantName} + participantImageURL={participantImageURL} + isReconnecting={isReconnecting} + isScreenSharing={isScreenSharing} + hasAudio={participantHasAudio} + isTrackPaused={isVideoTrackPaused} + isPinned={participantIsPinned} + isSpeaking={participantIsSpeaking} + connectionQuality={participantConnectionQuality} /> {participantInSpotlight && ( void; + /** The participant's name for the avatar placeholder when video is disabled */ + participantName?: string; + /** The URL string for the participant's profile image */ + participantImageURL?: string; + /** Whether the call is reconnecting - when true, shows reconnection view */ + isReconnecting?: boolean; + /** Whether screen sharing is active (used for content state tracking) */ + isScreenSharing?: boolean; + /** Whether the participant has audio enabled (shown in participant overlay) */ + hasAudio?: boolean; + /** Whether the video track is paused (shown in participant overlay) */ + isTrackPaused?: boolean; + /** Whether the participant is pinned (shown in participant overlay) */ + isPinned?: boolean; + /** Whether the participant is currently speaking (shows border highlight) */ + isSpeaking?: boolean; + /** The connection quality level (0: unknown, 1: poor, 2: good, 3: excellent) */ + connectionQuality?: number; }; const NativeComponent: HostComponent = @@ -65,6 +83,24 @@ export const RTCViewPipNative = React.memo( mirror={props.mirror} // eslint-disable-next-line react/prop-types onPiPChange={props.onPiPChange} + // eslint-disable-next-line react/prop-types + participantName={props.participantName} + // eslint-disable-next-line react/prop-types + participantImageURL={props.participantImageURL} + // eslint-disable-next-line react/prop-types + isReconnecting={props.isReconnecting} + // eslint-disable-next-line react/prop-types + isScreenSharing={props.isScreenSharing} + 
// eslint-disable-next-line react/prop-types + hasAudio={props.hasAudio} + // eslint-disable-next-line react/prop-types + isTrackPaused={props.isTrackPaused} + // eslint-disable-next-line react/prop-types + isPinned={props.isPinned} + // eslint-disable-next-line react/prop-types + isSpeaking={props.isSpeaking} + // eslint-disable-next-line react/prop-types + connectionQuality={props.connectionQuality} // @ts-expect-error - types issue ref={ref} /> diff --git a/packages/react-native-sdk/src/components/Call/CallLayout/CallParticipantsGrid.tsx b/packages/react-native-sdk/src/components/Call/CallLayout/CallParticipantsGrid.tsx index 3718dd2489..bd4e804ff5 100644 --- a/packages/react-native-sdk/src/components/Call/CallLayout/CallParticipantsGrid.tsx +++ b/packages/react-native-sdk/src/components/Call/CallLayout/CallParticipantsGrid.tsx @@ -1,7 +1,7 @@ -import React from 'react'; +import React, { useEffect, useState } from 'react'; import { StyleSheet, View, type ViewStyle } from 'react-native'; -import { useCallStateHooks } from '@stream-io/video-react-bindings'; -import { useDebouncedValue } from '../../../utils/hooks/useDebouncedValue'; +import { useCall, useCallStateHooks } from '@stream-io/video-react-bindings'; +import { debounceTime } from 'rxjs'; import { CallParticipantsList as DefaultCallParticipantsList, type CallParticipantsListComponentProps, @@ -49,19 +49,33 @@ export const CallParticipantsGrid = ({ const { theme: { colors, callParticipantsGrid }, } = useTheme(); - const { - useRemoteParticipants, - useParticipants, - useLocalParticipant, - useDominantSpeaker, - } = useCallStateHooks(); - const _remoteParticipants = useRemoteParticipants(); + const call = useCall(); + const { useLocalParticipant, useDominantSpeaker } = useCallStateHooks(); const localParticipant = useLocalParticipant(); - const _allParticipants = useParticipants(); const dominantSpeaker = useDominantSpeaker(); - // we debounce the participants arrays to avoid unnecessary rerenders that 
happen when participant tracks are all subscribed simultaneously - const remoteParticipants = useDebouncedValue(_remoteParticipants, 300); - const allParticipants = useDebouncedValue(_allParticipants, 300); + const [remoteParticipants, setRemoteParticipants] = useState< + StreamVideoParticipant[] + >(() => call?.state.remoteParticipants ?? []); + const [allParticipants, setAllParticipants] = useState< + StreamVideoParticipant[] + >(() => call?.state.participants ?? []); + useEffect(() => { + if (!call) { + setRemoteParticipants([]); + setAllParticipants([]); + return; + } + const sub1 = call.state.remoteParticipants$ + .pipe(debounceTime(300)) + .subscribe(setRemoteParticipants); + const sub2 = call.state.participants$ + .pipe(debounceTime(300)) + .subscribe(setAllParticipants); + return () => { + sub1.unsubscribe(); + sub2.unsubscribe(); + }; + }, [call]); const landscapeStyles: ViewStyle = { flexDirection: landscape ? 'row' : 'column', }; diff --git a/packages/react-native-sdk/src/components/Call/CallLayout/CallParticipantsSpotlight.tsx b/packages/react-native-sdk/src/components/Call/CallLayout/CallParticipantsSpotlight.tsx index 0f68ab246c..062221c91d 100644 --- a/packages/react-native-sdk/src/components/Call/CallLayout/CallParticipantsSpotlight.tsx +++ b/packages/react-native-sdk/src/components/Call/CallLayout/CallParticipantsSpotlight.tsx @@ -1,11 +1,11 @@ -import React, { useMemo } from 'react'; +import React, { useEffect, useMemo, useState } from 'react'; import { hasScreenShare, - speakerLayoutSortPreset, + type StreamVideoParticipant, } from '@stream-io/video-client'; -import { useCallStateHooks } from '@stream-io/video-react-bindings'; +import { useCall } from '@stream-io/video-react-bindings'; import { StyleSheet, View, type ViewStyle } from 'react-native'; -import { useDebouncedValue } from '../../../utils/hooks/useDebouncedValue'; +import { debounceTime } from 'rxjs'; import { ComponentTestIds } from '../../../constants/TestIds'; import { 
CallParticipantsList as DefaultCallParticipantsList, @@ -56,15 +56,24 @@ export const CallParticipantsSpotlight = ({ theme: { callParticipantsSpotlight, variants }, } = useTheme(); const styles = useStyles(); - const { useParticipants } = useCallStateHooks(); - const _allParticipants = useParticipants({ - sortBy: speakerLayoutSortPreset, - }); - const allParticipants = useDebouncedValue(_allParticipants, 300); // we debounce the participants to avoid unnecessary rerenders that happen when participant tracks are all subscribed simultaneously + const call = useCall(); + const [allParticipants, setAllParticipants] = useState< + StreamVideoParticipant[] + >(() => call?.state.participants ?? []); + useEffect(() => { + if (!call) { + setAllParticipants([]); + return; + } + const sub = call.state.participants$ + .pipe(debounceTime(300)) + .subscribe(setAllParticipants); + return () => sub.unsubscribe(); + }, [call]); const [participantInSpotlight, ...otherParticipants] = allParticipants; const isScreenShareOnSpotlight = participantInSpotlight && hasScreenShare(participantInSpotlight); - const isUserAloneInCall = _allParticipants?.length === 1; + const isUserAloneInCall = allParticipants.length === 1; const isInPiP = useIsInPiPMode(); diff --git a/packages/react-native-sdk/src/components/Call/CallParticipantsList/CallParticipantsList.tsx b/packages/react-native-sdk/src/components/Call/CallParticipantsList/CallParticipantsList.tsx index 8d4a22c433..b67553790c 100644 --- a/packages/react-native-sdk/src/components/Call/CallParticipantsList/CallParticipantsList.tsx +++ b/packages/react-native-sdk/src/components/Call/CallParticipantsList/CallParticipantsList.tsx @@ -1,7 +1,7 @@ import React, { useCallback, + useEffect, useMemo, - useReducer, useRef, useState, } from 'react'; @@ -17,7 +17,7 @@ import { type StreamVideoParticipantPatches, VisibilityState, } from '@stream-io/video-client'; -import { useDebouncedValue } from '../../../utils/hooks/useDebouncedValue'; +import { 
Subject, debounceTime } from 'rxjs'; import { useCall } from '@stream-io/video-react-bindings'; import { ComponentTestIds } from '../../../constants/TestIds'; import { @@ -104,9 +104,24 @@ export const CallParticipantsList = ({ // we use a HashSet to track the currently viewable participants // and a separate force update state to rerender the component to inform that the HashSet has changed // NOTE: we use set instead of array or object for O(1) lookup, add and delete - const viewableParticipantSessionIds = useRef<Set<string>>(new Set()); - const [_forceUpdateValue, forceUpdate] = useReducer((x) => x + 1, 0); - const forceUpdateValue = useDebouncedValue(_forceUpdateValue, 500); // we debounce forced value to avoid multiple viewability change continuous rerenders due to callbacks that occurs simultaneously during a large list scroll or when scrolling is completed + // Lazy ref init: avoids recreating instances on every render (useRef doesn't support initializer fns) + const viewableParticipantSessionIds = useRef<Set<string>>(null!); + if (!viewableParticipantSessionIds.current) { + viewableParticipantSessionIds.current = new Set(); + } + const forceUpdate$Ref = useRef<Subject<void>>(null!); + if (!forceUpdate$Ref.current) { + forceUpdate$Ref.current = new Subject<void>(); + } + const forceUpdate$ = forceUpdate$Ref.current; + const [forceUpdateValue, setForceUpdateValue] = useState(0); + useEffect(() => { + const sub = forceUpdate$.pipe(debounceTime(500)).subscribe(() => { + setForceUpdateValue((v) => v + 1); + }); + return () => sub.unsubscribe(); + }, [forceUpdate$]); + const forceUpdate = useCallback(() => forceUpdate$.next(), [forceUpdate$]); // we use a ref to store the active call object // so that it can be used in the onViewableItemsChanged callback diff --git a/packages/react-native-sdk/src/utils/hooks/index.ts b/packages/react-native-sdk/src/utils/hooks/index.ts index 57403bbc8b..3a29340b4e 100644 --- a/packages/react-native-sdk/src/utils/hooks/index.ts +++
b/packages/react-native-sdk/src/utils/hooks/index.ts @@ -1,2 +1 @@ -export * from './useDebouncedValue'; export * from './usePrevious'; diff --git a/packages/react-native-sdk/src/utils/hooks/useDebouncedValue.ts b/packages/react-native-sdk/src/utils/hooks/useDebouncedValue.ts deleted file mode 100644 index 448bc33504..0000000000 --- a/packages/react-native-sdk/src/utils/hooks/useDebouncedValue.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { useEffect, useState } from 'react'; - -/** - * this is a custom hook that takes a value and a delay and returns a debounced value - * @param {T} value - * @param {number} delay - * @returns {T} - */ -export function useDebouncedValue(value: T, delay: number): T { - const [debouncedValue, setDebouncedValue] = useState(value); - - useEffect(() => { - const timer = setTimeout(() => setDebouncedValue(value), delay); - - return () => { - clearTimeout(timer); - }; - }, [value, delay]); - - return debouncedValue; -} diff --git a/sample-apps/react-native/dogfood/ios/Podfile.lock b/sample-apps/react-native/dogfood/ios/Podfile.lock index 7a8eb3cc4c..1d26d6347c 100644 --- a/sample-apps/react-native/dogfood/ios/Podfile.lock +++ b/sample-apps/react-native/dogfood/ios/Podfile.lock @@ -3028,7 +3028,7 @@ PODS: - stream-react-native-webrtc (137.1.0): - React-Core - StreamWebRTC (~> 137.0.54) - - stream-video-react-native (1.29.3): + - stream-video-react-native (1.29.4): - boost - DoubleConversion - fast_float @@ -3465,7 +3465,7 @@ SPEC CHECKSUMS: stream-io-noise-cancellation-react-native: 56787bb94ff912ee17661f4b24a3c4f9551f38ba stream-io-video-filters-react-native: 8fdd1a1fcade0dcd699fd2e5b61b2152c0056219 stream-react-native-webrtc: dd4bc6e9717e6d90204008c22a44bc1c1f605e3b - stream-video-react-native: 5be434cde5f0981f5832aa8099bf479bd239eeab + stream-video-react-native: 5e9f7b050e56505ac9d158e01989d7f09a9de997 StreamVideoNoiseCancellation: 41f5a712aba288f9636b64b17ebfbdff52c61490 StreamWebRTC: 57bd35729bcc46b008de4e741a5b23ac28b8854d VisionCamera: 
891edb31806dd3a239c8a9d6090d6ec78e11ee80 @@ -3473,4 +3473,4 @@ SPEC CHECKSUMS: PODFILE CHECKSUM: aa62ba474533b73121c2068a13a8b909b17efbaa -COCOAPODS: 1.16.2 +COCOAPODS: 1.16.2