Commit 3755332c by suolong

修复连麦问题

parent b2e801f6
......@@ -620,6 +620,8 @@
3E261EA82F2C5FAB0008C0C0 /* FUSLinkMicMediator.m in Sources */ = {isa = PBXBuildFile; fileRef = 3E261EA62F2C5FAB0008C0C0 /* FUSLinkMicMediator.m */; };
3E261EB02F2CA6890008C0C0 /* FUSLinkMicStreamConfig.h in Headers */ = {isa = PBXBuildFile; fileRef = 3E261EAE2F2CA6890008C0C0 /* FUSLinkMicStreamConfig.h */; };
3E261EB12F2CA6890008C0C0 /* FUSLinkMicStreamConfig.m in Sources */ = {isa = PBXBuildFile; fileRef = 3E261EAF2F2CA6890008C0C0 /* FUSLinkMicStreamConfig.m */; };
3E4DC89A2F580A46003070EC /* live_link_mic_boy_speak_anim.webp in Resources */ = {isa = PBXBuildFile; fileRef = 3E4DC8982F580A46003070EC /* live_link_mic_boy_speak_anim.webp */; };
3E4DC89B2F580A46003070EC /* live_link_mic_girl_speak_anim.webp in Resources */ = {isa = PBXBuildFile; fileRef = 3E4DC8992F580A46003070EC /* live_link_mic_girl_speak_anim.webp */; };
3E50054E2F556DC300058145 /* FUSLiveRTCData.swift in Sources */ = {isa = PBXBuildFile; fileRef = 3E50054D2F556DC300058145 /* FUSLiveRTCData.swift */; };
BE189D972C733B450008418B /* FSRActiveModel.h in Headers */ = {isa = PBXBuildFile; fileRef = BE189CE52C733B450008418B /* FSRActiveModel.h */; };
BE189D982C733B450008418B /* FSRActiveModel.m in Sources */ = {isa = PBXBuildFile; fileRef = BE189CE62C733B450008418B /* FSRActiveModel.m */; };
......@@ -2498,6 +2500,8 @@
3E261EA62F2C5FAB0008C0C0 /* FUSLinkMicMediator.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FUSLinkMicMediator.m; sourceTree = "<group>"; };
3E261EAE2F2CA6890008C0C0 /* FUSLinkMicStreamConfig.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FUSLinkMicStreamConfig.h; sourceTree = "<group>"; };
3E261EAF2F2CA6890008C0C0 /* FUSLinkMicStreamConfig.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FUSLinkMicStreamConfig.m; sourceTree = "<group>"; };
3E4DC8982F580A46003070EC /* live_link_mic_boy_speak_anim.webp */ = {isa = PBXFileReference; lastKnownFileType = file; path = live_link_mic_boy_speak_anim.webp; sourceTree = "<group>"; };
3E4DC8992F580A46003070EC /* live_link_mic_girl_speak_anim.webp */ = {isa = PBXFileReference; lastKnownFileType = file; path = live_link_mic_girl_speak_anim.webp; sourceTree = "<group>"; };
3E50054D2F556DC300058145 /* FUSLiveRTCData.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FUSLiveRTCData.swift; sourceTree = "<group>"; };
842ED23FEE639B8A5B65A322 /* Pods-FUSShowRoomModule.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FUSShowRoomModule.debug.xcconfig"; path = "Target Support Files/Pods-FUSShowRoomModule/Pods-FUSShowRoomModule.debug.xcconfig"; sourceTree = "<group>"; };
97F831FBE9C41BC899CF9232 /* Pods-FUSShowRoomModule.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-FUSShowRoomModule.release.xcconfig"; path = "Target Support Files/Pods-FUSShowRoomModule/Pods-FUSShowRoomModule.release.xcconfig"; sourceTree = "<group>"; };
......@@ -7019,6 +7023,8 @@
00E6CE272F56F89F00B63797 /* img_liveroom_input_linkmic_waiting_animation.webp */,
00E6CE1A2F56E36500B63797 /* live_pk_host_win_draw_reward_type1_anim.webp */,
00E6CE1B2F56E36500B63797 /* live_pk_host_win_draw_reward_type2_anim.webp */,
3E4DC8982F580A46003070EC /* live_link_mic_boy_speak_anim.webp */,
3E4DC8992F580A46003070EC /* live_link_mic_girl_speak_anim.webp */,
BEF675EC2C6B156500A670FB /* live_linkmic_bgImg.png */,
BEF675ED2C6B156500A670FB /* live_pk_background_image.png */,
00A1DCF62CA6B2C50000541F /* liveRoom_patAudience_pat_animate.webp */,
......@@ -7585,6 +7591,8 @@
BED65B742C5CE68700668116 /* FUSFilterItemCell.xib in Resources */,
BED65B8D2C5CE6EA00668116 /* FUSLiveTreasureBoxGrabView.xib in Resources */,
BEF6777B2C6B156600A670FB /* PK_Win_Animation_46@2x.png in Resources */,
3E4DC89A2F580A46003070EC /* live_link_mic_boy_speak_anim.webp in Resources */,
3E4DC89B2F580A46003070EC /* live_link_mic_girl_speak_anim.webp in Resources */,
BEF6779D2C6B156600A670FB /* pk_win_streak_2@3x.png in Resources */,
BEF677932C6B156600A670FB /* PK_Win_Animation_70@2x.png in Resources */,
BEF677372C6B156600A670FB /* PK_Start_Animation_4@2x.png in Resources */,
......
......@@ -2370,6 +2370,32 @@ typedef NS_ENUM(NSInteger, FUSStreamState) {
self.pipController.videoGravity = AVLayerVideoGravityResizeAspectFill;
self.pipController.delegate = self;
[self.pipController prepareWithCompletion:nil];
CGRect rect = FUSSwiftLiveHelper.shared.pkHelper.ocViewModel.currentPKState == FFPKStateNone ? CGRectMake(0, 0, UIView.fus_screenW, UIView.fus_screenW * 16 / 9) : CGRectMake(0, 0, UIView.fus_screenW, UIView.fus_screenW * 16 / 18);
[self setPictureInPictureVideoFrame:rect];
__weak typeof(self) weakSelf = self;
self.playView.playTypeDidChangedHandler = ^(FUSStreamPlayViewPlayType type) {
switch (type) {
case FUSStreamPlayViewPlayTypeNewPK:
{
CGRect rect = CGRectMake(0, 0, UIView.fus_screenW, UIView.fus_screenW * 16 / 18);
[weakSelf setPictureInPictureVideoFrame:rect];
break;
}
case FUSStreamPlayViewPlayTypePK:
case FUSStreamPlayViewPlayTypeNormal:
case FUSStreamPlayViewPlayTypeLinkMic:
[weakSelf setPictureInPictureVideoFrame:weakSelf.view.bounds];
break;
default:
break;
}
};
}
self.pipController.contentView = self.playView;
}
......
......@@ -20,7 +20,7 @@
//
// VELPictureInPictureController.h
// VELLiveDemo
//
//
// Created by Volcano Engine Team on 2023/08/30.
//
// Copyright (c) 2023/08/30 Beijing Volcano Engine Technology Ltd.
......@@ -31,8 +31,8 @@
#import <AVKit/AVKit.h>
#import <AVFoundation/AVFoundation.h>
//#import <VELCommon/VELCommon.h>
// 是否开启 iOS 15 以下画中画支持
// 会调用未公开 API, 可能会有审核风险,请业务自行决定是否开启
// 是否开启 iOS 15 以下画中画支持
// 会调用未公开 API, 可能会有审核风险,请业务自行决定是否开启
#define VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15 1
#ifndef VELPipLog
......@@ -52,27 +52,27 @@ NS_ASSUME_NONNULL_BEGIN
@class VELPictureInPictureController;
@protocol VELPictureInPictureDelegate <NSObject>
@optional
// 画中画准备完成
// 画中画准备完成
- (void)pictureInPictureIsReady:(VELPictureInPictureController *)pictureInPicture;
// 画中画将要开始
// 画中画将要开始
- (void)pictureInPictureWillStart:(VELPictureInPictureController *)pictureInPicture;
// 画中画已经开始
// 画中画已经开始
- (void)pictureInPictureDidStart:(VELPictureInPictureController *)pictureInPicture;
// 画中画启动失败
// 画中画启动失败
- (void)pictureInPicture:(VELPictureInPictureController *)pictureInPicture
failedToStartWithError:(NSError *)error;
// 画中画将要结束
// 画中画将要结束
- (void)pictureInPictureWillStop:(VELPictureInPictureController *)pictureInPicture isUserStop:(BOOL)isUserStop;
// 画中画已经结束
// 画中画已经结束
- (void)pictureInPictureDidStop:(VELPictureInPictureController *)pictureInPicture isUserStop:(BOOL)isUserStop;
// 画中画将要结束,需要重新布局原来的画面
// 如果没有实现当前代理,内部会自动布局,contentView 和其父视图大小完全一致
// 画中画将要结束,需要重新布局原来的画面
// 如果没有实现当前代理,内部会自动布局,contentView 和其父视图大小完全一致
- (void)pictureInPicture:(VELPictureInPictureController *)pictureInPicture
restoreUserInterfaceWithCompletionHandler:(void (^)(BOOL restored))completionHandler
isUserStop:(BOOL)isUserStop;
......@@ -101,97 +101,98 @@ typedef NS_ENUM(NSInteger, VELPictureInPictureType) {
VELPictureInPictureTypeContentSourceAVPlayer API_AVAILABLE(ios(15.0)) = 2,
#endif
VELPictureInPictureTypeContentSource API_AVAILABLE(ios(15.0)) = 3,
// 自动根据系统选择
// 自动根据系统选择
VELPictureInPictureTypeAuto = 4
};
@class VELPictureInPictureController;
// 画中画准备完成回调
// 画中画准备完成回调
typedef void (^VELPipPrepareCompletionBlock)(VELPictureInPictureController * pipController, NSError *_Nullable error);
// 基类,主要处理不同场景下的相同业务逻辑
// 基类,主要处理不同场景下的相同业务逻辑
@interface VELPictureInPictureController : NSObject
// 需要小窗的视图
// 需要小窗的视图
@property (nonatomic, weak, nullable) UIView *contentView;
// contentView 所在的容器
// contentView 所在的容器
@property (nonatomic, weak, nullable) UIViewController *contentController;
// 如果配置了 contentController 并且这个值设置为 YES。
// 会在画中画启动的时候,把控制器隐藏。画中画结束后,自动展示该控制器
// 自动隐藏 contentController 默认 NO
// 如果配置了 contentController 并且这个值设置为 YES。
// 会在画中画启动的时候,把控制器隐藏。画中画结束后,自动展示该控制器
// 自动隐藏 contentController 默认 NO
@property (nonatomic, assign) BOOL autoHideContentController;
// 视频在 contentView 中的尺寸布局,用来存放画中画占位视图,如果不设置,内部自动计算
// 主要用于视频在非填充模式下,会有黑边,再设置占位视图的时候,尺寸大小不正确会有 UI 突变的问题
// 默认取 contentView.frame
// 视频在 contentView 中的尺寸布局,用来存放画中画占位视图,如果不设置,内部自动计算
// 主要用于视频在非填充模式下,会有黑边,再设置占位视图的时候,尺寸大小不正确会有 UI 突变的问题
// 默认取 contentView.frame
@property (nonatomic, assign) CGRect videoFrame;
// 画中画占位视图,可以在画中画启动后,自行重新布局,内部初始会使用 contentView 的尺寸
// 画中画占位视图,可以在画中画启动后,自行重新布局,内部初始会使用 contentView 的尺寸
@property (nonatomic, strong, readonly) UIView *pipHolderView;
// 代理回调
// 代理回调
@property (nonatomic, weak) id <VELPictureInPictureDelegate> delegate;
// 画中画填充模式
// 默认: AVLayerVideoGravityResize
// 画中画填充模式
// 默认: AVLayerVideoGravityResize
@property (nonatomic, copy) AVLayerVideoGravity videoGravity;
// 进入后台时,自动弹出小窗, 默认 YES
// 如果配置了 YES,画中画配置完成后,会在后台持续循环播放黑帧视频,App 进入后台后,会自动弹出小窗
// 进入后台时,自动弹出小窗, 默认 YES
// 如果配置了 YES,画中画配置完成后,会在后台持续循环播放黑帧视频,App 进入后台后,会自动弹出小窗
@property (nonatomic, assign) BOOL canStartPictureInPictureAutomaticallyFromInline API_AVAILABLE(ios(14.2));
// 是否可以调用 start 方法开启画中画
// 是否可以调用 start 方法开启画中画
@property (nonatomic, assign, readonly) BOOL canStartPictureInPicture;
// 在手动启动画中画失败后,自动降级重试, 默认 NO
// 如果开启,iOS 15 及以上系统的降级顺序是 ContentSource -> AVPlayerViewController -> AVPlayer
// 如果开启,iOS 15 及以上系统的降级顺序是 AVPlayerViewController -> AVPlayer
// 在手动启动画中画失败后,自动降级重试, 默认 NO
// 如果开启,iOS 15 及以上系统的降级顺序是 ContentSource -> AVPlayerViewController -> AVPlayer
// 如果开启,iOS 15 及以上系统的降级顺序是 AVPlayerViewController -> AVPlayer
@property (nonatomic, assign, readonly) BOOL downgradeWhenStartFailed;
// 画中画状态
// 画中画状态
@property (atomic, assign, readonly) VELPictureInPictureState state;
// 画中画类型
// 画中画类型
@property (atomic, assign, readonly) VELPictureInPictureType type;
// 初始化画中画控制器
// 初始化画中画控制器
- (instancetype)initWithType:(VELPictureInPictureType)type NS_DESIGNATED_INITIALIZER;
// 初始化画中画控制器
// 初始化画中画控制器
- (instancetype)initWithContentView:(UIView *)contentView NS_DESIGNATED_INITIALIZER;
// 初始化画中画控制器
// 初始化画中画控制器
- (instancetype)initWithType:(VELPictureInPictureType)type contentView:(nullable UIView *)contentView NS_DESIGNATED_INITIALIZER;
// 准备
// 准备
- (void)prepareWithCompletion:(nullable VELPipPrepareCompletionBlock)completion;
// 主动开启画中画
// 注意:只有 App 有声音播放时,才能正常启动画中画
// 主动开启画中画
// 注意:只有 App 有声音播放时,才能正常启动画中画
- (void)startPictureInPicture;
// 停止当前正在展示的画中画,当进入后台,还会自动打开画中画
// 停止当前正在展示的画中画,当进入后台,还会自动打开画中画
- (void)stopPictureInPicture;
// 销毁当前画中画,不会再推入后台后自动开启画中画
// 销毁当前画中画,不会再推入后台后自动开启画中画
- (void)destroyPictureInPicture;
// iOS 15 && type == VELPictureInPictureTypeContentSource 时使用
// iOS 15 && type == VELPictureInPictureTypeContentSource 时使用
- (void)enqueuePixelBuffer:(CVPixelBufferRef)pixelBuffer;
// iOS 15 && type == VELPictureInPictureTypeContentSource 时使用
// iOS 15 && type == VELPictureInPictureTypeContentSource 时使用
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
// 销毁画中画所有实例,调用此方法后,不会在后台的时候自动弹出画中画,可以释放一部分内存
// 销毁画中画所有实例,调用此方法后,不会在后台的时候自动弹出画中画,可以释放一部分内存
+ (void)destroyPictureInPicture;
// 是否支持画中画
// 是否支持画中画
+ (BOOL)isPictureInPictureSupported;
// 当前是否已经有画中画在播放
// 当前是否已经有画中画在播放
+ (BOOL)isPictureInPictureStarted;
@end
......
......@@ -20,7 +20,7 @@
//
// VELPictureInPictureController.m
// VELLiveDemo
//
//
// Created by Volcano Engine Team on 2023/08/30.
//
// Copyright (c) 2023/08/30 Beijing Volcano Engine Technology Ltd.
......@@ -81,6 +81,22 @@ FOUNDATION_EXTERN_INLINE void vel_sync_in_main_queue(dispatch_block_t block) {
@property (nonatomic, copy) AVLayerVideoGravity videoGravity;
- (void)rebuildSamplelayer;
@end
// MARK: - interface VELPipContentSourceProvider
@interface VELPipContentSourceProvider : NSObject
@property (nonatomic, assign) CGSize videoSize;
@property (nonatomic, strong) VELPipSampleBufferView *sampleBufferView;
@property (nonatomic, strong) AVSampleBufferDisplayLayer *sampleDisplayLayer;
@property (nonatomic, assign) CVPixelBufferRef darkPixelBuffer;
@property (nonatomic, strong) dispatch_source_t darkFrameTimer;
@property (nonatomic, strong) dispatch_queue_t darkFrameQueue;
@property (nonatomic, copy) AVLayerVideoGravity videoGravity;
@property (nonatomic, copy) void (^sampleDisplayLayerChanged)(VELPipContentSourceProvider *provider);
- (void)enqueuePixelBuffer:(CVPixelBufferRef)pixelBuffer;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)destroy;
@end
static BOOL vel_pip_is_started = NO;
// MARK: interface VELPictureInPictureController
@interface VELPictureInPictureController ()
......@@ -194,7 +210,7 @@ static VELPictureInPictureController *vel_instance_pip = nil;
self.type = type;
self.contentView = contentView;
self.originPipType = type;
self.videoGravity = AVLayerVideoGravityResizeAspectFill;//AVLayerVideoGravityResize;
self.videoGravity = AVLayerVideoGravityResizeAspect;//AVLayerVideoGravityResize;
self.autoHideContentController = NO;
if (@available(iOS 14.2, *)) {
self.canStartPictureInPictureAutomaticallyFromInline = NO;
......@@ -205,19 +221,22 @@ static VELPictureInPictureController *vel_instance_pip = nil;
VELPipLog(LOG_TAG, @"pip dealloc %@", @(self.type));
}
// 准备
// 准备
- (void)prepareWithCompletion:(VELPipPrepareCompletionBlock)completion {
// NSAssert(self.delegate != nil, @"delegate cannot be nil");
self.state = VELPictureInPictureStatePreparing;
self.isRestoreUserInterface = NO;
self.prepareCompletionBlock = completion;
{
// 这部分代码,可根据自身业务需求进行配置
// 例如是否需要和别的 App 同时播放音频
NSError *error = nil;
AVAudioSessionCategoryOptions categoryOptions = AVAudioSessionCategoryOptionMixWithOthers | AVAudioSessionCategoryOptionAllowBluetooth | AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetoothA2DP;
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback withOptions:categoryOptions error:&error];
[[AVAudioSession sharedInstance] setActive:YES error:nil];
dispatch_async(dispatch_get_global_queue(0, 0), ^{
// 这部分代码,可根据自身业务需求进行配置
// 例如是否需要和别的 App 同时播放音频
NSError *error = nil;
AVAudioSessionCategoryOptions categoryOptions = AVAudioSessionCategoryOptionMixWithOthers | AVAudioSessionCategoryOptionAllowBluetooth | AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionAllowBluetoothA2DP;
[[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayback withOptions:categoryOptions error:&error];
[[AVAudioSession sharedInstance] setActive:YES error:nil];
});
// if (error) {
// VELPipLog(LOG_TAG, @"set audio session playback error %@", error);
// [self notifyDeleteWithError:error];
......@@ -227,7 +246,7 @@ static VELPictureInPictureController *vel_instance_pip = nil;
[self setupPictureInPictureController];
}
// 开启画中画
// 开启画中画
- (void)startPictureInPicture {
self.needStartWhenPrepared = YES;
[self _startPictureInPicture];
......@@ -262,7 +281,7 @@ static VELPictureInPictureController *vel_instance_pip = nil;
[self.pipController startPictureInPicture];
}
// 停止画中画
// 停止画中画
- (void)stopPictureInPicture {
self.needStartWhenPrepared = NO;
#if VEL_PICTURE_IN_PICTURE_ENABLE_PIP_BELOW_IOS_15
......@@ -330,6 +349,7 @@ static VELPictureInPictureController *vel_instance_pip = nil;
return vel_pip_is_started && vel_instance_pip != nil;
}
// MARK: - Private
- (void)autoSelectType {
if (self.type != VELPictureInPictureTypeAuto) {
......@@ -471,12 +491,25 @@ static VELPictureInPictureController *vel_instance_pip = nil;
if (!CGRectEqualToRect(_videoFrame, videoFrame)) {
_videoFrame = videoFrame;
[self setupPipHolderView];
// 新增:同步更新 sourceProvider 的尺寸
if (@available(iOS 15.0, *)) {
if (self.type == VELPictureInPictureTypeContentSource
&& self.sourceProvider) {
self.sourceProvider.videoSize = videoFrame.size;
}
}
// // 2. 刷新播放状态(原逻辑保留)上周不知道怎么弄加了这个自动偶尔成功 这周突然又不行了
// if (@available(iOS 15.0, *)) {
// [self.pipController invalidatePlaybackState];
// }
}
}
- (void)setVideoGravity:(AVLayerVideoGravity)videoGravity {
if (![_videoGravity isEqualToString:videoGravity]) {
_videoGravity = AVLayerVideoGravityResizeAspectFill;//videoGravity.copy;
_videoGravity = videoGravity;//videoGravity.copy;
[self setupVideoGravity];
}
}
......@@ -738,7 +771,7 @@ static VELPictureInPictureController *vel_instance_pip = nil;
}
/// add holder
[self generatedPlacdholderImageViewFor:self.contentView];
// 如果想要做特效转场,自行实现
// 如果想要做特效转场,自行实现
self.storeContentController = self.contentController;
if (self.contentController.presentingViewController) {
[self.contentController.presentingViewController dismissViewControllerAnimated:NO
......@@ -769,7 +802,7 @@ static VELPictureInPictureController *vel_instance_pip = nil;
}
[self generatedPlacdholderImageViewFor:self.contentView];
VELPipLog(LOG_TAG, @"show content controller");
// 如果想要做特效转场,自行实现
// 如果想要做特效转场,自行实现
UIViewController *topVC = [UIViewController vel_pipTopViewController];
if ([topVC isKindOfClass:UINavigationController.class]) {
[(UINavigationController *)topVC pushViewController:self.contentController
......@@ -887,7 +920,7 @@ static VELPictureInPictureController *vel_instance_pip = nil;
}
}
// 宽高误差在 16 像素的间距内,都默认是全屏展示
// 宽高误差在 16 像素的间距内,都默认是全屏展示
- (BOOL)isFullScreenLayout {
return ABS(self.contentOriginFrame.size.width - self.contentOriginSuperView.bounds.size.width) < 16
&& ABS(self.contentOriginFrame.size.height - self.contentOriginSuperView.bounds.size.height) < 16;
......@@ -966,21 +999,6 @@ restoreUserInterfaceForPictureInPictureStopWithCompletionHandler:(void (^)(BOOL
#endif
@end
// MARK: - interface VELPipContentSourceProvider
@interface VELPipContentSourceProvider : NSObject
@property (nonatomic, assign) CGSize videoSize;
@property (nonatomic, strong) VELPipSampleBufferView *sampleBufferView;
@property (nonatomic, strong) AVSampleBufferDisplayLayer *sampleDisplayLayer;
@property (nonatomic, assign) CVPixelBufferRef darkPixelBuffer;
@property (nonatomic, strong) dispatch_source_t darkFrameTimer;
@property (nonatomic, strong) dispatch_queue_t darkFrameQueue;
@property (nonatomic, copy) AVLayerVideoGravity videoGravity;
@property (nonatomic, copy) void (^sampleDisplayLayerChanged)(VELPipContentSourceProvider *provider);
- (void)enqueuePixelBuffer:(CVPixelBufferRef)pixelBuffer;
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer;
- (void)destroy;
@end
// MARK: - interface VELSampleBufferPlaybackDelegate
API_AVAILABLE(ios(15.0), tvos(15.0), macos(12.0)) API_UNAVAILABLE(watchos)
@interface VELSampleBufferPlaybackDelegate : NSObject <AVPictureInPictureSampleBufferPlaybackDelegate>
......@@ -1077,7 +1095,7 @@ static char kAssociatedObjectKey_pipPlaybackDelegate;
}
- (void)setSampleLayerVideoGravity:(AVLayerVideoGravity)videoGravity {
self.sourceProvider.sampleBufferView.videoGravity = AVLayerVideoGravityResizeAspectFill;//videoGravity;
self.sourceProvider.sampleBufferView.videoGravity = videoGravity;//videoGravity;
}
- (void)vel_enqueuePixelBuffer:(CVPixelBufferRef)pixelBuffer {
......@@ -1108,7 +1126,7 @@ static char kAssociatedObjectKey_pipContentSourcePlayerView;
[VELPipLoopPlayer initLoopPlayer];
self.contentSourcePlayerView = [[VELPipAVPlayerView alloc] init];
self.contentSourcePlayerView.playerLayer.player = VELPipLoopPlayer.player;
self.contentSourcePlayerView.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//AVLayerVideoGravityResize;
self.contentSourcePlayerView.playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;//AVLayerVideoGravityResize;
self.pipHolderView = self.contentSourcePlayerView;
AVPictureInPictureControllerContentSource *contentSource = [[AVPictureInPictureControllerContentSource alloc]
initWithPlayerLayer:self.contentSourcePlayerView.playerLayer];
......@@ -1145,7 +1163,7 @@ static char kAssociatedObjectKey_pipContentSourcePlayerView;
}
- (void)setContentSourceAVPlayerLayerVideoGravity:(AVLayerVideoGravity)videoGravity {
self.contentSourcePlayerView.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//videoGravity;
self.contentSourcePlayerView.playerLayer.videoGravity = videoGravity;//videoGravity;
}
@end
......@@ -1165,7 +1183,7 @@ static char kAssociatedObjectKey_pipAVplayerView;
[VELPipLoopPlayer initLoopPlayer];
self.playerView = [[VELPipAVPlayerView alloc] init];
self.playerView.playerLayer.player = VELPipLoopPlayer.player;
self.playerView.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//AVLayerVideoGravityResize;
self.playerView.playerLayer.videoGravity = AVLayerVideoGravityResizeAspect;//AVLayerVideoGravityResize;
self.pipHolderView = self.playerView;
self.pipController = [[AVPictureInPictureController alloc] initWithPlayerLayer:self.playerView.playerLayer];
......@@ -1199,7 +1217,7 @@ static char kAssociatedObjectKey_pipAVplayerView;
}
- (void)setAVPlayerLayerVideoGravity:(AVLayerVideoGravity)videoGravity {
self.playerView.playerLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//videoGravity;
self.playerView.playerLayer.videoGravity = videoGravity;//videoGravity;
}
@end
......@@ -1230,7 +1248,7 @@ static char kAssociatedObjectKey_pipAVPlayerViewController;
self.playerViewController.delegate = self;
self.playerViewController.player = VELPipLoopPlayer.player;
self.playerViewController.allowsPictureInPicturePlayback = YES;
self.playerViewController.videoGravity = AVLayerVideoGravityResizeAspectFill;//AVLayerVideoGravityResize;
self.playerViewController.videoGravity = AVLayerVideoGravityResizeAspect;//AVLayerVideoGravityResize;
// self.playerViewController.showsPlaybackControls = NO; // don't set to NO
if (@available(iOS 14.0, *)) {
......@@ -1268,7 +1286,7 @@ static char kAssociatedObjectKey_pipAVPlayerViewController;
}
}
// 开启画中画
// 开启画中画
- (void)velPlayerVCStartPictureInPicture {
if (!self.velPlayerVCCanStartPictureInPicture) {
VELPipLog(LOG_TAG, @"AVPlayerViewController can not start pip state:%@, check app has audio playing?", @(self.state));
......@@ -1283,7 +1301,7 @@ static char kAssociatedObjectKey_pipAVPlayerViewController;
}
}
// 停止画中画
// 停止画中画
- (void)velPlayerVCStopPictureInPicture {
if (self.state != VELPictureInPictureStateRunning) {
VELPipLog(LOG_TAG, @"AVPlayerViewController not running");
......@@ -1327,7 +1345,7 @@ static char kAssociatedObjectKey_pipAVPlayerViewController;
}
- (void)setAVPlayerVCVideoGravity:(AVLayerVideoGravity)videoGravity {
self.playerViewController.videoGravity = AVLayerVideoGravityResizeAspectFill;//videoGravity;
self.playerViewController.videoGravity = videoGravity;//videoGravity;
}
@end
......@@ -1503,8 +1521,15 @@ static VELPipLoopPlayer *vel_pip_loop_player_instance = nil;
VELPipLog(LOG_TAG, @"loop player prepare player item");
NSURL *url;
if (self.landspace) {
url = [[FUSShowRoomCenterBunble bundle] URLForResource:@"fake_video_for_pk" withExtension:@"mp4"];
} else {
url = [[FUSShowRoomCenterBunble bundle] URLForResource:@"fake_video" withExtension:@"mp4"];
}
AVAsset *asset = [AVAsset assetWithURL:url];
self.playerItem = [[AVPlayerItem alloc] initWithAsset:asset];
}
- (void)setPlayerItem:(AVPlayerItem *)playerItem {
......@@ -1725,6 +1750,14 @@ static char kAssociatedObjectKey_velAutoHideControlsView;
}
- (void)vel_autoHideSampleLayerSubview:(UIView *)view {
if (self == view) {
return;
}
if (NSThread.isMainThread == false) {
FUSLogInfo(@"");
}
[self vel_autoHideSampleLayerSubview:view];
if (self.vel_autoHideSamplePlayerView) {
[self hideSamplePlayerView];
......@@ -1795,7 +1828,7 @@ static char kAssociatedObjectKey_vel_viewController;
- (void)setVideoGravity:(AVLayerVideoGravity)videoGravity {
_videoGravity = videoGravity.copy;
@synchronized (self) {
self.sampleLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//videoGravity;
self.sampleLayer.videoGravity = videoGravity;//videoGravity;
}
}
......@@ -1808,7 +1841,7 @@ static char kAssociatedObjectKey_vel_viewController;
}
self.sampleLayer = [[AVSampleBufferDisplayLayer alloc] init];
self.sampleLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//self.videoGravity;
self.sampleLayer.videoGravity = self.videoGravity;//self.videoGravity;
self.sampleLayer.opaque = YES;
CMTimebaseRef timebase;
CMTimebaseCreateWithSourceClock(nil, CMClockGetHostTimeClock(), &timebase);
......@@ -1860,19 +1893,19 @@ static char kAssociatedObjectKey_vel_viewController;
VELPipLog(LOG_TAG, @"content source provider init");
self.videoSize = CGSizeMake(360, 480);
self.sampleBufferView = [[VELPipSampleBufferView alloc] initWithFrame:CGRectMake(0, 0, 360, 480)];
self.sampleBufferView.videoGravity = AVLayerVideoGravityResizeAspectFill;//self.videoGravity;
self.sampleBufferView.videoGravity = AVLayerVideoGravityResizeAspect;//self.videoGravity;
self.sampleDisplayLayer = self.sampleBufferView.sampleLayer;
[self startDarkFrameTimer];
}
- (void)enqueuePixelBuffer:(CVPixelBufferRef)pixelBuffer {
// 外部送视频帧,停止内部黑帧
// 外部送视频帧,停止内部黑帧
[self stopDarkFrameTimer];
[self _enqueuePixelBuffer:pixelBuffer];
}
- (void)enqueueSampleBuffer:(CMSampleBufferRef)sampleBuffer {
// 外部送视频帧,停止内部黑帧
// 外部送视频帧,停止内部黑帧
[self stopDarkFrameTimer];
[self _enqueueSampleBuffer:sampleBuffer needRetain:YES];
}
......@@ -1966,10 +1999,14 @@ static char kAssociatedObjectKey_vel_viewController;
return;
}
@synchronized (self) {
// 检查layer是否有效并且是否在视图层级中
if (!_sampleDisplayLayer || !_sampleDisplayLayer.superlayer) {
return;
}
[_sampleDisplayLayer enqueueSampleBuffer:sampleBuffer];
if (_sampleDisplayLayer.status == AVQueuedSampleBufferRenderingStatusFailed) {
[_sampleDisplayLayer flush];
// 后台唤醒,重启渲染
// 后台唤醒,重启渲染
if (-11847 == _sampleDisplayLayer.error.code) {
VELPipLog(LOG_TAG, @"content source provider rebuild sampleLayer");
[self.sampleBufferView rebuildSamplelayer];
......@@ -1990,7 +2027,7 @@ static char kAssociatedObjectKey_vel_viewController;
// MARK: - Setter
- (void)setVideoGravity:(AVLayerVideoGravity)videoGravity {
_videoGravity = videoGravity.copy;
self.sampleDisplayLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;//videoGravity;
self.sampleDisplayLayer.videoGravity = videoGravity;//videoGravity;
}
- (void)setVideoSize:(CGSize)videoSize {
......
......@@ -25,6 +25,9 @@ NS_ASSUME_NONNULL_BEGIN
// 直播等级
@property (nonatomic, copy) NSString *lev;
// 性别
@property (nonatomic, copy) NSString *sex;
/// 用户贡献萤火
@property (nonatomic, copy) NSString *userOffer;
......
......@@ -21,16 +21,12 @@ NS_ASSUME_NONNULL_BEGIN
@property (nullable, nonatomic, strong) FUSLinkMicroModel *model;
@property (nonatomic, strong) UIButton *coverBtn;
@property (nonatomic, assign) BOOL cancelAnim; //不需要动画
@property (nonatomic, weak) id<FUSLinkMicroHeaderViewDelegate> delegate;
@property (nonatomic, copy) void(^completeRadarAnimationBlock)(void);
/// 加号的动画
- (void)fus_changePlusBtnAnim:(BOOL)ifAnim;
/// 改变静音动态
/// @param mute 是否静音
......@@ -45,16 +41,13 @@ NS_ASSUME_NONNULL_BEGIN
/// 停止所有动画
- (void)fus_stopAllAnimation;
/// 加号动画
- (void)fus_startPlusAnim;
/// 开始送礼动画
/// @param url 礼物url
- (void)fus_startAnimWithImgUrl:(NSString *)url;
- (void)fus_updateFrame;
/// 取消红点
//- (void)fus_refreshUnreadRedPoint;
@end
......
......@@ -14,17 +14,18 @@
@interface FUSLinkMicroHeaderView ()
@property (nonatomic, strong) FUSRadarAnimImageViewView *faceView;
/// UIImageView
@property (nonatomic, strong) YYAnimatedImageView *speakingAnimImageView;
@property (nonatomic, strong) UIView *unreadPointView;
@property (nonatomic, strong) UIImageView *richImageView;
@property (nonatomic, strong) FUSRichIconView *faceView;
@property (nonatomic, strong) UIImageView *muteImageView;
@property (nonatomic, copy) NSString *lev;
@property (nonatomic, strong) CBAutoScrollLabel *nicknameLabel;
@property (nonatomic, strong) UIView *gradeView;
@property (nonatomic, strong) UILabel *plusLb;
@property (nonatomic, strong) UILabel *gradeLabel;
// 是否完成一轮动画
@property (nonatomic, assign) BOOL isAminCompleted;
......@@ -32,11 +33,7 @@
// 送礼动画的图片
@property (nonatomic, strong) UIImageView *animImgView;
@property (nonatomic, strong) UIImage *coverNormalImage;
@property (nonatomic, strong) UIImage *coverHighlightedImage;
@property (nonatomic, strong) UIImage *coverSelectedImage;
@property (nonatomic, strong) UIButton *coverBtn;
......@@ -49,32 +46,17 @@
self = [super initWithFrame:frame];
if (self) {
_faceView = [[FUSRadarAnimImageViewView alloc] initWithFrame:CGRectMake(0, 0, self.height - 10, self.height - 10)];
_faceView.center = CGPointMake(self.width/2.0, self.height/2.0);
_faceView.userInteractionEnabled = NO;
_faceView.raderColor = [UIColor whiteColor];
_faceView.maxRadarScale = 1.5;
_faceView.totalRadarRingCount = 4;
_faceView.radarSpeed = 1.5;
_coverBtn = [UIButton buttonWithType:UIButtonTypeCustom];
_coverBtn.frame = self.bounds;
_coverBtn.imageView.contentMode = UIViewContentModeScaleAspectFit;
_plusLb = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, 20, 10)];
_plusLb.text = @"+1";
_plusLb.font = [UIFont systemFontOfSize:10];
_plusLb.textColor = [UIColor colorWithHex:@"#00A0E9"];
_plusLb.centerX = self.centerX;
_plusLb.alpha = 0;
_isAminCompleted = YES;
_faceView = [[FUSRichIconView alloc] initWithFrame:CGRectMake(10, 0, 32, 32)];
_faceView.centerY = self.height / 2.0f;
self.speakingAnimImageView = [[YYAnimatedImageView alloc] initWithFrame:CGRectMake(0, 0, 65, 65)];
self.speakingAnimImageView.center = self.faceView.center;
self.speakingAnimImageView.hidden = YES;
[self addSubview:self.speakingAnimImageView];
[self addSubview:_faceView];
[self addSubview:_plusLb];
_muteImageView = [[UIImageView alloc] initWithFrame:_faceView.frame];
_muteImageView.center = _muteImageView.center;
_muteImageView.contentMode = UIViewContentModeCenter;
......@@ -85,17 +67,34 @@
_muteImageView.hidden = YES;
[self addSubview:_muteImageView];
_richImageView = [[UIImageView alloc] initWithFrame:CGRectMake(0, 0, 28, 14)];
_richImageView.contentMode = UIViewContentModeScaleAspectFit;
_richImageView.centerX = _faceView.centerX;
_richImageView.centerY = _faceView.bottom;
[self addSubview:_richImageView];
CGFloat nicknameLabelX = self.faceView.right + 7;
self.nicknameLabel = [[CBAutoScrollLabel alloc] initWithFrame:CGRectMake(nicknameLabelX, self.faceView.y + 2, self.width - nicknameLabelX, 15)];
self.nicknameLabel.font = [UIFont fus_themeBoldFont:11];
self.nicknameLabel.fadeLength = 0;
self.nicknameLabel.textColor = [UIColor whiteColor];
self.nicknameLabel.shadowColor = [UIColor colorWithHex:@"000000" alpha:0.3];;
self.nicknameLabel.shadowOffset = CGSizeMake(0.8, 0.5);
[self addSubview:self.nicknameLabel];
self.gradeView = [[UIView alloc] initWithFrame:CGRectMake(self.faceView.centerX, self.nicknameLabel.bottom + 2, 40, 10)];
self.gradeView.backgroundColor = [UIColor colorWithWhite:0 alpha:0.4];
self.gradeView.layer.cornerRadius = 5;
self.gradeView.layer.masksToBounds = YES;
[self insertSubview:self.gradeView belowSubview:self.speakingAnimImageView];
self.gradeLabel = [[UILabel alloc] initWithFrame:CGRectMake(nicknameLabelX, 0, 15, 10)];
self.gradeLabel.font = [UIFont fus_themeBoldFont:7];
self.gradeLabel.textColor = [UIColor whiteColor];
[self addSubview:self.gradeLabel];
_animImgView = [[UIImageView alloc] initWithFrame:_faceView.frame];
[self addSubview:_animImgView];
[self addSubview:_coverBtn];
[_coverBtn addTarget:self action:@selector(onClickAction:) forControlEvents:UIControlEventTouchUpInside];
self.coverBtn = [UIButton buttonWithType:UIButtonTypeCustom];
[self.coverBtn addTarget:self action:@selector(onClickAction:) forControlEvents:UIControlEventTouchUpInside];
[self addSubview:self.coverBtn];
}
return self;
}
......@@ -108,218 +107,152 @@
}
// Binds `model` to this link-mic header cell and refreshes the avatar,
// nickname, grade, rich-level badge and speaking animation state.
//
// NOTE(review): this method body appears to be a corrupted merge of two
// revisions — code follows an unconditional `return;`, `fus_changeMuteStatus:`
// is invoked twice, `level` is re-declared in an inner scope, and the braces
// do not balance. Reconcile against the pre-merge revisions by hand.
- (void)setModel:(FUSLinkMicroModel *)model{
// YES when a different user now occupies this slot.
BOOL modelHasChanged = _model.uid.integerValue != model.uid.integerValue;
_model = model;
NSInteger level = model.level.integerValue;
// nil model: reset the cell to its empty appearance and bail out.
if (!model) {
[self.coverBtn setImage:nil forState:UIControlStateNormal];
[self.coverBtn setImage:nil forState:UIControlStateHighlighted];
[self.coverBtn setImage:nil forState:UIControlStateSelected];
self.faceView.imageView.image = nil;
_richImageView.image = nil;
_muteImageView.hidden = YES;
self.transform = CGAffineTransformMakeScale(1.f, 1.f);
[self fus_stopRadarAnimation];
return;
// NOTE(review): everything below this `return;` is unreachable as written —
// this looks like the start of the newer revision's body spliced in here.
// Pick the speaking-animation webp by gender (1 presumably = male — verify).
if (model.sex.integerValue == 1) {
self.speakingAnimImageView.image = [FUSShowRoomCenterBunble webpImageName:@"live_link_mic_boy_speak_anim"];
} else {
self.speakingAnimImageView.image = [FUSShowRoomCenterBunble webpImageName:@"live_link_mic_girl_speak_anim"];
}
// Whether the mic is open (shows/hides the mute badge).
[self fus_changeMuteStatus:!model.isOpenMic];
// User-defaults switch: enlarge the avatar while the user is speaking.
BOOL enlargeFace = [[[NSUserDefaults standardUserDefaults] objectForKey:FUSLiveUDKeys.fus_LIVE_LINKMIC_FACE_ENLARGE_SWITCH] boolValue];
if (_cancelAnim) {
enlargeFace = NO;
}
// uid == 0 marks the empty/placeholder slot that shows the invite cover art.
if (model.uid.integerValue == 0) {
self.faceView.imageView.image = nil;
// Cover images are cached lazily on first use.
if (self.coverNormalImage == nil) {
self.coverNormalImage = [FUSShowRoomCenterBunble imageNamed:model.face];
}
if (modelHasChanged) {
if (self.coverHighlightedImage == nil) {
self.coverHighlightedImage = [FUSShowRoomCenterBunble imageNamed:model.hightlightImgUrl];
// NOTE(review): interleaved hunk — this `level` computation (hidden user =>
// -1, richPower => real level) belongs to the occupied-slot branch of the
// newer revision, not inside this image-caching block.
NSInteger level = 0;
if (model.isHide.boolValue) {
level = -1;
}
if (self.coverSelectedImage == nil) {
self.coverSelectedImage = [FUSShowRoomCenterBunble animatedImageNamed:@"live_link_micro_request_" duration:1];
if (![NSDictionary isNullWithDictionary:model.privilege]) {
if ([model.privilege[@"richPower"] boolValue]) {
level = model.level.integerValue;
}
}
[self.coverBtn setImage:self.coverNormalImage forState:UIControlStateNormal];
[self.coverBtn setImage:self.coverHighlightedImage forState:UIControlStateHighlighted];
[self.coverBtn setImage:self.coverSelectedImage forState:UIControlStateSelected];
[self.coverBtn setImageEdgeInsets:UIEdgeInsetsMake(5, 5, 5, 5)];
_richImageView.image = nil;
self.transform = CGAffineTransformMakeScale(1.f, 1.f);
[self fus_stopRadarAnimation];
}else{
// Occupied slot: show the remote avatar instead of cover art.
[self.coverBtn setImageEdgeInsets:UIEdgeInsetsMake(0, 0, 0, 0)];
// Avatar
[self.faceView.imageView setLiveFaceWebImageWithSubURLString:model.face placeholder:nil];
[self.coverBtn setImage:nil forState:UIControlStateNormal];
[self.coverBtn setImage:nil forState:UIControlStateHighlighted];
[self.coverBtn setImage:nil forState:UIControlStateSelected];
[self.faceView fus_setupLiveIconWithFacePath:model.face level:level];
self.nicknameLabel.text = model.nickname;
}
[self.nicknameLabel scrollLabelIfNeeded];
self.gradeLabel.text = model.userOffer;
[self.gradeLabel sizeToFit];
// Whether the mic is open — NOTE(review): second invocation; see above.
[self fus_changeMuteStatus:!model.isOpenMic];
// NOTE(review): debug NSLog left in; also %ld vs what looks like a BOOL —
// remove or wrap in a debug macro before release.
NSLog(@"suolong:model.isSpeaking=== %ld", model.isSpeaking);
// Start the speaking/radar animation only when a previous round finished
// (_isAminCompleted) and the user is audibly speaking with an open mic.
if (model.isSpeaking && _isAminCompleted && model.isOpenMic && _muteImageView.hidden) {
[NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(fus_stopHeaderViewRadarAnimation) object:self];
[NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(fus_stopSpeakingAnimation) object:self];
// Auto-stop 2 seconds after the last speaking signal.
[self performSelector:@selector(fus_stopHeaderViewRadarAnimation) withObject:self afterDelay:2];
_isAminCompleted = NO;
model.oldSpeaking = YES;
if (enlargeFace) {
self.transform = CGAffineTransformMakeScale(1.2f, 1.2f);
}
[self fus_startRadarAnimation];
[self fus_startSpeakingAnimation];
}else if(_isAminCompleted){
model.oldSpeaking = NO;
self.transform = CGAffineTransformMakeScale(1.f, 1.f);
[self fus_stopRadarAnimation];
// [NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(fus_stopHeaderViewRadarAnimation) object:self];
}
// Rich (wealth) level badge.
// NOTE(review): `isNull:` here vs `isNullWithDictionary:` above — confirm
// which NSDictionary category helper is the intended one.
if (![NSDictionary isNull:model.privilege]) {
if ([model.privilege[@"richPower"] boolValue]) {
_richImageView.hidden = NO;
UIImage *vipImage = [UIImage fus_imageWithLevel:model.level.integerValue];;
_richImageView.image = vipImage;
if (vipImage) {
// Preserve the badge's aspect ratio at its fixed height.
_richImageView.width = ceil(_richImageView.height / vipImage.size.height * vipImage.size.width);
}
}else{
_richImageView.hidden = YES;
_richImageView.image = nil;
// NOTE(review): early return skips the sameLev/_lev/frame update below —
// confirm this is intentional for non-rich users.
return;
}
}
BOOL sameLev = NO;
if (_lev.integerValue == level) {
sameLev = YES;
[self fus_stopSpeakingAnimation];
[NSObject cancelPreviousPerformRequestsWithTarget:self selector:@selector(fus_stopSpeakingAnimation) object:self];
}
_lev = model.level;
[self fus_updateFrame:modelHasChanged];
}
/// Standard UIKit layout pass. All frame math lives in fus_updateFrame:, so
/// this simply delegates with a full relayout.
///
/// Fixes merge corruption in the previous text: [super layoutSubviews] was
/// invoked twice, the stale pre-refactor frame code (centered avatar layout)
/// remained alongside the new fus_updateFrame: call, and two stray statements
/// sat outside the method body.
- (void)layoutSubviews{
    [super layoutSubviews];
    [self fus_updateFrame:YES];
}
/// Convenience overload: performs a full relayout (equivalent to passing YES).
///
/// Fixes merge corruption: an older, unterminated duplicate definition of the
/// same selector (`fus_updateFrame{ [self layoutSubviews];`) followed this
/// method and has been removed.
- (void)fus_updateFrame {
    [self fus_updateFrame:YES];
}
/// Recomputes subview frames for this link-mic header cell.
/// @param layoutChanged Pass YES to also reposition the avatar, nickname and
///        grade label (needed when the bound model or the cell bounds change);
///        NO refreshes only the views that track the avatar.
- (void)fus_updateFrame:(BOOL)layoutChanged {
    if (layoutChanged) {
        // Avatar: fixed 32pt square pinned 10pt from the left, vertically centered.
        _faceView.frame = CGRectMake(10, 0, 32, 32);
        _faceView.centerY = self.height / 2.0f;
        // Text column starts just to the right of the avatar.
        CGFloat labelLeft = self.faceView.right + 7;
        self.nicknameLabel.frame = CGRectMake(labelLeft, self.faceView.y + 2, self.width - labelLeft, 15);
        self.gradeLabel.x = labelLeft;
    }
    // Views that always track the avatar, independent of model changes.
    self.speakingAnimImageView.center = self.faceView.center;
    _muteImageView.frame = _faceView.frame;
    _muteImageView.layer.cornerRadius = _muteImageView.height / 2.0;
    // Grade pill: size to the label text, pinned to a fixed 10pt height.
    [self.gradeLabel sizeToFit];
    self.gradeLabel.height = 10;
    self.gradeView.width = self.gradeLabel.right + 5 - self.faceView.centerX;
    self.gradeLabel.centerY = self.gradeView.centerY;
    _animImgView.frame = _faceView.frame;
    self.coverBtn.frame = self.bounds;
}
/// Marks the current speaking-animation round as finished, notifies the
/// completion callback, and tears down all running animations.
///
/// Fixes merge corruption: the `_isAminCompleted` assignment and the
/// completion-block invocation were duplicated, so the callback fired twice
/// per stop.
- (void)fus_stopHeaderViewRadarAnimation{
    _isAminCompleted = YES;
    if (_completeRadarAnimationBlock) {
        _completeRadarAnimationBlock();
    }
    [self fus_stopAllAnimation];
}
/// Starts the radar (ripple) animation on the avatar view. A muted user never
/// animates: if the mute badge is visible, any running radar is stopped instead.
///
/// Fixes merge corruption: the two start methods were interleaved and this
/// one had lost its closing brace. Both are restored as complete methods —
/// both are still called from setModel:.
- (void)fus_startRadarAnimation{
    if (!_muteImageView.hidden) {
        [self fus_stopRadarAnimation];
        return;
    }
    [self.faceView fus_startRadarAnimation];
}

/// Starts the speaking (webp) animation overlay on the avatar. Like the radar,
/// it is suppressed — and actively stopped — while the user is muted.
- (void)fus_startSpeakingAnimation{
    if (!_muteImageView.hidden) {
        [self fus_stopSpeakingAnimation];
        return;
    }
    self.speakingAnimImageView.hidden = NO;
    [self.speakingAnimImageView startAnimating];
}
/// Stops the radar (ripple) animation on the avatar view.
///
/// Fixes merge corruption: this method had lost its closing brace where the
/// newer fus_stopSpeakingAnimation was interleaved into it. Both are restored
/// as complete methods — both are still called elsewhere in this file.
- (void)fus_stopRadarAnimation{
    [self.faceView fus_stopRadarAnimation];
}

/// Stops and hides the speaking (webp) animation overlay.
- (void)fus_stopSpeakingAnimation{
    self.speakingAnimImageView.hidden = YES;
    [self.speakingAnimImageView stopAnimating];
}
/// Resets all speaking-related animation state: marks the round complete,
/// clears the model's speaking flag, restores the identity transform (undoing
/// the enlarge-face scale), and stops both the radar and speaking animations.
///
/// Fixes merge corruption: the `_isAminCompleted` and `oldSpeaking`
/// assignments were duplicated by the interleaved diff.
- (void)fus_stopAllAnimation{
    _isAminCompleted = YES;
    _model.oldSpeaking = NO;
    self.transform = CGAffineTransformMakeScale(1.f, 1.f);
    [self fus_stopRadarAnimation];
    [self fus_stopSpeakingAnimation];
}
/// Shows the unread red-dot badge when there are pending link-mic requests.
/// Only the empty/placeholder slot (uid == 0) ever displays the badge.
- (void)fus_refreshUnreadRedPoint{
    // Unread request count persisted by the link-mic request list.
    NSInteger unreadCount = [[[NSUserDefaults standardUserDefaults] objectForKey:FUSLiveUDKeys.fus_LIVE_NEW_LINKMIC_UNREAD] integerValue];
    BOOL showBadge = (unreadCount > 0) && (_model.uid.intValue == 0);
    _unreadPointView.hidden = !showBadge;
}
/// Toggles the cover button's selected state. Selecting it shows the animated
/// "requesting" image set for UIControlStateSelected in setModel:.
/// @param ifAnim YES to show the request animation, NO to return to normal.
- (void)fus_changePlusBtnAnim:(BOOL)ifAnim{
_coverBtn.selected = ifAnim;
}
/// Updates the mute indicator for this slot.
/// @param mute YES when the user's microphone is muted.
- (void)fus_changeMuteStatus:(BOOL)mute{
    // The mute badge is visible only while muted.
    _muteImageView.hidden = !mute;
    if (mute) {
        // Muting cancels any in-flight radar/speaking animation immediately.
        [self fus_stopHeaderViewRadarAnimation];
        [self fus_stopAllAnimation];
    }
}
/// Plays the floating "plus" label animation: the label is reset to the top of
/// the cell at full opacity, then rises 30pt while fading out over one second.
///
/// Fixes merge corruption: a stray copy of the fus_changeMuteStatus: body
/// followed the animation block, referencing `mute` which is not in scope
/// here (a compile error). That residue has been removed.
- (void)fus_startPlusAnim{
    _plusLb.y = 0;
    _plusLb.alpha = 1;
    [UIView animateWithDuration:1 animations:^{
        self.plusLb.y -= 30;
        self.plusLb.alpha = 0;
    }];
}
/// Plays a one-shot effect animation over the avatar: loads the image at
/// `url`, then runs a 1.5s keyframe "pulse" (scale 1 -> 1.5 -> 1 -> 1.3 -> 1.5)
/// combined with a fade-out. fillMode/removedOnCompletion keep the layer at
/// its final transparent state so the image does not pop back in.
/// @param url Sub-URL of the effect image to display.
- (void)fus_startAnimWithImgUrl:(NSString *)url{
    _animImgView.image = nil;
    // [_animImgView.layer removeAllAnimations];
    // NOTE(review): switched to the placeholder-taking variant used by the
    // newer revision of this file — confirm both selectors exist and behave
    // identically with a nil placeholder.
    [_animImgView setWebImageWithSubURLString:url placeholder:nil];
    CAKeyframeAnimation *scaleAnimation = [CAKeyframeAnimation animationWithKeyPath:@"transform.scale"];
    scaleAnimation.values = @[@1,@1.5,@1,@1.3,@1.5];
    scaleAnimation.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
    scaleAnimation.duration = 1.5;
    CAKeyframeAnimation *opacityAnim = [CAKeyframeAnimation animationWithKeyPath:@"opacity"];
    opacityAnim.values = @[@1,@1,@1,@1,@1,@0.75,@0.7,@0.55,@0];
    opacityAnim.timingFunction = [CAMediaTimingFunction functionWithName:kCAMediaTimingFunctionLinear];
    opacityAnim.duration = 1.5;
    CAAnimationGroup *animationGroup = [[CAAnimationGroup alloc] init];
    animationGroup.animations = @[opacityAnim,scaleAnimation];
    animationGroup.duration = 1.5f;
    animationGroup.fillMode = kCAFillModeForwards;
    animationGroup.removedOnCompletion = NO;
    [_animImgView.layer addAnimation:animationGroup forKey:nil];
}
/// Logs deallocation to help debug link-mic cell lifetime.
///
/// Fixes merge corruption: a duplicate copy of the fus_startAnimWithImgUrl:
/// body (referencing the out-of-scope `url`, a compile error) had been
/// interleaved into dealloc, plus a stray statement before the method. That
/// residue has been removed; the real animation code lives in
/// fus_startAnimWithImgUrl: above.
- (void)dealloc {
    FUSLogInfo(@"%s",__func__);
}
@end
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment