diff --git a/packages/video_player/.fvmrc b/packages/video_player/.fvmrc new file mode 100644 index 00000000000..d1669636689 --- /dev/null +++ b/packages/video_player/.fvmrc @@ -0,0 +1,3 @@ +{ + "flutter": "3.38.0" +} \ No newline at end of file diff --git a/packages/video_player/.gitignore b/packages/video_player/.gitignore new file mode 100644 index 00000000000..9e366fe3b73 --- /dev/null +++ b/packages/video_player/.gitignore @@ -0,0 +1,3 @@ + +# FVM Version Cache +.fvm/ \ No newline at end of file diff --git a/packages/video_player/video_player/example/ios/Flutter/AppFrameworkInfo.plist b/packages/video_player/video_player/example/ios/Flutter/AppFrameworkInfo.plist index 7c569640062..1dc6cf7652b 100644 --- a/packages/video_player/video_player/example/ios/Flutter/AppFrameworkInfo.plist +++ b/packages/video_player/video_player/example/ios/Flutter/AppFrameworkInfo.plist @@ -21,6 +21,6 @@ CFBundleVersion 1.0 MinimumOSVersion - 12.0 + 13.0 diff --git a/packages/video_player/video_player/example/ios/Podfile b/packages/video_player/video_player/example/ios/Podfile index 01d4aa611bb..17adeb14132 100644 --- a/packages/video_player/video_player/example/ios/Podfile +++ b/packages/video_player/video_player/example/ios/Podfile @@ -1,5 +1,5 @@ # Uncomment this line to define a global platform for your project -# platform :ios, '12.0' +# platform :ios, '13.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. ENV['COCOAPODS_DISABLE_STATS'] = 'true' diff --git a/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj b/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj index 2ab10fb9081..a003785afc3 100644 --- a/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj +++ b/packages/video_player/video_player/example/ios/Runner.xcodeproj/project.pbxproj @@ -140,6 +140,7 @@ 97C146EC1CF9000F007C117D /* Resources */, 9705A1C41CF9048500538489 /* Embed Frameworks */, 3B06AD1E1E4923F5004D2608 /* Thin Binary */, + 1F784D8C27C8AC72541E3F4C /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -205,6 +206,23 @@ /* End PBXResourcesBuildPhase section */ /* Begin PBXShellScriptBuildPhase section */ + 1F784D8C27C8AC72541E3F4C /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Embed Pods Frameworks"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; 3B06AD1E1E4923F5004D2608 /* Thin Binary */ = { isa = PBXShellScriptBuildPhase; alwaysOutOfDate = 1; @@ -335,7 +353,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; @@ -414,7 +432,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = 
iphoneos; @@ -465,7 +483,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - IPHONEOS_DEPLOYMENT_TARGET = 12.0; + IPHONEOS_DEPLOYMENT_TARGET = 13.0; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = iphoneos; SUPPORTED_PLATFORMS = iphoneos; diff --git a/packages/video_player/video_player/example/macos/Podfile b/packages/video_player/video_player/example/macos/Podfile index ae77cc1d426..66f6172bbb3 100644 --- a/packages/video_player/video_player/example/macos/Podfile +++ b/packages/video_player/video_player/example/macos/Podfile @@ -1,4 +1,4 @@ -platform :osx, '10.14' +platform :osx, '10.15' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. ENV['COCOAPODS_DISABLE_STATS'] = 'true' diff --git a/packages/video_player/video_player/example/macos/Runner.xcodeproj/project.pbxproj b/packages/video_player/video_player/example/macos/Runner.xcodeproj/project.pbxproj index e6fa40d2ed6..9869c74bb38 100644 --- a/packages/video_player/video_player/example/macos/Runner.xcodeproj/project.pbxproj +++ b/packages/video_player/video_player/example/macos/Runner.xcodeproj/project.pbxproj @@ -193,6 +193,7 @@ 33CC10EB2044A3C60003C045 /* Resources */, 33CC110E2044A8840003C045 /* Bundle Framework */, 3399D490228B24CF009A79C7 /* ShellScript */, + C0B5FBA873B9089B9B9062E0 /* [CP] Embed Pods Frameworks */, ); buildRules = ( ); @@ -306,6 +307,23 @@ shellPath = /bin/sh; shellScript = "\"$FLUTTER_ROOT\"/packages/flutter_tools/bin/macos_assemble.sh && touch Flutter/ephemeral/tripwire"; }; + C0B5FBA873B9089B9B9062E0 /* [CP] Embed Pods Frameworks */ = { + isa = PBXShellScriptBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + inputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist", + ); + name = "[CP] Embed Pods Frameworks"; + outputFileListPaths = ( + "${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist", + ); + runOnlyForDeploymentPostprocessing = 0; + shellPath = /bin/sh; + shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n"; + showEnvVarsInLog = 0; + }; D3E396DFBCC51886820113AA /* [CP] Check Pods Manifest.lock */ = { isa = PBXShellScriptBuildPhase; buildActionMask = 2147483647; @@ -402,7 +420,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - MACOSX_DEPLOYMENT_TARGET = 10.14; + MACOSX_DEPLOYMENT_TARGET = 10.15; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = macosx; SWIFT_COMPILATION_MODE = wholemodule; @@ -481,7 +499,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - MACOSX_DEPLOYMENT_TARGET = 10.14; + MACOSX_DEPLOYMENT_TARGET = 10.15; MTL_ENABLE_DEBUG_INFO = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = macosx; @@ -528,7 +546,7 @@ GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; GCC_WARN_UNUSED_FUNCTION = YES; GCC_WARN_UNUSED_VARIABLE = YES; - MACOSX_DEPLOYMENT_TARGET = 10.14; + MACOSX_DEPLOYMENT_TARGET = 10.15; MTL_ENABLE_DEBUG_INFO = NO; SDKROOT = macosx; SWIFT_COMPILATION_MODE = wholemodule; diff --git a/packages/video_player/video_player_avfoundation/CHANGELOG.md b/packages/video_player/video_player_avfoundation/CHANGELOG.md index 7f509fcc462..f9eb1a9338a 100644 --- a/packages/video_player/video_player_avfoundation/CHANGELOG.md +++ b/packages/video_player/video_player_avfoundation/CHANGELOG.md @@ -1,3 +1,8 @@ +## 2.9.0 + +* 
Implements `getAudioTracks()` and `selectAudioTrack()` methods. +* Updates minimum supported SDK version to Flutter 3.29/Dart 3.7. + ## 2.8.8 * Refactors Dart internals for maintainability. diff --git a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m index b095dbf33ae..f86fed55cf7 100644 --- a/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m +++ b/packages/video_player/video_player_avfoundation/darwin/RunnerTests/VideoPlayerTests.m @@ -1051,4 +1051,165 @@ - (nonnull AVPlayerItem *)playerItemWithURL:(NSURL *)url { return [AVPlayerItem playerItemWithAsset:[AVURLAsset URLAssetWithURL:url options:nil]]; } +#pragma mark - Audio Track Tests + +// Tests getAudioTracks with a regular MP4 video file using real AVFoundation. +// The bee.mp4 video has a single audio track. +- (void)testGetAudioTracksWithRealMP4Video { + FVPVideoPlayer *player = + [[FVPVideoPlayer alloc] initWithPlayerItem:[self playerItemWithURL:self.mp4TestURL] + avFactory:[[FVPDefaultAVFactory alloc] init] + viewProvider:[[StubViewProvider alloc] initWithView:nil]]; + XCTAssertNotNil(player); + + XCTestExpectation *initializedExpectation = [self expectationWithDescription:@"initialized"]; + StubEventListener *listener = + [[StubEventListener alloc] initWithInitializationExpectation:initializedExpectation]; + player.eventListener = listener; + [self waitForExpectationsWithTimeout:30.0 handler:nil]; + + // Now test getAudioTracks + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + XCTAssertNil(error); + XCTAssertNotNil(result); + + // For regular MP4 files, we expect asset tracks (not media selection tracks) + // bee.mp4 has at least one audio track + if (result.assetTracks) { + XCTAssertGreaterThanOrEqual(result.assetTracks.count, 1); + // First track should be selected by default + if (result.assetTracks.count > 0) { + FVPAssetAudioTrackData *firstTrack = result.assetTracks[0]; + XCTAssertTrue(firstTrack.isSelected); + XCTAssertGreaterThan(firstTrack.trackId, 0); + } + } + // mediaSelectionTracks should be nil for regular MP4 files + XCTAssertNil(result.mediaSelectionTracks); + + [player disposeWithError:&error]; +} + +// Tests getAudioTracks with an HLS stream using real AVFoundation. +// HLS streams use media selection groups for audio track selection. +- (void)testGetAudioTracksWithRealHLSStream { + NSURL *hlsURL = [NSURL + URLWithString:@"https://flutter.github.io/assets-for-api-docs/assets/videos/hls/bee.m3u8"]; + XCTAssertNotNil(hlsURL); + + FVPVideoPlayer *player = + [[FVPVideoPlayer alloc] initWithPlayerItem:[self playerItemWithURL:hlsURL] + avFactory:[[FVPDefaultAVFactory alloc] init] + viewProvider:[[StubViewProvider alloc] initWithView:nil]]; + XCTAssertNotNil(player); + + XCTestExpectation *initializedExpectation = [self expectationWithDescription:@"initialized"]; + StubEventListener *listener = + [[StubEventListener alloc] initWithInitializationExpectation:initializedExpectation]; + player.eventListener = listener; + [self waitForExpectationsWithTimeout:30.0 handler:nil]; + + // Now test getAudioTracks + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + XCTAssertNil(error); + XCTAssertNotNil(result); + + // For HLS streams, the result depends on whether the stream has multiple audio options. + // The bee.m3u8 stream may or may not have multiple audio tracks. 
+ // We verify the method returns valid data without crashing. + if (result.mediaSelectionTracks) { + // If media selection tracks exist, they should have valid structure + for (FVPMediaSelectionAudioTrackData *track in result.mediaSelectionTracks) { + XCTAssertNotNil(track.displayName); + XCTAssertGreaterThanOrEqual(track.index, 0); + } + } else if (result.assetTracks) { + // Falls back to asset tracks if no media selection group + for (FVPAssetAudioTrackData *track in result.assetTracks) { + XCTAssertGreaterThan(track.trackId, 0); + } + } + + [player disposeWithError:&error]; +} + +// Tests that getAudioTracks returns valid data for audio-only files. +- (void)testGetAudioTracksWithRealAudioFile { + NSURL *audioURL = [NSURL + URLWithString:@"https://flutter.github.io/assets-for-api-docs/assets/audio/rooster.mp3"]; + XCTAssertNotNil(audioURL); + + FVPVideoPlayer *player = + [[FVPVideoPlayer alloc] initWithPlayerItem:[self playerItemWithURL:audioURL] + avFactory:[[FVPDefaultAVFactory alloc] init] + viewProvider:[[StubViewProvider alloc] initWithView:nil]]; + XCTAssertNotNil(player); + + XCTestExpectation *initializedExpectation = [self expectationWithDescription:@"initialized"]; + StubEventListener *listener = + [[StubEventListener alloc] initWithInitializationExpectation:initializedExpectation]; + player.eventListener = listener; + [self waitForExpectationsWithTimeout:30.0 handler:nil]; + + // Now test getAudioTracks + FlutterError *error = nil; + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + XCTAssertNil(error); + XCTAssertNotNil(result); + + // Audio files should have at least one audio track + if (result.assetTracks) { + XCTAssertGreaterThanOrEqual(result.assetTracks.count, 1); + } + + [player disposeWithError:&error]; +} + +// Tests that getAudioTracks works correctly through the plugin API with a real video. 
+- (void)testGetAudioTracksViaPluginWithRealVideo { + NSObject *registrar = OCMProtocolMock(@protocol(FlutterPluginRegistrar)); + FVPVideoPlayerPlugin *videoPlayerPlugin = + [[FVPVideoPlayerPlugin alloc] initWithRegistrar:registrar]; + + FlutterError *error; + [videoPlayerPlugin initialize:&error]; + XCTAssertNil(error); + + FVPCreationOptions *create = [FVPCreationOptions + makeWithUri:@"https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4" + httpHeaders:@{}]; + FVPTexturePlayerIds *identifiers = [videoPlayerPlugin createTexturePlayerWithOptions:create + error:&error]; + XCTAssertNil(error); + XCTAssertNotNil(identifiers); + + FVPVideoPlayer *player = videoPlayerPlugin.playersByIdentifier[@(identifiers.playerId)]; + XCTAssertNotNil(player); + + // Wait for player item to become ready + AVPlayerItem *item = player.player.currentItem; + [self keyValueObservingExpectationForObject:(id)item + keyPath:@"status" + expectedValue:@(AVPlayerItemStatusReadyToPlay)]; + [self waitForExpectationsWithTimeout:30.0 handler:nil]; + + // Now test getAudioTracks + FVPNativeAudioTrackData *result = [player getAudioTracks:&error]; + + XCTAssertNil(error); + XCTAssertNotNil(result); + + // For regular MP4, expect asset tracks + if (result.assetTracks) { + XCTAssertGreaterThanOrEqual(result.assetTracks.count, 1); + } + + [player disposeWithError:&error]; +} + @end diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m index 9da957fbc8c..94e8e93c1f9 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/FVPVideoPlayer.m @@ -421,6 +421,168 @@ - (void)setPlaybackSpeed:(double)speed error:(FlutterError *_Nullable *_Nonnull) [self updatePlayingState]; } +- (nullable FVPNativeAudioTrackData *)getAudioTracks:(FlutterError *_Nullable *_Nonnull)error { + AVPlayerItem *currentItem = _player.currentItem; + NSAssert(currentItem, @"currentItem should not be nil"); + AVAsset *asset = currentItem.asset; + + // First, try to get tracks from media selection (for HLS streams) + AVMediaSelectionGroup *audioGroup = + [asset mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]; + if (audioGroup.options.count > 0) { + NSMutableArray *mediaSelectionTracks = + [[NSMutableArray alloc] init]; + AVMediaSelection *mediaSelection = currentItem.currentMediaSelection; + AVMediaSelectionOption *currentSelection = + [mediaSelection selectedMediaOptionInMediaSelectionGroup:audioGroup]; + + for (NSInteger i = 0; i < audioGroup.options.count; i++) { + AVMediaSelectionOption *option = audioGroup.options[i]; + NSString *displayName = option.displayName; + + NSString *languageCode = nil; + if (option.locale) { + languageCode = option.locale.languageCode; + } + + NSArray *titleItems = + [AVMetadataItem metadataItemsFromArray:option.commonMetadata + withKey:AVMetadataCommonKeyTitle + keySpace:AVMetadataKeySpaceCommon]; + NSString *commonMetadataTitle = titleItems.firstObject.stringValue; + + BOOL isSelected = [currentSelection isEqual:option]; + + FVPMediaSelectionAudioTrackData *trackData = + [FVPMediaSelectionAudioTrackData makeWithIndex:i + displayName:displayName + 
languageCode:languageCode + isSelected:isSelected + commonMetadataTitle:commonMetadataTitle]; + + [mediaSelectionTracks addObject:trackData]; + } + + // Always return media selection tracks when there's a media selection group + // even if all options were nil/invalid (empty array) + return [FVPNativeAudioTrackData makeWithAssetTracks:nil + mediaSelectionTracks:mediaSelectionTracks]; + } + + // If no media selection group or empty, try to get tracks from AVAsset (for regular video files) + NSArray *assetAudioTracks = [asset tracksWithMediaType:AVMediaTypeAudio]; + NSMutableArray *assetTracks = [[NSMutableArray alloc] init]; + + for (NSInteger i = 0; i < assetAudioTracks.count; i++) { + AVAssetTrack *track = assetAudioTracks[i]; + + // Extract metadata from the track + NSString *language = nil; + NSString *label = nil; + + // Try to get language from track + if ([track.languageCode length] > 0) { + language = track.languageCode; + } + + // Try to get label from metadata + for (AVMetadataItem *item in track.commonMetadata) { + if ([item.commonKey isEqualToString:AVMetadataCommonKeyTitle] && item.stringValue) { + label = item.stringValue; + break; + } + } + + // Extract format information + NSNumber *bitrate = nil; + NSNumber *sampleRate = nil; + NSNumber *channelCount = nil; + NSString *codec = nil; + + // Extract format information from the track's format descriptions + if (track.formatDescriptions.count > 0) { + CMFormatDescriptionRef formatDesc = + (__bridge CMFormatDescriptionRef)track.formatDescriptions[0]; + + // Get audio stream basic description + const AudioStreamBasicDescription *audioDesc = + CMAudioFormatDescriptionGetStreamBasicDescription(formatDesc); + if (audioDesc) { + if (audioDesc->mSampleRate > 0) { + sampleRate = @((NSInteger)audioDesc->mSampleRate); + } + if (audioDesc->mChannelsPerFrame > 0) { + channelCount = @(audioDesc->mChannelsPerFrame); + } + } + + // Get codec information + FourCharCode codecType = CMFormatDescriptionGetMediaSubType(formatDesc); + switch (codecType) { + case kAudioFormatMPEG4AAC: + codec = @"aac"; + break; + case kAudioFormatAC3: + codec = @"ac3"; + break; + case kAudioFormatEnhancedAC3: + codec = @"eac3"; + break; + case kAudioFormatMPEGLayer3: + codec = @"mp3"; + break; + default: + codec = nil; + break; + } + } + + // Estimate bitrate from track + if (track.estimatedDataRate > 0) { + bitrate = @((NSInteger)track.estimatedDataRate); + } + + // For now, assume the first track is selected (we don't have easy access to current selection + // for asset tracks) + BOOL isSelected = (i == 0); + + FVPAssetAudioTrackData *trackData = [FVPAssetAudioTrackData makeWithTrackId:track.trackID + label:label + language:language + isSelected:isSelected + bitrate:bitrate + sampleRate:sampleRate + channelCount:channelCount + codec:codec]; + + [assetTracks addObject:trackData]; + } + + // Return asset tracks (even if empty), media selection tracks should be nil + return [FVPNativeAudioTrackData makeWithAssetTracks:assetTracks mediaSelectionTracks:nil]; +} + +- (void)selectAudioTrackWithType:(nonnull NSString *)trackType + trackId:(NSInteger)trackId + error:(FlutterError *_Nullable __autoreleasing *_Nonnull)error { + AVPlayerItem *currentItem = _player.currentItem; + NSAssert(currentItem, @"currentItem should not be nil"); + AVAsset *asset = currentItem.asset; + + // Check if this is a media selection track (for HLS streams) + if ([trackType isEqualToString:@"mediaSelection"]) { + AVMediaSelectionGroup *audioGroup = + [asset 
mediaSelectionGroupForMediaCharacteristic:AVMediaCharacteristicAudible]; + // Validate that we have a valid audio group and the trackId (index) is valid + if (audioGroup && trackId >= 0 && trackId < (NSInteger)audioGroup.options.count) { + AVMediaSelectionOption *option = audioGroup.options[trackId]; + [currentItem selectMediaOption:option inMediaSelectionGroup:audioGroup]; + } + } + // For asset tracks, we don't have a direct way to select them in AVFoundation + // This would require more complex track selection logic that's not commonly used +} + #pragma mark - Private - (int64_t)duration { diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h index d06c3fd0179..16bb1f2fbe3 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/include/video_player_avfoundation/messages.g.h @@ -16,6 +16,10 @@ NS_ASSUME_NONNULL_BEGIN @class FVPPlatformVideoViewCreationParams; @class FVPCreationOptions; @class FVPTexturePlayerIds; +@class FVPAudioTrackMessage; +@class FVPAssetAudioTrackData; +@class FVPMediaSelectionAudioTrackData; +@class FVPNativeAudioTrackData; /// Information passed to the platform view creation. @interface FVPPlatformVideoViewCreationParams : NSObject @@ -42,6 +46,78 @@ NS_ASSUME_NONNULL_BEGIN @property(nonatomic, assign) NSInteger textureId; @end +/// Represents an audio track in a video. +@interface FVPAudioTrackMessage : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. +- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithId:(NSString *)id + label:(NSString *)label + language:(NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec; +@property(nonatomic, copy) NSString *id; +@property(nonatomic, copy) NSString *label; +@property(nonatomic, copy) NSString *language; +@property(nonatomic, assign) BOOL isSelected; +@property(nonatomic, strong, nullable) NSNumber *bitrate; +@property(nonatomic, strong, nullable) NSNumber *sampleRate; +@property(nonatomic, strong, nullable) NSNumber *channelCount; +@property(nonatomic, copy, nullable) NSString *codec; +@end + +/// Raw audio track data from AVAssetTrack (for regular assets). +@interface FVPAssetAudioTrackData : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. 
+- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithTrackId:(NSInteger)trackId + label:(nullable NSString *)label + language:(nullable NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec; +@property(nonatomic, assign) NSInteger trackId; +@property(nonatomic, copy, nullable) NSString *label; +@property(nonatomic, copy, nullable) NSString *language; +@property(nonatomic, assign) BOOL isSelected; +@property(nonatomic, strong, nullable) NSNumber *bitrate; +@property(nonatomic, strong, nullable) NSNumber *sampleRate; +@property(nonatomic, strong, nullable) NSNumber *channelCount; +@property(nonatomic, copy, nullable) NSString *codec; +@end + +/// Raw audio track data from AVMediaSelectionOption (for HLS streams). +@interface FVPMediaSelectionAudioTrackData : NSObject +/// `init` unavailable to enforce nonnull fields, see the `make` class method. +- (instancetype)init NS_UNAVAILABLE; ++ (instancetype)makeWithIndex:(NSInteger)index + displayName:(nullable NSString *)displayName + languageCode:(nullable NSString *)languageCode + isSelected:(BOOL)isSelected + commonMetadataTitle:(nullable NSString *)commonMetadataTitle; +@property(nonatomic, assign) NSInteger index; +@property(nonatomic, copy, nullable) NSString *displayName; +@property(nonatomic, copy, nullable) NSString *languageCode; +@property(nonatomic, assign) BOOL isSelected; +@property(nonatomic, copy, nullable) NSString *commonMetadataTitle; +@end + +/// Container for raw audio track data from native platforms. +@interface FVPNativeAudioTrackData : NSObject ++ (instancetype)makeWithAssetTracks:(nullable NSArray *)assetTracks + mediaSelectionTracks: + (nullable NSArray *)mediaSelectionTracks; +/// Asset-based tracks (for regular video files) +@property(nonatomic, copy, nullable) NSArray *assetTracks; +/// Media selection-based tracks (for HLS streams) +@property(nonatomic, copy, nullable) + NSArray *mediaSelectionTracks; +@end + /// The codec used by all APIs. NSObject *FVPGetMessagesCodec(void); @@ -78,6 +154,11 @@ extern void SetUpFVPAVFoundationVideoPlayerApiWithSuffix( - (void)seekTo:(NSInteger)position completion:(void (^)(FlutterError *_Nullable))completion; - (void)pauseWithError:(FlutterError *_Nullable *_Nonnull)error; - (void)disposeWithError:(FlutterError *_Nullable *_Nonnull)error; +/// @return `nil` only when `error != nil`. 
+- (nullable FVPNativeAudioTrackData *)getAudioTracks:(FlutterError *_Nullable *_Nonnull)error; +- (void)selectAudioTrackWithType:(NSString *)trackType + trackId:(NSInteger)trackId + error:(FlutterError *_Nullable *_Nonnull)error; @end extern void SetUpFVPVideoPlayerInstanceApi(id binaryMessenger, diff --git a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m index 155ac2bacad..7ca21a80f58 100644 --- a/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m +++ b/packages/video_player/video_player_avfoundation/darwin/video_player_avfoundation/Sources/video_player_avfoundation/messages.g.m @@ -48,6 +48,30 @@ + (nullable FVPTexturePlayerIds *)nullableFromList:(NSArray *)list; - (NSArray *)toList; @end +@interface FVPAudioTrackMessage () ++ (FVPAudioTrackMessage *)fromList:(NSArray *)list; ++ (nullable FVPAudioTrackMessage *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + +@interface FVPAssetAudioTrackData () ++ (FVPAssetAudioTrackData *)fromList:(NSArray *)list; ++ (nullable FVPAssetAudioTrackData *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + +@interface FVPMediaSelectionAudioTrackData () ++ (FVPMediaSelectionAudioTrackData *)fromList:(NSArray *)list; ++ (nullable FVPMediaSelectionAudioTrackData *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + +@interface FVPNativeAudioTrackData () ++ (FVPNativeAudioTrackData *)fromList:(NSArray *)list; ++ (nullable FVPNativeAudioTrackData *)nullableFromList:(NSArray *)list; +- (NSArray *)toList; +@end + @implementation FVPPlatformVideoViewCreationParams + (instancetype)makeWithPlayerId:(NSInteger)playerId { FVPPlatformVideoViewCreationParams *pigeonResult = @@ -120,6 +144,167 @@ + (nullable FVPTexturePlayerIds *)nullableFromList:(NSArray *)list { } @end +@implementation FVPAudioTrackMessage ++ (instancetype)makeWithId:(NSString *)id + label:(NSString *)label + language:(NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec { + FVPAudioTrackMessage *pigeonResult = [[FVPAudioTrackMessage alloc] init]; + pigeonResult.id = id; + pigeonResult.label = label; + pigeonResult.language = language; + pigeonResult.isSelected = isSelected; + pigeonResult.bitrate = bitrate; + pigeonResult.sampleRate = sampleRate; + pigeonResult.channelCount = channelCount; + pigeonResult.codec = codec; + return pigeonResult; +} ++ (FVPAudioTrackMessage *)fromList:(NSArray *)list { + FVPAudioTrackMessage *pigeonResult = [[FVPAudioTrackMessage alloc] init]; + pigeonResult.id = GetNullableObjectAtIndex(list, 0); + pigeonResult.label = GetNullableObjectAtIndex(list, 1); + pigeonResult.language = GetNullableObjectAtIndex(list, 2); + pigeonResult.isSelected = [GetNullableObjectAtIndex(list, 3) boolValue]; + pigeonResult.bitrate = GetNullableObjectAtIndex(list, 4); + pigeonResult.sampleRate = GetNullableObjectAtIndex(list, 5); + pigeonResult.channelCount = GetNullableObjectAtIndex(list, 6); + pigeonResult.codec = GetNullableObjectAtIndex(list, 7); + return pigeonResult; +} ++ (nullable FVPAudioTrackMessage *)nullableFromList:(NSArray *)list { + return (list) ? 
[FVPAudioTrackMessage fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + self.id ?: [NSNull null], + self.label ?: [NSNull null], + self.language ?: [NSNull null], + @(self.isSelected), + self.bitrate ?: [NSNull null], + self.sampleRate ?: [NSNull null], + self.channelCount ?: [NSNull null], + self.codec ?: [NSNull null], + ]; +} +@end + +@implementation FVPAssetAudioTrackData ++ (instancetype)makeWithTrackId:(NSInteger)trackId + label:(nullable NSString *)label + language:(nullable NSString *)language + isSelected:(BOOL)isSelected + bitrate:(nullable NSNumber *)bitrate + sampleRate:(nullable NSNumber *)sampleRate + channelCount:(nullable NSNumber *)channelCount + codec:(nullable NSString *)codec { + FVPAssetAudioTrackData *pigeonResult = [[FVPAssetAudioTrackData alloc] init]; + pigeonResult.trackId = trackId; + pigeonResult.label = label; + pigeonResult.language = language; + pigeonResult.isSelected = isSelected; + pigeonResult.bitrate = bitrate; + pigeonResult.sampleRate = sampleRate; + pigeonResult.channelCount = channelCount; + pigeonResult.codec = codec; + return pigeonResult; +} ++ (FVPAssetAudioTrackData *)fromList:(NSArray *)list { + FVPAssetAudioTrackData *pigeonResult = [[FVPAssetAudioTrackData alloc] init]; + pigeonResult.trackId = [GetNullableObjectAtIndex(list, 0) integerValue]; + pigeonResult.label = GetNullableObjectAtIndex(list, 1); + pigeonResult.language = GetNullableObjectAtIndex(list, 2); + pigeonResult.isSelected = [GetNullableObjectAtIndex(list, 3) boolValue]; + pigeonResult.bitrate = GetNullableObjectAtIndex(list, 4); + pigeonResult.sampleRate = GetNullableObjectAtIndex(list, 5); + pigeonResult.channelCount = GetNullableObjectAtIndex(list, 6); + pigeonResult.codec = GetNullableObjectAtIndex(list, 7); + return pigeonResult; +} ++ (nullable FVPAssetAudioTrackData *)nullableFromList:(NSArray *)list { + return (list) ? [FVPAssetAudioTrackData fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + @(self.trackId), + self.label ?: [NSNull null], + self.language ?: [NSNull null], + @(self.isSelected), + self.bitrate ?: [NSNull null], + self.sampleRate ?: [NSNull null], + self.channelCount ?: [NSNull null], + self.codec ?: [NSNull null], + ]; +} +@end + +@implementation FVPMediaSelectionAudioTrackData ++ (instancetype)makeWithIndex:(NSInteger)index + displayName:(nullable NSString *)displayName + languageCode:(nullable NSString *)languageCode + isSelected:(BOOL)isSelected + commonMetadataTitle:(nullable NSString *)commonMetadataTitle { + FVPMediaSelectionAudioTrackData *pigeonResult = [[FVPMediaSelectionAudioTrackData alloc] init]; + pigeonResult.index = index; + pigeonResult.displayName = displayName; + pigeonResult.languageCode = languageCode; + pigeonResult.isSelected = isSelected; + pigeonResult.commonMetadataTitle = commonMetadataTitle; + return pigeonResult; +} ++ (FVPMediaSelectionAudioTrackData *)fromList:(NSArray *)list { + FVPMediaSelectionAudioTrackData *pigeonResult = [[FVPMediaSelectionAudioTrackData alloc] init]; + pigeonResult.index = [GetNullableObjectAtIndex(list, 0) integerValue]; + pigeonResult.displayName = GetNullableObjectAtIndex(list, 1); + pigeonResult.languageCode = GetNullableObjectAtIndex(list, 2); + pigeonResult.isSelected = [GetNullableObjectAtIndex(list, 3) boolValue]; + pigeonResult.commonMetadataTitle = GetNullableObjectAtIndex(list, 4); + return pigeonResult; +} ++ (nullable FVPMediaSelectionAudioTrackData *)nullableFromList:(NSArray *)list { + return (list) ? 
[FVPMediaSelectionAudioTrackData fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + @(self.index), + self.displayName ?: [NSNull null], + self.languageCode ?: [NSNull null], + @(self.isSelected), + self.commonMetadataTitle ?: [NSNull null], + ]; +} +@end + +@implementation FVPNativeAudioTrackData ++ (instancetype)makeWithAssetTracks:(nullable NSArray *)assetTracks + mediaSelectionTracks: + (nullable NSArray *)mediaSelectionTracks { + FVPNativeAudioTrackData *pigeonResult = [[FVPNativeAudioTrackData alloc] init]; + pigeonResult.assetTracks = assetTracks; + pigeonResult.mediaSelectionTracks = mediaSelectionTracks; + return pigeonResult; +} ++ (FVPNativeAudioTrackData *)fromList:(NSArray *)list { + FVPNativeAudioTrackData *pigeonResult = [[FVPNativeAudioTrackData alloc] init]; + pigeonResult.assetTracks = GetNullableObjectAtIndex(list, 0); + pigeonResult.mediaSelectionTracks = GetNullableObjectAtIndex(list, 1); + return pigeonResult; +} ++ (nullable FVPNativeAudioTrackData *)nullableFromList:(NSArray *)list { + return (list) ? [FVPNativeAudioTrackData fromList:list] : nil; +} +- (NSArray *)toList { + return @[ + self.assetTracks ?: [NSNull null], + self.mediaSelectionTracks ?: [NSNull null], + ]; +} +@end + @interface FVPMessagesPigeonCodecReader : FlutterStandardReader @end @implementation FVPMessagesPigeonCodecReader @@ -131,6 +316,14 @@ - (nullable id)readValueOfType:(UInt8)type { return [FVPCreationOptions fromList:[self readValue]]; case 131: return [FVPTexturePlayerIds fromList:[self readValue]]; + case 132: + return [FVPAudioTrackMessage fromList:[self readValue]]; + case 133: + return [FVPAssetAudioTrackData fromList:[self readValue]]; + case 134: + return [FVPMediaSelectionAudioTrackData fromList:[self readValue]]; + case 135: + return [FVPNativeAudioTrackData fromList:[self readValue]]; default: return [super readValueOfType:type]; } @@ -150,6 +343,18 @@ - (void)writeValue:(id)value { } else if ([value isKindOfClass:[FVPTexturePlayerIds class]]) { [self writeByte:131]; [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPAudioTrackMessage class]]) { + [self writeByte:132]; + [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPAssetAudioTrackData class]]) { + [self writeByte:133]; + [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPMediaSelectionAudioTrackData class]]) { + [self writeByte:134]; + [self writeValue:[value toList]]; + } else if ([value isKindOfClass:[FVPNativeAudioTrackData class]]) { + [self writeByte:135]; + [self writeValue:[value toList]]; } else { [super writeValue:value]; } @@ -502,4 +707,50 @@ void SetUpFVPVideoPlayerInstanceApiWithSuffix(id binaryM [channel setMessageHandler:nil]; } } + { + FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] + initWithName:[NSString stringWithFormat:@"%@%@", + @"dev.flutter.pigeon.video_player_avfoundation." 
+ @"VideoPlayerInstanceApi.getAudioTracks", + messageChannelSuffix] + binaryMessenger:binaryMessenger + codec:FVPGetMessagesCodec()]; + if (api) { + NSCAssert([api respondsToSelector:@selector(getAudioTracks:)], + @"FVPVideoPlayerInstanceApi api (%@) doesn't respond to @selector(getAudioTracks:)", + api); + [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { + FlutterError *error; + FVPNativeAudioTrackData *output = [api getAudioTracks:&error]; + callback(wrapResult(output, error)); + }]; + } else { + [channel setMessageHandler:nil]; + } + } + { + FlutterBasicMessageChannel *channel = [[FlutterBasicMessageChannel alloc] + initWithName:[NSString stringWithFormat:@"%@%@", + @"dev.flutter.pigeon.video_player_avfoundation." + @"VideoPlayerInstanceApi.selectAudioTrack", + messageChannelSuffix] + binaryMessenger:binaryMessenger + codec:FVPGetMessagesCodec()]; + if (api) { + NSCAssert([api respondsToSelector:@selector(selectAudioTrackWithType:trackId:error:)], + @"FVPVideoPlayerInstanceApi api (%@) doesn't respond to " + @"@selector(selectAudioTrackWithType:trackId:error:)", + api); + [channel setMessageHandler:^(id _Nullable message, FlutterReply callback) { + NSArray *args = message; + NSString *arg_trackType = GetNullableObjectAtIndex(args, 0); + NSInteger arg_trackId = [GetNullableObjectAtIndex(args, 1) integerValue]; + FlutterError *error; + [api selectAudioTrackWithType:arg_trackType trackId:arg_trackId error:&error]; + callback(wrapResult(nil, error)); + }]; + } else { + [channel setMessageHandler:nil]; + } + } } diff --git a/packages/video_player/video_player_avfoundation/example/ios/Podfile b/packages/video_player/video_player_avfoundation/example/ios/Podfile index c9339a034eb..6eafd7e2e95 100644 --- a/packages/video_player/video_player_avfoundation/example/ios/Podfile +++ b/packages/video_player/video_player_avfoundation/example/ios/Podfile @@ -1,5 +1,5 @@ # Uncomment this line to define a global platform for your project -# platform :ios, '12.0' +# platform :ios, '13.0' # CocoaPods analytics sends network stats synchronously affecting flutter build latency. 
 ENV['COCOAPODS_DISABLE_STATS'] = 'true'
diff --git a/packages/video_player/video_player_avfoundation/example/macos/Runner.xcodeproj/project.pbxproj b/packages/video_player/video_player_avfoundation/example/macos/Runner.xcodeproj/project.pbxproj
index 44df4b4d978..41178cae189 100644
--- a/packages/video_player/video_player_avfoundation/example/macos/Runner.xcodeproj/project.pbxproj
+++ b/packages/video_player/video_player_avfoundation/example/macos/Runner.xcodeproj/project.pbxproj
@@ -246,6 +246,7 @@
 				33CC10EB2044A3C60003C045 /* Resources */,
 				33CC110E2044A8840003C045 /* Bundle Framework */,
 				3399D490228B24CF009A79C7 /* ShellScript */,
+				43465698DA6E8053DBCCE1D3 /* [CP] Embed Pods Frameworks */,
 			);
 			buildRules = (
 			);
@@ -373,6 +374,23 @@
 			shellPath = /bin/sh;
 			shellScript = "\"$FLUTTER_ROOT\"/packages/flutter_tools/bin/macos_assemble.sh && touch Flutter/ephemeral/tripwire";
 		};
+		43465698DA6E8053DBCCE1D3 /* [CP] Embed Pods Frameworks */ = {
+			isa = PBXShellScriptBuildPhase;
+			buildActionMask = 2147483647;
+			files = (
+			);
+			inputFileListPaths = (
+				"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-input-files.xcfilelist",
+			);
+			name = "[CP] Embed Pods Frameworks";
+			outputFileListPaths = (
+				"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks-${CONFIGURATION}-output-files.xcfilelist",
+			);
+			runOnlyForDeploymentPostprocessing = 0;
+			shellPath = /bin/sh;
+			shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-Runner/Pods-Runner-frameworks.sh\"\n";
+			showEnvVarsInLog = 0;
+		};
 		5121AE1943D8EE14C90ED8B7 /* [CP] Check Pods Manifest.lock */ = {
 			isa = PBXShellScriptBuildPhase;
 			buildActionMask = 2147483647;
diff --git a/packages/video_player/video_player_avfoundation/example/pubspec.yaml b/packages/video_player/video_player_avfoundation/example/pubspec.yaml
index cc176e75c3f..902bf087303 100644
--- a/packages/video_player/video_player_avfoundation/example/pubspec.yaml
+++ b/packages/video_player/video_player_avfoundation/example/pubspec.yaml
@@ -16,7 +16,7 @@ dependencies:
     # The example app is bundled with the plugin so we use a path dependency on
     # the parent directory to use the current plugin's version.
     path: ../
-  video_player_platform_interface: ^6.3.0
+  video_player_platform_interface: ^6.6.0

 dev_dependencies:
   flutter_test:
diff --git a/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart b/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart
index 834b36ed6b0..bee801cbbc4 100644
--- a/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart
+++ b/packages/video_player/video_player_avfoundation/lib/src/avfoundation_video_player.dart
@@ -178,6 +178,77 @@ class AVFoundationVideoPlayer extends VideoPlayerPlatform {
     return _api.setMixWithOthers(mixWithOthers);
   }

+  @override
+  Future<List<VideoAudioTrack>> getAudioTracks(int playerId) async {
+    final NativeAudioTrackData nativeData = await _playerWith(
+      id: playerId,
+    ).getAudioTracks();
+    final tracks = <VideoAudioTrack>[];
+
+    // Convert asset tracks to VideoAudioTrack
+    if (nativeData.assetTracks != null) {
+      for (final AssetAudioTrackData track in nativeData.assetTracks!) {
+        tracks.add(
+          VideoAudioTrack(
+            id: track.trackId.toString(),
+            label: track.label,
+            language: track.language,
+            isSelected: track.isSelected,
+            bitrate: track.bitrate,
+            sampleRate: track.sampleRate,
+            channelCount: track.channelCount,
+            codec: track.codec,
+          ),
+        );
+      }
+    }
+
+    // Convert media selection tracks to VideoAudioTrack (for HLS streams)
+    if (nativeData.mediaSelectionTracks != null) {
+      for (final MediaSelectionAudioTrackData track
+          in nativeData.mediaSelectionTracks!) {
+        final trackId = 'media_selection_${track.index}';
+        final String? label = track.commonMetadataTitle ?? track.displayName;
+        tracks.add(
+          VideoAudioTrack(
+            id: trackId,
+            label: label,
+            language: track.languageCode,
+            isSelected: track.isSelected,
+          ),
+        );
+      }
+    }
+
+    return tracks;
+  }
+
+  @override
+  Future<void> selectAudioTrack(int playerId, String trackId) {
+    // Parse the trackId to determine type and extract the integer ID
+    String trackType;
+    int numericTrackId;
+
+    if (trackId.startsWith('media_selection_')) {
+      trackType = 'mediaSelection';
+      numericTrackId = int.parse(trackId.substring('media_selection_'.length));
+    } else {
+      // Asset track - the trackId is just the integer as a string
+      trackType = 'asset';
+      numericTrackId = int.parse(trackId);
+    }
+
+    return _playerWith(
+      id: playerId,
+    ).selectAudioTrack(trackType, numericTrackId);
+  }
+
+  @override
+  bool isAudioTrackSupportAvailable() {
+    // iOS/macOS with AVFoundation supports audio track selection
+    return true;
+  }
+
   @override
   Widget buildView(int playerId) {
     return buildViewWithOptions(VideoViewOptions(playerId: playerId));
@@ -249,6 +320,11 @@ class _PlayerInstance {
     return Duration(milliseconds: await _api.getPosition());
   }

+  Future<NativeAudioTrackData> getAudioTracks() => _api.getAudioTracks();
+
+  Future<void> selectAudioTrack(String trackType, int trackId) =>
+      _api.selectAudioTrack(trackType, trackId);
+
   Stream<VideoEvent> get videoEvents {
     _eventSubscription ??= _eventChannel.receiveBroadcastStream().listen(
       _onStreamEvent,
diff --git a/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart b/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart
index 9072c153f95..9ae19909a20 100644
--- a/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart
+++ b/packages/video_player/video_player_avfoundation/lib/src/messages.g.dart
@@ -154,6 +154,267 @@ class TexturePlayerIds {
   int get hashCode => Object.hashAll(_toList());
 }

+/// Represents an audio track in a video.
+class AudioTrackMessage {
+  AudioTrackMessage({
+    required this.id,
+    required this.label,
+    required this.language,
+    required this.isSelected,
+    this.bitrate,
+    this.sampleRate,
+    this.channelCount,
+    this.codec,
+  });
+
+  String id;
+
+  String label;
+
+  String language;
+
+  bool isSelected;
+
+  int? bitrate;
+
+  int? sampleRate;
+
+  int? channelCount;
+
+  String? codec;
+
+  List<Object?> _toList() {
+    return <Object?>[
+      id,
+      label,
+      language,
+      isSelected,
+      bitrate,
+      sampleRate,
+      channelCount,
+      codec,
+    ];
+  }
+
+  Object encode() {
+    return _toList();
+  }
+
+  static AudioTrackMessage decode(Object result) {
+    result as List<Object?>;
+    return AudioTrackMessage(
+      id: result[0]! as String,
+      label: result[1]! as String,
+      language: result[2]! as String,
+      isSelected: result[3]! as bool,
+      bitrate: result[4] as int?,
+      sampleRate: result[5] as int?,
+      channelCount: result[6] as int?,
+      codec: result[7] as String?,
+    );
+  }
+
+  @override
+  // ignore: avoid_equals_and_hash_code_on_mutable_classes
+  bool operator ==(Object other) {
+    if (other is!
AudioTrackMessage || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Raw audio track data from AVAssetTrack (for regular assets). +class AssetAudioTrackData { + AssetAudioTrackData({ + required this.trackId, + this.label, + this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + int trackId; + + String? label; + + String? language; + + bool isSelected; + + int? bitrate; + + int? sampleRate; + + int? channelCount; + + String? codec; + + List _toList() { + return [ + trackId, + label, + language, + isSelected, + bitrate, + sampleRate, + channelCount, + codec, + ]; + } + + Object encode() { + return _toList(); + } + + static AssetAudioTrackData decode(Object result) { + result as List; + return AssetAudioTrackData( + trackId: result[0]! as int, + label: result[1] as String?, + language: result[2] as String?, + isSelected: result[3]! as bool, + bitrate: result[4] as int?, + sampleRate: result[5] as int?, + channelCount: result[6] as int?, + codec: result[7] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! AssetAudioTrackData || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Raw audio track data from AVMediaSelectionOption (for HLS streams). +class MediaSelectionAudioTrackData { + MediaSelectionAudioTrackData({ + required this.index, + this.displayName, + this.languageCode, + required this.isSelected, + this.commonMetadataTitle, + }); + + int index; + + String? displayName; + + String? languageCode; + + bool isSelected; + + String? commonMetadataTitle; + + List _toList() { + return [ + index, + displayName, + languageCode, + isSelected, + commonMetadataTitle, + ]; + } + + Object encode() { + return _toList(); + } + + static MediaSelectionAudioTrackData decode(Object result) { + result as List; + return MediaSelectionAudioTrackData( + index: result[0]! as int, + displayName: result[1] as String?, + languageCode: result[2] as String?, + isSelected: result[3]! as bool, + commonMetadataTitle: result[4] as String?, + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! MediaSelectionAudioTrackData || + other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + +/// Container for raw audio track data from native platforms. +class NativeAudioTrackData { + NativeAudioTrackData({this.assetTracks, this.mediaSelectionTracks}); + + /// Asset-based tracks (for regular video files) + List? assetTracks; + + /// Media selection-based tracks (for HLS streams) + List? 
mediaSelectionTracks; + + List _toList() { + return [assetTracks, mediaSelectionTracks]; + } + + Object encode() { + return _toList(); + } + + static NativeAudioTrackData decode(Object result) { + result as List; + return NativeAudioTrackData( + assetTracks: (result[0] as List?)?.cast(), + mediaSelectionTracks: (result[1] as List?) + ?.cast(), + ); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + bool operator ==(Object other) { + if (other is! NativeAudioTrackData || other.runtimeType != runtimeType) { + return false; + } + if (identical(this, other)) { + return true; + } + return _deepEquals(encode(), other.encode()); + } + + @override + // ignore: avoid_equals_and_hash_code_on_mutable_classes + int get hashCode => Object.hashAll(_toList()); +} + class _PigeonCodec extends StandardMessageCodec { const _PigeonCodec(); @override @@ -170,6 +431,18 @@ class _PigeonCodec extends StandardMessageCodec { } else if (value is TexturePlayerIds) { buffer.putUint8(131); writeValue(buffer, value.encode()); + } else if (value is AudioTrackMessage) { + buffer.putUint8(132); + writeValue(buffer, value.encode()); + } else if (value is AssetAudioTrackData) { + buffer.putUint8(133); + writeValue(buffer, value.encode()); + } else if (value is MediaSelectionAudioTrackData) { + buffer.putUint8(134); + writeValue(buffer, value.encode()); + } else if (value is NativeAudioTrackData) { + buffer.putUint8(135); + writeValue(buffer, value.encode()); } else { super.writeValue(buffer, value); } @@ -184,6 +457,14 @@ class _PigeonCodec extends StandardMessageCodec { return CreationOptions.decode(readValue(buffer)!); case 131: return TexturePlayerIds.decode(readValue(buffer)!); + case 132: + return AudioTrackMessage.decode(readValue(buffer)!); + case 133: + return AssetAudioTrackData.decode(readValue(buffer)!); + case 134: + return MediaSelectionAudioTrackData.decode(readValue(buffer)!); + case 135: + return NativeAudioTrackData.decode(readValue(buffer)!); default: return super.readValueOfType(type, buffer); } @@ -582,4 +863,61 @@ class VideoPlayerInstanceApi { return; } } + + Future getAudioTracks() async { + final String pigeonVar_channelName = + 'dev.flutter.pigeon.video_player_avfoundation.VideoPlayerInstanceApi.getAudioTracks$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = + BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send(null); + final List? pigeonVar_replyList = + await pigeonVar_sendFuture as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! 
as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else if (pigeonVar_replyList[0] == null) { + throw PlatformException( + code: 'null-error', + message: 'Host platform returned null value for non-null return value.', + ); + } else { + return (pigeonVar_replyList[0] as NativeAudioTrackData?)!; + } + } + + Future selectAudioTrack(String trackType, int trackId) async { + final String pigeonVar_channelName = + 'dev.flutter.pigeon.video_player_avfoundation.VideoPlayerInstanceApi.selectAudioTrack$pigeonVar_messageChannelSuffix'; + final BasicMessageChannel pigeonVar_channel = + BasicMessageChannel( + pigeonVar_channelName, + pigeonChannelCodec, + binaryMessenger: pigeonVar_binaryMessenger, + ); + final Future pigeonVar_sendFuture = pigeonVar_channel.send( + [trackType, trackId], + ); + final List? pigeonVar_replyList = + await pigeonVar_sendFuture as List?; + if (pigeonVar_replyList == null) { + throw _createConnectionError(pigeonVar_channelName); + } else if (pigeonVar_replyList.length > 1) { + throw PlatformException( + code: pigeonVar_replyList[0]! as String, + message: pigeonVar_replyList[1] as String?, + details: pigeonVar_replyList[2], + ); + } else { + return; + } + } } diff --git a/packages/video_player/video_player_avfoundation/pigeons/messages.dart b/packages/video_player/video_player_avfoundation/pigeons/messages.dart index 6e872dec145..6f5137af409 100644 --- a/packages/video_player/video_player_avfoundation/pigeons/messages.dart +++ b/packages/video_player/video_player_avfoundation/pigeons/messages.dart @@ -39,6 +39,80 @@ class TexturePlayerIds { final int textureId; } +/// Represents an audio track in a video. +class AudioTrackMessage { + AudioTrackMessage({ + required this.id, + required this.label, + required this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + String id; + String label; + String language; + bool isSelected; + int? bitrate; + int? sampleRate; + int? channelCount; + String? codec; +} + +/// Raw audio track data from AVAssetTrack (for regular assets). +class AssetAudioTrackData { + AssetAudioTrackData({ + required this.trackId, + this.label, + this.language, + required this.isSelected, + this.bitrate, + this.sampleRate, + this.channelCount, + this.codec, + }); + + int trackId; + String? label; + String? language; + bool isSelected; + int? bitrate; + int? sampleRate; + int? channelCount; + String? codec; +} + +/// Raw audio track data from AVMediaSelectionOption (for HLS streams). +class MediaSelectionAudioTrackData { + MediaSelectionAudioTrackData({ + required this.index, + this.displayName, + this.languageCode, + required this.isSelected, + this.commonMetadataTitle, + }); + + int index; + String? displayName; + String? languageCode; + bool isSelected; + String? commonMetadataTitle; +} + +/// Container for raw audio track data from native platforms. +class NativeAudioTrackData { + NativeAudioTrackData({this.assetTracks, this.mediaSelectionTracks}); + + /// Asset-based tracks (for regular video files) + List? assetTracks; + + /// Media selection-based tracks (for HLS streams) + List? 
mediaSelectionTracks; +} + @HostApi() abstract class AVFoundationVideoPlayerApi { @ObjCSelector('initialize') @@ -72,4 +146,8 @@ abstract class VideoPlayerInstanceApi { void seekTo(int position); void pause(); void dispose(); + @ObjCSelector('getAudioTracks') + NativeAudioTrackData getAudioTracks(); + @ObjCSelector('selectAudioTrackWithType:trackId:') + void selectAudioTrack(String trackType, int trackId); } diff --git a/packages/video_player/video_player_avfoundation/pubspec.yaml b/packages/video_player/video_player_avfoundation/pubspec.yaml index 9c326136d92..3a428e6cc82 100644 --- a/packages/video_player/video_player_avfoundation/pubspec.yaml +++ b/packages/video_player/video_player_avfoundation/pubspec.yaml @@ -2,7 +2,7 @@ name: video_player_avfoundation description: iOS and macOS implementation of the video_player plugin. repository: https://github.com/flutter/packages/tree/main/packages/video_player/video_player_avfoundation issue_tracker: https://github.com/flutter/flutter/issues?q=is%3Aissue+is%3Aopen+label%3A%22p%3A+video_player%22 -version: 2.8.8 +version: 2.9.0 environment: sdk: ^3.9.0 @@ -24,7 +24,7 @@ flutter: dependencies: flutter: sdk: flutter - video_player_platform_interface: ^6.3.0 + video_player_platform_interface: ^6.6.0 dev_dependencies: build_runner: ^2.3.3
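
As a usage reference for the API introduced above, here is a minimal, illustrative Dart sketch of how app-level code could exercise the new `getAudioTracks()` / `selectAudioTrack()` surface through the platform interface. The helper name `switchToAlternateAudioTrack` and the assumption that the caller already holds the integer `playerId` from its player setup are assumptions of the sketch, not part of this change.

```dart
// Illustrative only: lists the audio tracks reported by the platform
// implementation and switches to the first one that is not currently selected.
// Assumes `playerId` was obtained when the player was created.
import 'package:video_player_platform_interface/video_player_platform_interface.dart';

Future<void> switchToAlternateAudioTrack(int playerId) async {
  final VideoPlayerPlatform platform = VideoPlayerPlatform.instance;
  if (!platform.isAudioTrackSupportAvailable()) {
    return;
  }

  // MP4-style assets surface AVAssetTrack data (label, language, codec,
  // bitrate); HLS streams surface media-selection options whose ids have the
  // form 'media_selection_<index>'.
  final List<VideoAudioTrack> tracks = await platform.getAudioTracks(playerId);
  for (final VideoAudioTrack track in tracks) {
    print('${track.id}: label=${track.label}, language=${track.language}, '
        'selected=${track.isSelected}');
  }

  // Pick the first non-selected track, if any, and switch to it.
  for (final VideoAudioTrack track in tracks) {
    if (!track.isSelected) {
      await platform.selectAudioTrack(playerId, track.id);
      break;
    }
  }
}
```

The availability guard mirrors the `isAudioTrackSupportAvailable()` override added in this diff, which returns true for the AVFoundation implementation.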