feat: upgrade native sdk 4.1.1.25 (#1509)
Update native sdk 4.1.1.25 dependencies

> This pull request is triggered by a bot; you can check out this branch and
update it.

---------

Co-authored-by: littleGnAl <[email protected]>
littleGnAl authored Jan 19, 2024
1 parent 8d44eae commit 78eadd2
Showing 44 changed files with 929 additions and 661 deletions.
6 changes: 3 additions & 3 deletions android/build.gradle
@@ -47,9 +47,9 @@ dependencies {
if (isDev(project)) {
implementation fileTree(dir: "libs", include: ["*.jar"])
} else {
api 'io.agora.rtc:iris-rtc:4.1.1.21-banban.6'
api 'io.agora.rtc:agora-special-full:4.1.1.21'
api 'io.agora.rtc:full-screen-sharing:4.1.1.21'
api 'io.agora.rtc:iris-rtc:4.1.1.25-sp.1'
api 'io.agora.rtc:agora-special-full:4.1.1.25'
api 'io.agora.rtc:full-screen-sharing:4.1.1.25'
}
}

4 changes: 2 additions & 2 deletions ios/agora_rtc_engine.podspec
@@ -17,8 +17,8 @@ Pod::Spec.new do |s|
s.source = { :path => '.' }
s.source_files = 'Classes/**/*.{h,mm,m,swift}'
s.dependency 'Flutter'
s.dependency 'AgoraIrisRTC_iOS', '4.1.1.21-banban.6'
s.dependency 'AgoraRtcEngine_Special_iOS', '4.1.1.21'
s.dependency 'AgoraIrisRTC_iOS', '4.1.1.25-sp.1'
s.dependency 'AgoraRtcEngine_Special_iOS', '4.1.1.25'
# s.dependency 'AgoraRtcWrapper'
s.platform = :ios, '9.0'
s.swift_version = '5.0'
160 changes: 81 additions & 79 deletions lib/src/agora_base.dart

Large diffs are not rendered by default.

61 changes: 34 additions & 27 deletions lib/src/agora_media_base.dart
@@ -126,7 +126,7 @@ enum AudioRoute {
@JsonValue(4)
routeLoudspeaker,

/// 5: The audio route is a bluetooth headset.
/// 5: The audio route is a Bluetooth device using the HFP protocol.
@JsonValue(5)
routeHeadsetbluetooth,

@@ -146,7 +146,7 @@ enum AudioRoute {
@JsonValue(9)
routeAirplay,

/// @nodoc
/// 10: The audio route is a Bluetooth device using the A2DP protocol.
@JsonValue(10)
routeBluetoothSpeaker,
}
@@ -251,7 +251,7 @@ enum MediaSourceType {
@JsonValue(2)
primaryCameraSource,

/// 3: The secondary camera.
/// 3: A secondary camera.
@JsonValue(3)
secondaryCameraSource,

@@ -263,7 +263,7 @@ enum MediaSourceType {
@JsonValue(5)
secondaryScreenSource,

/// @nodoc
/// 6: Custom video source.
@JsonValue(6)
customVideoSource,

@@ -687,7 +687,8 @@ class ExternalVideoFrame {
this.matrix,
this.metadataBuffer,
this.metadataSize,
this.alphaBuffer});
this.alphaBuffer,
this.fillAlphaBuffer});

/// The video type. See VideoBufferType.
@JsonKey(name: 'type')
@@ -757,6 +758,10 @@
@JsonKey(name: 'alphaBuffer', ignore: true)
final Uint8List? alphaBuffer;

/// @nodoc
@JsonKey(name: 'fillAlphaBuffer')
final bool? fillAlphaBuffer;

/// @nodoc
factory ExternalVideoFrame.fromJson(Map<String, dynamic> json) =>
_$ExternalVideoFrameFromJson(json);
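
A minimal sketch of how the new fillAlphaBuffer field might be used alongside alphaBuffer when pushing a raw RGBA frame. The field names shown in the diff are real; the enum values, the pushVideoFrame parameters, and the helper signature are assumptions about the current 6.x Dart API rather than part of this commit.

import 'dart:typed_data';
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

// Hypothetical helper: push one raw RGBA frame together with an alpha mask.
Future<void> pushRgbaFrameWithAlpha(RtcEngine engine, Uint8List rgbaBytes,
    Uint8List alphaBytes, int width, int height) async {
  final frame = ExternalVideoFrame(
    type: VideoBufferType.videoBufferRawData,
    format: VideoPixelFormat.videoPixelRgba,
    buffer: rgbaBytes,
    stride: width, // stride in pixels for raw RGBA data
    height: height,
    timestamp: DateTime.now().millisecondsSinceEpoch,
    alphaBuffer: alphaBytes, // explicit per-pixel alpha mask
    fillAlphaBuffer: false, // assumed: set true to let the SDK derive alpha from the buffer instead
  );
  await engine.getMediaEngine().pushVideoFrame(frame: frame);
}
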
@@ -957,15 +962,17 @@ extension MediaPlayerSourceTypeExt on MediaPlayerSourceType {
/// The frame position of the video observer.
@JsonEnum(alwaysCreate: true)
enum VideoModulePosition {
/// 1: The post-capturer position, which corresponds to the video data in the onCaptureVideoFrame callback.
/// 1: The position of the locally captured video data after pre-processing, which corresponds to the onCaptureVideoFrame callback. The video observed here reflects the pre-processing effects, which you can verify by enabling image enhancement, virtual background, or watermark.
@JsonValue(1 << 0)
positionPostCapturer,

/// 2: The pre-renderer position, which corresponds to the video data in the onRenderVideoFrame callback.
@JsonValue(1 << 1)
positionPreRenderer,

/// 4: The pre-encoder position, which corresponds to the video data in the onPreEncodeVideoFrame callback.
/// 4: The pre-encoder position, which corresponds to the video data in the onPreEncodeVideoFrame callback. The video observed here reflects both video pre-processing and pre-encoding processing.
/// To verify the video pre-processing effects, you can enable image enhancement, virtual background, or watermark.
/// To verify the pre-encoding processing effect, you can set a lower frame rate (for example, 5 fps).
@JsonValue(1 << 2)
positionPreEncoder,
}
@@ -1350,16 +1357,16 @@ class VideoFrameObserver {
/// Occurs each time the SDK receives a video frame captured by local devices.
///
/// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured by local devices. You can then pre-process the data according to your scenarios.
/// The video data that this callback gets has not been pre-processed, and is not watermarked, cropped, rotated or beautified.
/// The video data that this callback gets has not been pre-processed by operations such as watermarking, cropping, or rotating.
/// If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.
/// Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK.
///
/// * [sourceType] Video source types, including cameras, screens, or media player. See VideoSourceType.
/// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
/// Android: texture
/// iOS: cvPixelBuffer
/// macOS: YUV 420
/// Windows: YUV 420
/// Android: I420 or RGB (GLES20.GL_TEXTURE_2D)
/// iOS: I420 or CVPixelBufferRef
/// macOS: I420 or CVPixelBufferRef
/// Windows: YUV420
final void Function(VideoSourceType type, VideoFrame videoFrame)?
onCaptureVideoFrame;
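
As a rough illustration of the registration flow this callback belongs to, the sketch below registers a VideoFrameObserver and logs each locally captured frame; as noted above, on Flutter the observed data cannot be modified and sent back to the SDK. Names not shown in this diff (registerVideoFrameObserver, the VideoFrame width and height fields) are assumed from the current 6.x Dart API.

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

void observeCapturedFrames(RtcEngine engine) {
  final observer = VideoFrameObserver(
    onCaptureVideoFrame: (VideoSourceType sourceType, VideoFrame videoFrame) {
      // Read-only inspection of the captured frame.
      print('captured ${videoFrame.width}x${videoFrame.height} from $sourceType');
    },
  );
  engine.getMediaEngine().registerVideoFrameObserver(observer);
}
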

@@ -1370,10 +1377,10 @@
/// The video data that this callback gets has been preprocessed, with its content cropped and rotated, and the image enhanced.
///
/// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
/// Android: texture
/// iOS: cvPixelBuffer
/// macOS: YUV 420
/// Windows: YUV 420
/// Android: I420 or RGB (GLES20.GL_TEXTURE_2D)
/// iOS: I420 or CVPixelBufferRef
/// macOS: I420 or CVPixelBufferRef
/// Windows: YUV420
/// * [sourceType] The type of the video source. See VideoSourceType.
final void Function(VideoSourceType type, VideoFrame videoFrame)?
onPreEncodeVideoFrame;
@@ -1389,10 +1396,10 @@
/// Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK.
///
/// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
/// Android: texture
/// iOS: cvPixelBuffer
/// macOS: YUV 420
/// Windows: YUV 420
/// Android: I420 or RGB (GLES20.GL_TEXTURE_2D)
/// iOS: I420 or CVPixelBufferRef
/// macOS: I420 or CVPixelBufferRef
/// Windows: YUV420
/// * [remoteUid] The user ID of the remote user who sends the current video frame.
/// * [channelId] The channel ID.
final void Function(String channelId, int remoteUid, VideoFrame videoFrame)?
@@ -1505,7 +1512,7 @@ extension MediaRecorderStreamTypeExt on MediaRecorderStreamType {
/// The current recording state.
@JsonEnum(alwaysCreate: true)
enum RecorderState {
/// -1: An error occurs during the recording. See RecorderErrorCode for the reason.
/// -1: An error occurs during the recording. See RecorderReasonCode for the reason.
@JsonValue(-1)
recorderStateError,

@@ -1531,26 +1538,26 @@ extension RecorderStateExt on RecorderState {
}
}

/// The reason for the state change.
/// @nodoc
@JsonEnum(alwaysCreate: true)
enum RecorderErrorCode {
/// 0: No error.
/// @nodoc
@JsonValue(0)
recorderErrorNone,

/// 1: The SDK fails to write the recorded data to a file.
/// @nodoc
@JsonValue(1)
recorderErrorWriteFailed,

/// 2: The SDK does not detect any audio and video streams, or audio and video streams are interrupted for more than five seconds during recording.
/// @nodoc
@JsonValue(2)
recorderErrorNoStream,

/// 3: The recording duration exceeds the upper limit.
/// @nodoc
@JsonValue(3)
recorderErrorOverMaxDuration,

/// 4: The recording configuration changes.
/// @nodoc
@JsonValue(4)
recorderErrorConfigChanged,
}
2 changes: 2 additions & 0 deletions lib/src/agora_media_base.g.dart

Some generated files are not rendered by default.

20 changes: 11 additions & 9 deletions lib/src/agora_media_engine.dart
@@ -102,7 +102,7 @@ abstract class MediaEngine {

/// Pulls the remote audio data.
///
/// Before calling this method, you need to call setExternalAudioSink to notify the app to enable and set the external rendering. After a successful method call, the app pulls the decoded and mixed audio data for playback.
/// Before calling this method, you need to call setExternalAudioSink to notify the app to enable and set the external rendering. After a successful call of this method, the app pulls the decoded and mixed audio data for playback.
/// This method only supports pulling data from custom audio source. If you need to pull the data captured by the SDK, do not call this method.
/// Call this method after joining a channel.
/// Once you enable the external audio sink, the app will not retrieve any audio data from the onPlaybackAudioFrame callback.
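
A rough sketch of the pull-mode flow described above: enable the external audio sink, then repeatedly pull decoded, mixed audio for your own playback. The sample rate, channel count, AudioFrame field values, and the exact pullAudioFrame signature are assumptions about the current 6.x Dart API, not part of this diff.

import 'dart:typed_data';
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> pullMixedAudio(RtcEngine engine) async {
  final media = engine.getMediaEngine();
  // Enable the external sink before joining the channel.
  await media.setExternalAudioSink(
      enabled: true, sampleRate: 48000, channels: 2);

  // 10 ms of 48 kHz stereo 16-bit PCM: 480 samples x 2 channels x 2 bytes.
  final frame = AudioFrame(
    type: AudioFrameType.frameTypePcm16,
    samplesPerChannel: 480,
    bytesPerSample: BytesPerSample.twoBytesPerSample,
    channels: 2,
    samplesPerSec: 48000,
    buffer: Uint8List(480 * 2 * 2),
  );
  while (true) {
    // Pull the next chunk of decoded, mixed audio into the frame.
    await media.pullAudioFrame(frame);
    // ...hand frame.buffer to your own audio renderer here...
  }
}
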
@@ -156,10 +156,10 @@ abstract class MediaEngine {

/// Creates a custom audio track.
///
/// To publish a custom audio source to multiple channels, see the following steps:
/// Ensure that you call this method before joining a channel. To publish a custom audio source, see the following steps:
/// Call this method to create a custom audio track and get the audio track ID.
/// In ChannelMediaOptions of each channel, set publishCustomAduioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true.
/// If you call pushAudioFrame, and specify trackId as the audio track ID set in step 2, you can publish the corresponding custom audio source in multiple channels.
/// Call joinChannel to join the channel. In ChannelMediaOptions, set publishCustomAudioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true.
/// Call pushAudioFrame and specify trackId as the audio track ID set in step 2. You can then publish the corresponding custom audio source in the channel.
///
/// * [trackType] The type of the custom audio track. See AudioTrackType. If audioTrackDirect is specified for this parameter, you must set publishMicrophoneTrack to false in ChannelMediaOptions when calling joinChannel to join the channel; otherwise, joining the channel fails and returns the error code -2.
/// * [config] The configuration of the custom audio track. See AudioTrackConfig.
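
The steps above, sketched in Dart under the assumption that the current 6.x API is used; the token, channel name, and uid are placeholders, and the option field is spelled publishCustomAudioTrackId.

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<int> publishCustomAudio(RtcEngine engine) async {
  final media = engine.getMediaEngine();

  // Step 1: create the custom audio track before joining the channel.
  final trackId = await media.createCustomAudioTrack(
    trackType: AudioTrackType.audioTrackMixable,
    config: AudioTrackConfig(enableLocalPlayback: false),
  );

  // Step 2: join and publish the custom track instead of the microphone.
  await engine.joinChannel(
    token: '<token>',
    channelId: '<channel>',
    uid: 0,
    options: ChannelMediaOptions(
      publishCustomAudioTrack: true,
      publishCustomAudioTrackId: trackId,
      publishMicrophoneTrack: false,
    ),
  );

  // Step 3: feed PCM data with pushAudioFrame(frame: ..., trackId: trackId).
  return trackId;
}
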
@@ -176,7 +176,6 @@ abstract class MediaEngine {
///
/// Returns
/// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly.
/// < 0: Failure.
Future<void> destroyCustomAudioTrack(int trackId);

/// Sets the external audio sink.
@@ -191,17 +190,21 @@
///
/// Returns
/// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly.
/// < 0: Failure.
Future<void> setExternalAudioSink(
{required bool enabled, required int sampleRate, required int channels});

/// @nodoc
Future<void> enableCustomAudioLocalPlayback(
{required int trackId, required bool enabled});

/// Pushes the external raw video frame to the SDK.
/// Pushes the external raw video frame to the SDK through video tracks.
///
/// If you call createCustomVideoTrack method to get the video track ID, set the customVideoTrackId parameter to the video track ID you want to publish in the ChannelMediaOptions of each channel, and set the publishCustomVideoTrack parameter to true, you can call this method to push the unencoded external video frame to the SDK.
/// To publish a custom video source, see the following steps:
/// Call createCustomVideoTrack to create a video track and get the video track ID.
/// Call joinChannel to join the channel. In ChannelMediaOptions, set customVideoTrackId to the video track ID that you want to publish, and set publishCustomVideoTrack to true.
/// Call this method and specify videoTrackId as the video track ID set in step 2. You can then publish the corresponding custom video source in the channel. After calling this method, even if you stop pushing external video frames to the SDK, the custom video stream is still counted toward the video duration usage and incurs charges. Agora recommends that you take appropriate measures based on your actual situation to avoid such billing.
/// If you no longer need to capture external video data, you can call destroyCustomVideoTrack to destroy the custom video track.
/// If you only want to use the external video data for local preview and not publish it in the channel, you can call muteLocalVideoStream to cancel sending video stream or call updateChannelMediaOptions to set publishCustomVideoTrack to false.
///
/// * [frame] The external raw video frame to be pushed. See ExternalVideoFrame.
/// * [videoTrackId] The video track ID returned by calling the createCustomVideoTrack method. The default value is 0.
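
A sketch of the custom video flow listed above, assuming the current 6.x Dart API; the token, channel name, and frame contents are placeholders.

import 'dart:typed_data';
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> publishCustomVideo(RtcEngine engine, Uint8List rgbaBytes,
    int width, int height) async {
  // Step 1: create a custom video track and get its ID.
  final videoTrackId = await engine.createCustomVideoTrack();

  // Step 2: join the channel and publish that video track.
  await engine.joinChannel(
    token: '<token>',
    channelId: '<channel>',
    uid: 0,
    options: ChannelMediaOptions(
      publishCustomVideoTrack: true,
      customVideoTrackId: videoTrackId,
    ),
  );

  // Step 3: push raw frames to the track created in step 1.
  await engine.getMediaEngine().pushVideoFrame(
    frame: ExternalVideoFrame(
      type: VideoBufferType.videoBufferRawData,
      format: VideoPixelFormat.videoPixelRgba,
      buffer: rgbaBytes,
      stride: width,
      height: height,
      timestamp: DateTime.now().millisecondsSinceEpoch,
    ),
    videoTrackId: videoTrackId,
  );
}
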
@@ -227,7 +230,6 @@ abstract class MediaEngine {
///
/// Returns
/// When the method call succeeds, there is no return value; when it fails, an AgoraRtcException is thrown, and you need to catch the exception and handle it accordingly.
/// < 0: Failure.
void unregisterAudioFrameObserver(AudioFrameObserver observer);

/// Unregisters the video frame observer.