diff --git a/.github/ci/build/build_android.sh b/.github/ci/build/build_android.sh index c803dca02..233b4bb01 100644 --- a/.github/ci/build/build_android.sh +++ b/.github/ci/build/build_android.sh @@ -49,28 +49,34 @@ echo release_version: $release_version echo short_version: $short_version echo pwd: `pwd` echo sdk_url: $sdk_url +unzip_name=Agora_Native_SDK_for_Android_FULL_DEFAULT +zip_name=Agora_Native_SDK_for_Android_FULL_DEFAULT.zip +if [ -z "$sdk_url" ]; then + echo "sdk_url is empty" + echo unzip_name: $unzip_name + echo zip_name: $zip_name +else + zip_name=${sdk_url##*/} + echo zip_name: $zip_name -zip_name=${sdk_url##*/} -echo zip_name: $zip_name + # env LC_ALL=en_US.UTF-8 python3 $WORKSPACE/artifactory_utils.py --action=download_file --file=$sdk_url || exit 1 + curl -o $zip_name $sdk_url || exit 1 + 7za x ./$zip_name -y > log.txt -# env LC_ALL=en_US.UTF-8 python3 $WORKSPACE/artifactory_utils.py --action=download_file --file=$sdk_url || exit 1 -curl -o $zip_name $sdk_url || exit 1 -7za x ./$zip_name -y > log.txt - -unzip_name=`ls -S -d */ | grep Agora | sed 's/\///g'` -echo unzip_name: $unzip_name - -rm -rf ./$unzip_name/rtc/bin -rm -rf ./$unzip_name/rtc/demo -rm ./$unzip_name/rtc/commits -rm ./$unzip_name/rtc/package_size_report.txt -mkdir ./$unzip_name/rtc/samples + unzip_name=`ls -S -d */ | grep Agora | sed 's/\///g'` + echo unzip_name: $unzip_name + rm -rf ./$unzip_name/rtc/bin + rm -rf ./$unzip_name/rtc/demo + rm ./$unzip_name/rtc/commits + rm ./$unzip_name/rtc/package_size_report.txt + rm -rf ./$unzip_name/pom +fi +mkdir -p ./$unzip_name/rtc/samples cp -rf ./Android/${android_direction} ./$unzip_name/rtc/samples/API-Example || exit 1 7za a -tzip result.zip -r $unzip_name > log.txt mv result.zip $WORKSPACE/withAPIExample_${BUILD_NUMBER}_$zip_name - if [ $compile_project = true ]; then # install android sdk which java @@ -79,7 +85,12 @@ if [ $compile_project = true ]; then export ANDROID_HOME=/usr/lib/android_sdk echo ANDROID_HOME: $ANDROID_HOME cd 
./$unzip_name/rtc/samples/API-Example || exit 1 - ./cloud_build.sh || exit 1 + if [ -z "$sdk_url" ]; then + ./cloud_build.sh false || exit 1 + else + ./cloud_build.sh true || exit 1 + fi + fi diff --git a/.github/ci/build/build_ios.sh b/.github/ci/build/build_ios.sh index 7a60cc7ae..6cfc26eb0 100644 --- a/.github/ci/build/build_ios.sh +++ b/.github/ci/build/build_ios.sh @@ -38,7 +38,8 @@ # pr: output test.zip to workspace dir # others: Rename the zip package name yourself, But need copy it to workspace dir ################################## - +xcode_version=$(xcodebuild -version | grep Xcode | awk '{print $2}') +echo "Xcode Version: $xcode_version" echo ios_direction: $ios_direction echo Package_Publish: $Package_Publish echo is_tag_fetch: $is_tag_fetch @@ -52,31 +53,49 @@ echo short_version: $short_version echo pwd: `pwd` echo sdk_url: $sdk_url -zip_name=${sdk_url##*/} -echo zip_name: $zip_name - -curl -o $zip_name $sdk_url || exit 1 -7za x ./$zip_name -y > log.txt - -unzip_name=`ls -S -d */ | grep Agora | sed 's/\///g'` -echo unzip_name: $unzip_name +unzip_name=Agora_Native_SDK_for_iOS_FULL +zip_name=output.zip +sdk_url_flag=false +if [ -z "$sdk_url" ]; then + sdk_url_flag=false + echo "sdk_url is empty" + echo unzip_name: $unzip_name + mkdir -p ./$unzip_name/samples + cp -rf ./iOS/${ios_direction} ./$unzip_name/samples/${ios_direction} || exit 1 + ls -al ./$unzip_name/samples/${ios_direction}/ +else + sdk_url_flag=true + zip_name=${sdk_url##*/} + echo zip_name: $zip_name + curl -o $zip_name $sdk_url || exit 1 + 7za x ./$zip_name -y > log.txt + unzip_name=`ls -S -d */ | grep Agora | sed 's/\///g'` + echo unzip_name: $unzip_name + rm -rf ./$unzip_name/bin + rm -f ./$unzip_name/commits + rm -f ./$unzip_name/package_size_report.txt -rm -rf ./$unzip_name/bin -rm ./$unzip_name/commits -rm ./$unzip_name/package_size_report.txt -mkdir ./$unzip_name/samples - -cp -rf ./iOS/${ios_direction} ./$unzip_name/samples/API-Example || exit 1 -ls -al 
./$unzip_name/samples/API-Example/ -mv ./$unzip_name/samples/API-Example/sdk.podspec ./$unzip_name/ || exit 1 -python3 ./.github/ci/build/modify_podfile.py ./$unzip_name/samples/API-Example/Podfile || exit 1 + rm -f ./$unzip_name/.commits + rm -f ./$unzip_name/AgoraInfra_iOS.swift + rm -f ./$unzip_name/AgoraRtcEngine_iOS.podspec + rm -f ./$unzip_name/AgoraAudio_iOS.podspec + rm -f ./$unzip_name/Package.swift + mkdir -p ./$unzip_name/samples + cp -rf ./iOS/${ios_direction} ./$unzip_name/samples/${ios_direction} || exit 1 + ls -al ./$unzip_name/samples/${ios_direction}/ + mv ./$unzip_name/samples/${ios_direction}/sdk.podspec ./$unzip_name/ || exit 1 +fi +python3 ./.github/ci/build/modify_podfile.py ./$unzip_name/samples/${ios_direction}/Podfile $sdk_url_flag || exit 1 +echo "start compress" 7za a -tzip result.zip -r $unzip_name > log.txt -mv result.zip $WORKSPACE/withAPIExample_${BUILD_NUMBER}_$zip_name +echo "start move to" +echo $WORKSPACE/with${ios_direction}_${BUILD_NUMBER}_$zip_name +mv result.zip $WORKSPACE/with${ios_direction}_${BUILD_NUMBER}_$zip_name if [ $compile_project = true ]; then - cd ./$unzip_name/samples/API-Example + cd ./$unzip_name/samples/${ios_direction} ./cloud_build.sh || exit 1 cd - fi diff --git a/.github/ci/build/build_mac.sh b/.github/ci/build/build_mac.sh index 00444a1ee..e0d163db8 100644 --- a/.github/ci/build/build_mac.sh +++ b/.github/ci/build/build_mac.sh @@ -51,26 +51,42 @@ echo short_version: $short_version echo pwd: `pwd` echo sdk_url: $sdk_url -zip_name=${sdk_url##*/} -echo zip_name: $zip_name - -curl -o $zip_name $sdk_url || exit 1 -7za x ./$zip_name -y > log.txt - -unzip_name=`ls -S -d */ | grep Agora` -echo unzip_name: $unzip_name - -rm -rf ./$unzip_name/bin -rm ./$unzip_name/commits -rm ./$unzip_name/package_size_report.txt -mkdir ./$unzip_name/samples +unzip_name=Agora_Native_SDK_for_iOS_FULL +zip_name=output.zip +sdk_url_flag=false +echo zip_name: $zip_name +if [ -z "$sdk_url" ]; then + sdk_url_flag=false + echo "sdk_url is 
empty" + echo unzip_name: $unzip_name + mkdir ./$unzip_name/samples + cp -rf ./macOS ./$unzip_name/samples/APIExample || exit 1 + ls -al ./$unzip_name/samples/API-Example/ +else + sdk_url_flag=true + zip_name=${sdk_url##*/} + echo unzip_name: $unzip_name + curl -o $zip_name $sdk_url || exit 1 + 7za x ./$zip_name -y > log.txt + unzip_name=`ls -S -d */ | grep Agora` + echo unzip_name: $unzip_name -cp -rf ./macOS ./$unzip_name/samples/APIExample || exit 1 -ls -al ./$unzip_name/samples/API-Example/ -mv ./$unzip_name/samples/APIExample/sdk.podspec ./$unzip_name/ -python3 ./.github/ci/build/modify_podfile.py ./$unzip_name/samples/APIExample/Podfile + rm -rf ./$unzip_name/bin + rm -f ./$unzip_name/commits + rm -f ./$unzip_name/package_size_report.txt + rm -f ./$unzip_name/.commits + rm -f ./$unzip_name/AgoraInfra_macOS.swift + rm -f ./$unzip_name/AgoraRtcEngine_macOS.podspec + rm -f ./$unzip_name/Package.swift + + mkdir ./$unzip_name/samples + cp -rf ./macOS ./$unzip_name/samples/APIExample || exit 1 + ls -al ./$unzip_name/samples/API-Example/ + mv ./$unzip_name/samples/APIExample/sdk.podspec ./$unzip_name/ +fi +python3 ./.github/ci/build/modify_podfile.py ./$unzip_name/samples/APIExample/Podfile $sdk_url_flag 7za a -tzip result.zip -r $unzip_name cp result.zip $WORKSPACE/withAPIExample_${BUILD_NUMBER}_$zip_name diff --git a/.github/ci/build/build_windows.bat b/.github/ci/build/build_windows.bat index 0f02ea60c..2d0f20a69 100644 --- a/.github/ci/build/build_windows.bat +++ b/.github/ci/build/build_windows.bat @@ -51,7 +51,9 @@ echo short_version: %short_version% echo pwd: %cd% echo sdk_url: %sdk_url% -echo off + +set zip_name=Agora_Native_SDK_for_Windows_FULL_DEFAULT.zip +if %compile_project% EQU false goto SKIP_DOWNLOAD set zip_name=%sdk_url% :LOOP for /f "tokens=1* delims=" %%a in ("%zip_name%") do ( @@ -70,11 +72,13 @@ dir curl %sdk_url% -o %zip_name% REM python %WORKSPACE%\\artifactory_utils.py --action=download_file --file=%sdk_url% 7z x ./%zip_name% -y - dir rmdir 
/S /Q Agora_Native_SDK_for_Windows_FULL\demo del /F /Q Agora_Native_SDK_for_Windows_FULL\commits del /F /Q Agora_Native_SDK_for_Windows_FULL\package_size_report.txt +:SKIP_DOWNLOAD + + mkdir Agora_Native_SDK_for_Windows_FULL\samples mkdir Agora_Native_SDK_for_Windows_FULL\samples\API-example rmdir /S /Q windows\cicd diff --git a/.github/ci/build/modify_podfile.py b/.github/ci/build/modify_podfile.py index 3cf89c2bd..787d9c7a8 100644 --- a/.github/ci/build/modify_podfile.py +++ b/.github/ci/build/modify_podfile.py @@ -1,13 +1,15 @@ import os, sys -def modfiy(path): +def modfiy(path, sdk_flag): with open(path, 'r', encoding='utf-8') as file: contents = [] for num, line in enumerate(file): if "pod 'Agora" in line: - line = '\t'+"pod 'sdk', :path => '../../sdk.podspec'" + "\n" + if sdk_flag: + line = '\t'+"pod 'sdk', :path => '../../sdk.podspec'" + "\n" elif "pod 'sdk" in line: - line = "" + if sdk_flag: + line = "" elif 'sh .download_script' in line: line = line.replace('true', 'false') + "\n" contents.append(line) @@ -19,6 +21,8 @@ def modfiy(path): fw.close() + if __name__ == '__main__': - path = sys.argv[1:][0] - modfiy(path.strip()) \ No newline at end of file + path = sys.argv[1] + sdk_url_is_not_empty = sys.argv[2].lower() == 'true' + modfiy(path.strip(), sdk_url_is_not_empty) diff --git a/Android/APIExample-Audio/app/build.gradle b/Android/APIExample-Audio/app/build.gradle index d4d1a595a..dd1dec84c 100644 --- a/Android/APIExample-Audio/app/build.gradle +++ b/Android/APIExample-Audio/app/build.gradle @@ -1,7 +1,14 @@ apply plugin: 'com.android.application' + +def sdkVersionFile = file("../gradle.properties") +def properties = new Properties() +sdkVersionFile.withInputStream { stream -> + properties.load(stream) +} +def agoraSdkVersion = properties.getProperty("rtc_sdk_version") +println("${rootProject.project.name} agoraSdkVersion: ${agoraSdkVersion}") def localSdkPath= "${rootProject.projectDir.absolutePath}/../../sdk" -def agoraSdkVersion = '4.4.1' android { 
compileSdkVersion 32 @@ -103,7 +110,6 @@ dependencies { androidTestImplementation 'androidx.test.espresso:espresso-core:3.2.0' implementation 'io.github.luizgrp.sectionedrecyclerviewadapter:sectionedrecyclerviewadapter:1.2.0' - implementation 'com.yanzhenjie:permission:2.0.3' implementation 'de.javagl:obj:0.2.1' implementation "com.squareup.okhttp3:okhttp:4.10.0" diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/ReadyFragment.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/ReadyFragment.java index f183c3616..689fe4905 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/ReadyFragment.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/ReadyFragment.java @@ -1,8 +1,5 @@ package io.agora.api.example; -import android.Manifest; -import android.annotation.SuppressLint; -import android.os.Build; import android.os.Bundle; import android.view.LayoutInflater; import android.view.Menu; @@ -18,15 +15,10 @@ import androidx.navigation.NavDestination; import androidx.navigation.Navigation; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - -import java.util.ArrayList; -import java.util.List; - import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.Constant; import io.agora.api.example.common.model.ExampleBean; +import io.agora.api.example.utils.PermissonUtils; /** * @author cjw @@ -40,7 +32,6 @@ public class ReadyFragment extends BaseFragment { @Override public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); - setHasOptionsMenu(true); exampleBean = getArguments().getParcelable(Constant.DATA); } @@ -55,7 +46,7 @@ public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup c public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); ActionBar actionBar = 
((AppCompatActivity) getActivity()).getSupportActionBar(); - if(actionBar != null){ + if (actionBar != null) { actionBar.setTitle(exampleBean.getName()); actionBar.setHomeButtonEnabled(true); actionBar.setDisplayHomeAsUpEnabled(true); @@ -64,58 +55,33 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat tips = view.findViewById(R.id.tips); tips.setText(getString(exampleBean.getTipsId())); view.findViewById(R.id.next).setOnClickListener(v -> { - runOnPermissionGranted(new Runnable() { + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { @Override - public void run() { - NavController navController = Navigation.findNavController(requireView()); - navController.navigate(exampleBean.getActionId()); - navController.addOnDestinationChangedListener(new NavController.OnDestinationChangedListener() { - @Override - public void onDestinationChanged(@NonNull NavController controller, - @NonNull NavDestination destination, - @Nullable Bundle arguments) { - if (destination.getId() == R.id.Ready) { - controller.navigateUp(); - controller.removeOnDestinationChangedListener(this); - } - } - }); + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + navigationDest(); + } else { + showLongToast(getString(R.string.permission)); + } } }); }); } - @Override - public void onPrepareOptionsMenu(@NonNull Menu menu) { - super.onPrepareOptionsMenu(menu); - menu.setGroupVisible(R.id.main_setting_group, false); - } - - @SuppressLint("WrongConstant") - private void runOnPermissionGranted(@NonNull Runnable runnable) { - List permissionList = new ArrayList<>(); - permissionList.add(Permission.READ_EXTERNAL_STORAGE); - permissionList.add(Permission.WRITE_EXTERNAL_STORAGE); - permissionList.add(Permission.RECORD_AUDIO); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { - permissionList.add(Manifest.permission.READ_PHONE_STATE); - 
permissionList.add(Manifest.permission.BLUETOOTH_CONNECT); - } - - String[] permissionArray = new String[permissionList.size()]; - permissionList.toArray(permissionArray); - - if (AndPermission.hasPermissions(this, permissionArray)) { - runnable.run(); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - permissionArray - ).onGranted(permissions -> - { - // Permissions Granted - runnable.run(); - }).start(); + private void navigationDest() { + NavController navController = Navigation.findNavController(requireView()); + navController.navigate(exampleBean.getActionId()); + navController.addOnDestinationChangedListener(new NavController.OnDestinationChangedListener() { + @Override + public void onDestinationChanged(@NonNull NavController controller, + @NonNull NavDestination destination, + @Nullable Bundle arguments) { + if (destination.getId() == R.id.Ready) { + controller.navigateUp(); + controller.removeOnDestinationChangedListener(this); + } + } + }); } } + diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/BaseFragment.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/BaseFragment.java index 68bddbbf3..cc64acdeb 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/BaseFragment.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/common/BaseFragment.java @@ -1,6 +1,7 @@ package io.agora.api.example.common; import android.content.Context; +import android.content.pm.PackageManager; import android.os.Bundle; import android.os.Handler; import android.os.Looper; @@ -9,12 +10,18 @@ import android.widget.Toast; import androidx.activity.OnBackPressedCallback; +import androidx.activity.result.ActivityResultLauncher; +import androidx.activity.result.contract.ActivityResultContracts; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.appcompat.app.AlertDialog; import 
androidx.fragment.app.Fragment; import androidx.navigation.Navigation; +import java.util.Map; + +import io.agora.api.example.utils.PermissonUtils; + /** * The type Base fragment. */ @@ -31,6 +38,29 @@ public void handleOnBackPressed() { onBackPressed(); } }; + private String[] permissionArray; + private PermissonUtils.PermissionResultCallback permissionResultCallback; + private ActivityResultLauncher permissionLauncher = registerForActivityResult( + new ActivityResultContracts.RequestMultiplePermissions(), + result -> { + if (permissionResultCallback != null) { + boolean allPermissionsGranted = true; + for (Map.Entry entry : result.entrySet()) { + if (!entry.getValue()) { + allPermissionsGranted = false; + break; + } + } + int[] grantResults = new int[permissionArray.length]; + for (int i = 0; i < permissionArray.length; i++) { + grantResults[i] = result.containsKey(permissionArray[i]) && result.get(permissionArray[i]) ? PackageManager.PERMISSION_GRANTED : PackageManager.PERMISSION_DENIED; + } + if (permissionResultCallback != null) { + permissionResultCallback.onPermissionsResult(allPermissionsGranted, permissionArray, grantResults); + } + } + } + ); @Override public void onCreate(@Nullable Bundle savedInstanceState) { @@ -171,4 +201,33 @@ protected void onBackPressed() { Navigation.findNavController(view).navigateUp(); } } + + /** + * @param permissions + * @param callback + */ + protected void checkOrRequestPermisson(String[] permissions, PermissonUtils.PermissionResultCallback callback) { + if (permissions != null && permissions.length > 0) { + permissionArray = permissions; + permissionResultCallback = callback; + if (PermissonUtils.checkPermissions(getContext(), permissionArray)) { + int[] grantResults = new int[permissionArray.length]; + for (int i = 0; i < permissionArray.length; i++) { + grantResults[i] = PackageManager.PERMISSION_GRANTED; + } + permissionResultCallback.onPermissionsResult(true, permissionArray, grantResults); + } else { + 
permissionLauncher.launch(permissionArray); + } + } + } + + /** + * request permisson with common permissions + * + * @param callback + */ + protected void checkOrRequestPermisson(PermissonUtils.PermissionResultCallback callback) { + checkOrRequestPermisson(PermissonUtils.getCommonPermission(), callback); + } } diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/PlayAudioFiles.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/PlayAudioFiles.java index e41594819..40ea64b86 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/PlayAudioFiles.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/PlayAudioFiles.java @@ -19,9 +19,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.LinkedHashMap; import java.util.Map; @@ -32,6 +29,7 @@ import io.agora.api.example.common.Constant; import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -66,23 +64,20 @@ public class PlayAudioFiles extends BaseFragment implements View.OnClickListener private AudioSeatManager audioSeatManager; @Override - public void onCreate(@Nullable Bundle savedInstanceState) - { + public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); handler = new Handler(); } @Nullable @Override - public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) - { + public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { 
View view = inflater.inflate(R.layout.fragment_play_audio_files, container, false); return view; } @Override - public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) - { + public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); join = view.findViewById(R.id.btn_join); @@ -130,7 +125,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat resetLayoutByJoin(); } - private void resetLayoutByJoin(){ + private void resetLayoutByJoin() { audioProfile.setEnabled(!joined); mixingStart.setClickable(joined); @@ -149,17 +144,14 @@ private void resetLayoutByJoin(){ } @Override - public void onActivityCreated(@Nullable Bundle savedInstanceState) - { + public void onActivityCreated(@Nullable Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); // Check if the context is valid Context context = getContext(); - if (context == null) - { + if (context == null) { return; } - try - { + try { RtcEngineConfig config = new RtcEngineConfig(); /** * The context of Android Activity @@ -182,7 +174,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) */ config.mEventHandler = iRtcEngineEventHandler; config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.DEFAULT); - config.mAreaCode = ((MainApplication)getActivity().getApplication()).getGlobalSettings().getAreaCode(); + config.mAreaCode = ((MainApplication) getActivity().getApplication()).getGlobalSettings().getAreaCode(); engine = RtcEngine.create(config); /** * This parameter is for reporting the usages of APIExample to agora background. 
@@ -203,9 +195,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) engine.setLocalAccessPoint(localAccessPointConfiguration); } preloadAudioEffect(); - } - catch (Exception e) - { + } catch (Exception e) { e.printStackTrace(); getActivity().onBackPressed(); } @@ -215,7 +205,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) * To ensure smooth communication, limit the size of the audio effect file. * We recommend using this method to preload the audio effect before calling the joinChannel method. */ - private void preloadAudioEffect(){ + private void preloadAudioEffect() { // Gets the global audio effect manager. audioEffectManager = engine.getAudioEffectManager(); // Preloads the audio effect (recommended). Note the file size, and preload the file before joining the channel. @@ -225,12 +215,10 @@ private void preloadAudioEffect(){ } @Override - public void onDestroy() - { + public void onDestroy() { super.onDestroy(); /**leaveChannel and Destroy the RtcEngine instance*/ - if(engine != null) - { + if (engine != null) { engine.leaveChannel(); } handler.post(RtcEngine::destroy); @@ -250,33 +238,23 @@ public void onNothingSelected(AdapterView parent) { } @Override - public void onClick(View v) - { - if (v == join) - { - if (!joined) - { + public void onClick(View v) { + if (v == join) { + if (!joined) { CommonUtil.hideInputBoard(getActivity(), et_channel); // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) - { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> - { - // Permissions Granted - joinChannel(channelId); - }).start(); - } - else - { + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() 
{ + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + // Permissions Granted + joinChannel(channelId); + } + } + }); + } else { joined = false; /**After joining a channel, the user must call the leaveChannel method to end the * call before joining another channel. This method returns 0 if the user leaves the @@ -300,29 +278,19 @@ public void onClick(View v) resetLayoutByJoin(); audioSeatManager.downAllSeats(); } - } - else if (v == mixingStart) - { + } else if (v == mixingStart) { int ret = engine.startAudioMixing(Constant.MIX_FILE_PATH, false, -1, 0); Log.i(TAG, "startAudioMixing >> ret=" + ret); - } - else if (v == mixingResume) - { + } else if (v == mixingResume) { int ret = engine.resumeAudioMixing(); Log.i(TAG, "resumeAudioMixing >> ret=" + ret); - } - else if (v == mixingPause) - { + } else if (v == mixingPause) { int ret = engine.pauseAudioMixing(); Log.i(TAG, "pauseAudioMixing >> ret=" + ret); - } - else if (v == mixingStop) - { + } else if (v == mixingStop) { int ret = engine.stopAudioMixing(); Log.i(TAG, "stopAudioMixing >> ret=" + ret); - } - else if (v == effectStart) - { + } else if (v == effectStart) { /** Plays an audio effect file. * Returns * 0: Success. @@ -337,17 +305,14 @@ else if (v == effectStart) 100, // Sets the volume. The value ranges between 0 and 100. 100 is the original volume. true // Sets whether to publish the audio effect. 
); - Log.i(TAG, "result playRet:"+ playRet); - } - else if(v == effectResume){ + Log.i(TAG, "result playRet:" + playRet); + } else if (v == effectResume) { int ret = engine.resumeEffect(EFFECT_SOUND_ID); Log.i(TAG, "resumeEffect >> ret=" + ret); - } - else if(v == effectPause){ + } else if (v == effectPause) { int ret = engine.pauseEffect(EFFECT_SOUND_ID); Log.i(TAG, "resumeEffect >> ret=" + ret); - } - else if(v == effectStop){ + } else if (v == effectStop) { int ret = engine.stopEffect(EFFECT_SOUND_ID); Log.i(TAG, "resumeEffect >> ret=" + ret); } @@ -355,9 +320,9 @@ else if(v == effectStop){ /** * @param channelId Specify the channel name that you want to join. - * Users that input the same channel name join the same channel.*/ - private void joinChannel(String channelId) - { + * Users that input the same channel name join the same channel. + */ + private void joinChannel(String channelId) { /**In the demo, the default is to enter as the anchor.*/ engine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER); engine.setAudioProfile( @@ -377,8 +342,7 @@ private void joinChannel(String channelId) option.autoSubscribeAudio = true; option.autoSubscribeVideo = true; int res = engine.joinChannel(ret, channelId, 0, option); - if (res != 0) - { + if (res != 0) { // Usually happens with invalid parameters // Error code description can be found at: // en: https://docs.agora.io/en/Voice/API%20Reference/java/classio_1_1agora_1_1rtc_1_1_i_rtc_engine_event_handler_1_1_error_code.html @@ -392,18 +356,18 @@ private void joinChannel(String channelId) }); } - /**IRtcEngineEventHandler is an abstract class providing default implementation. - * The SDK uses this class to report to the app on SDK runtime events.*/ - private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() - { + /** + * IRtcEngineEventHandler is an abstract class providing default implementation. + * The SDK uses this class to report to the app on SDK runtime events. 
+ */ + private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() { /** * Error code description can be found at: * en: https://api-ref.agora.io/en/voice-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror * cn: https://docs.agora.io/cn/voice-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror */ @Override - public void onError(int err) - { + public void onError(int err) { Log.w(TAG, String.format("onError code %d message %s", err, RtcEngine.getErrorDescription(err))); } @@ -411,8 +375,7 @@ public void onError(int err) * @param stats With this callback, the application retrieves the channel information, * such as the call duration and statistics.*/ @Override - public void onLeaveChannel(RtcStats stats) - { + public void onLeaveChannel(RtcStats stats) { super.onLeaveChannel(stats); Log.i(TAG, String.format("local user %d leaveChannel!", myUid)); showLongToast(String.format("local user %d leaveChannel!", myUid)); @@ -425,17 +388,14 @@ public void onLeaveChannel(RtcStats stats) * @param uid User ID * @param elapsed Time elapsed (ms) from the user calling joinChannel until this callback is triggered*/ @Override - public void onJoinChannelSuccess(String channel, int uid, int elapsed) - { + public void onJoinChannelSuccess(String channel, int uid, int elapsed) { Log.i(TAG, String.format("onJoinChannelSuccess channel %s uid %d", channel, uid)); showLongToast(String.format("onJoinChannelSuccess channel %s uid %d", channel, uid)); myUid = uid; joined = true; - handler.post(new Runnable() - { + handler.post(new Runnable() { @Override - public void run() - { + public void run() { join.setEnabled(true); join.setText(getString(R.string.leave)); resetLayoutByJoin(); @@ -513,8 +473,7 @@ public void onRemoteAudioStateChanged(int uid, int state, int reason, int elapse * @param elapsed Time delay (ms) from the local user calling 
joinChannel/setClientRole * until this callback is triggered.*/ @Override - public void onUserJoined(int uid, int elapsed) - { + public void onUserJoined(int uid, int elapsed) { super.onUserJoined(uid, elapsed); Log.i(TAG, "onUserJoined->" + uid); showLongToast(String.format("user %d joined!", uid)); @@ -532,8 +491,7 @@ public void onUserJoined(int uid, int elapsed) * USER_OFFLINE_BECOME_AUDIENCE(2): (Live broadcast only.) The client role switched from * the host to the audience.*/ @Override - public void onUserOffline(int uid, int reason) - { + public void onUserOffline(int uid, int reason) { Log.i(TAG, String.format("user %d offline! reason:%d", uid, reason)); showLongToast(String.format("user %d offline! reason:%d", uid, reason)); runOnUIThread(() -> audioSeatManager.downSeat(uid)); @@ -552,29 +510,26 @@ public void onAudioMixingFinished() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { - if(seekBar.getId() == R.id.mixingPublishVolBar){ + if (seekBar.getId() == R.id.mixingPublishVolBar) { /** * Adjusts the volume of audio mixing for publishing (sending to other users). * @param volume: Audio mixing volume for publishing. The value ranges between 0 and 100 (default). */ engine.adjustAudioMixingPublishVolume(progress); - } - else if(seekBar.getId() == R.id.mixingPlayoutVolBar){ + } else if (seekBar.getId() == R.id.mixingPlayoutVolBar) { /** * Adjusts the volume of audio mixing for local playback. * @param volume: Audio mixing volume for local playback. The value ranges between 0 and 100 (default). */ engine.adjustAudioMixingPlayoutVolume(progress); - } - else if(seekBar.getId() == R.id.mixingVolBar){ + } else if (seekBar.getId() == R.id.mixingVolBar) { /** * Adjusts the volume of audio mixing. * Call this method when you are in a channel. * @param volume: Audio mixing volume. The value ranges between 0 and 100 (default). 
*/ engine.adjustAudioMixingVolume(progress); - } - else if(seekBar.getId() == R.id.effectVolBar){ + } else if (seekBar.getId() == R.id.effectVolBar) { engine.setEffectsVolume(progress); } } diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java index f097b2b49..b9c7b0001 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java @@ -17,9 +17,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; @@ -30,6 +27,7 @@ import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -226,19 +224,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> - { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, 
int[] grantResults) { + if (allPermissionsGranted) { + // Permissions Granted + joinChannel(channelId); + } + } + }); } else { joined = false; /**After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/RhythmPlayer.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/RhythmPlayer.java index 87309531b..1f6b9ebf8 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/RhythmPlayer.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/RhythmPlayer.java @@ -18,14 +18,12 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -59,23 +57,20 @@ public class RhythmPlayer extends BaseFragment implements View.OnClickListener, private ChannelMediaOptions mChannelMediaOptions; @Override - public void onCreate(@Nullable Bundle savedInstanceState) - { + public void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); handler = new Handler(); } @Nullable @Override - public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) - { + public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { View view = inflater.inflate(R.layout.fragment_rhythm_player, container, false); return 
view; } @Override - public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) - { + public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); join = view.findViewById(R.id.btn_join); play = view.findViewById(R.id.play); @@ -91,17 +86,14 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat } @Override - public void onActivityCreated(@Nullable Bundle savedInstanceState) - { + public void onActivityCreated(@Nullable Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); // Check if the context is valid Context context = getContext(); - if (context == null) - { + if (context == null) { return; } - try - { + try { RtcEngineConfig config = new RtcEngineConfig(); /** * The context of Android Activity @@ -124,7 +116,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) */ config.mEventHandler = iRtcEngineEventHandler; config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.DEFAULT); - config.mAreaCode = ((MainApplication)getActivity().getApplication()).getGlobalSettings().getAreaCode(); + config.mAreaCode = ((MainApplication) getActivity().getApplication()).getGlobalSettings().getAreaCode(); engine = RtcEngine.create(config); /** * This parameter is for reporting the usages of APIExample to agora background. @@ -144,21 +136,17 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) // This api can only be used in the private media server scenario, otherwise some problems may occur. 
engine.setLocalAccessPoint(localAccessPointConfiguration); } - } - catch (Exception e) - { + } catch (Exception e) { e.printStackTrace(); getActivity().onBackPressed(); } } @Override - public void onDestroy() - { + public void onDestroy() { super.onDestroy(); /**leaveChannel and Destroy the RtcEngine instance*/ - if(engine != null) - { + if (engine != null) { engine.stopRhythmPlayer(); engine.leaveChannel(); } @@ -168,33 +156,23 @@ public void onDestroy() @Override - public void onClick(View v) - { - if (v.getId() == R.id.btn_join) - { - if (!joined) - { + public void onClick(View v) { + if (v.getId() == R.id.btn_join) { + if (!joined) { CommonUtil.hideInputBoard(getActivity(), et_channel); // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) - { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> - { - // Permissions Granted - joinChannel(channelId); - }).start(); - } - else - { + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + // Permissions Granted + joinChannel(channelId); + } + } + }); + } else { joined = false; /**After joining a channel, the user must call the leaveChannel method to end the * call before joining another channel. 
This method returns 0 if the user leaves the @@ -216,11 +194,10 @@ public void onClick(View v) engine.leaveChannel(); join.setText(getString(R.string.join)); } - } - else if(v.getId() == R.id.play){ - if(!isPlaying){ + } else if (v.getId() == R.id.play) { + if (!isPlaying) { int ret = engine.startRhythmPlayer(URL_DOWNBEAT, URL_UPBEAT, agoraRhythmPlayerConfig); - if(joined){ + if (joined) { mChannelMediaOptions.publishRhythmPlayerTrack = true; engine.updateChannelMediaOptions(mChannelMediaOptions); } @@ -229,10 +206,9 @@ else if(v.getId() == R.id.play){ beatPerMeasure.setEnabled(false); beatPerMinute.setEnabled(false); } - } - else if(v.getId() == R.id.stop){ + } else if (v.getId() == R.id.stop) { engine.stopRhythmPlayer(); - if(joined){ + if (joined) { mChannelMediaOptions.publishRhythmPlayerTrack = false; engine.updateChannelMediaOptions(mChannelMediaOptions); } @@ -244,9 +220,9 @@ else if(v.getId() == R.id.stop){ /** * @param channelId Specify the channel name that you want to join. - * Users that input the same channel name join the same channel.*/ - private void joinChannel(String channelId) - { + * Users that input the same channel name join the same channel. + */ + private void joinChannel(String channelId) { /**In the demo, the default is to enter as the anchor.*/ engine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER); engine.enableAudioVolumeIndication(1000, 3, true); @@ -283,18 +259,18 @@ private void joinChannel(String channelId) }); } - /**IRtcEngineEventHandler is an abstract class providing default implementation. - * The SDK uses this class to report to the app on SDK runtime events.*/ - private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() - { + /** + * IRtcEngineEventHandler is an abstract class providing default implementation. + * The SDK uses this class to report to the app on SDK runtime events. 
+ */ + private final IRtcEngineEventHandler iRtcEngineEventHandler = new IRtcEngineEventHandler() { /** * Error code description can be found at: * en: https://api-ref.agora.io/en/voice-sdk/android/4.x/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror * cn: https://docs.agora.io/cn/voice-call-4.x/API%20Reference/java_ng/API/class_irtcengineeventhandler.html#callback_irtcengineeventhandler_onerror */ @Override - public void onError(int err) - { + public void onError(int err) { Log.w(TAG, String.format("onError code %d message %s", err, RtcEngine.getErrorDescription(err))); } @@ -302,8 +278,7 @@ public void onError(int err) * @param stats With this callback, the application retrieves the channel information, * such as the call duration and statistics.*/ @Override - public void onLeaveChannel(RtcStats stats) - { + public void onLeaveChannel(RtcStats stats) { super.onLeaveChannel(stats); Log.i(TAG, String.format("local user %d leaveChannel!", myUid)); showLongToast(String.format("local user %d leaveChannel!", myUid)); @@ -316,17 +291,14 @@ public void onLeaveChannel(RtcStats stats) * @param uid User ID * @param elapsed Time elapsed (ms) from the user calling joinChannel until this callback is triggered*/ @Override - public void onJoinChannelSuccess(String channel, int uid, int elapsed) - { + public void onJoinChannelSuccess(String channel, int uid, int elapsed) { Log.i(TAG, String.format("onJoinChannelSuccess channel %s uid %d", channel, uid)); showLongToast(String.format("onJoinChannelSuccess channel %s uid %d", channel, uid)); myUid = uid; joined = true; - handler.post(new Runnable() - { + handler.post(new Runnable() { @Override - public void run() - { + public void run() { join.setEnabled(true); join.setText(getString(R.string.leave)); play.setEnabled(true); @@ -380,8 +352,7 @@ public void onRemoteAudioStateChanged(int uid, int state, int reason, int elapse * @param elapsed Time delay (ms) from the local user calling 
joinChannel/setClientRole * until this callback is triggered.*/ @Override - public void onUserJoined(int uid, int elapsed) - { + public void onUserJoined(int uid, int elapsed) { super.onUserJoined(uid, elapsed); Log.i(TAG, "onUserJoined->" + uid); showLongToast(String.format("user %d joined!", uid)); @@ -398,8 +369,7 @@ public void onUserJoined(int uid, int elapsed) * USER_OFFLINE_BECOME_AUDIENCE(2): (Live broadcast only.) The client role switched from * the host to the audience.*/ @Override - public void onUserOffline(int uid, int reason) - { + public void onUserOffline(int uid, int reason) { Log.i(TAG, String.format("user %d offline! reason:%d", uid, reason)); showLongToast(String.format("user %d offline! reason:%d", uid, reason)); } @@ -413,13 +383,12 @@ public void onActiveSpeaker(int uid) { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { - if(seekBar.getId() == R.id.beatsPerMeasure){ + if (seekBar.getId() == R.id.beatsPerMeasure) { agoraRhythmPlayerConfig.beatsPerMeasure = seekBar.getProgress() < 1 ? 1 : seekBar.getProgress(); - } - else if(seekBar.getId() == R.id.beatsPerMinute){ + } else if (seekBar.getId() == R.id.beatsPerMinute) { agoraRhythmPlayerConfig.beatsPerMinute = seekBar.getProgress() < 60 ? 
60 : seekBar.getProgress(); } - Log.i(TAG, "agoraRhythmPlayerConfig beatsPerMeasure:"+ agoraRhythmPlayerConfig.beatsPerMeasure +", beatsPerMinute:" + agoraRhythmPlayerConfig.beatsPerMinute); + Log.i(TAG, "agoraRhythmPlayerConfig beatsPerMeasure:" + agoraRhythmPlayerConfig.beatsPerMeasure + ", beatsPerMinute:" + agoraRhythmPlayerConfig.beatsPerMinute); } @Override diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java index 8efd7619d..39780ae2c 100644 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java @@ -67,9 +67,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; @@ -81,6 +78,7 @@ import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -310,18 +308,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + 
checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + // Permissions Granted + joinChannel(channelId); + } + } + }); } else { joined = false; resetControlLayoutByJoined(); diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java index 479854f9a..9d3be1823 100755 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java @@ -17,9 +17,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.nio.ByteBuffer; import io.agora.api.example.MainApplication; @@ -28,6 +25,7 @@ import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -183,15 +181,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission(Permission.Group.STORAGE, Permission.Group.MICROPHONE).onGranted(permissions -> { - // Permissions Granted - 
joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + // Permissions Granted + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java index bddfecc96..aa49bb8ff 100755 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java @@ -17,9 +17,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; @@ -27,6 +24,7 @@ import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.AudioFileReader; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -140,7 +138,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { */ config.mEventHandler = iRtcEngineEventHandler; config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.DEFAULT); - config.mAreaCode = ((MainApplication)getActivity().getApplication()).getGlobalSettings().getAreaCode(); + config.mAreaCode = 
((MainApplication) getActivity().getApplication()).getGlobalSettings().getAreaCode(); engine = (RtcEngineEx) RtcEngine.create(config); /** * This parameter is for reporting the usages of APIExample to agora background. @@ -162,7 +160,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { } audioPushingHelper = new AudioFileReader(requireContext(), (buffer, timestamp) -> { - if(joined && engine != null && customAudioTrack != -1){ + if (joined && engine != null && customAudioTrack != -1) { int ret = engine.pushExternalAudioFrame(buffer, timestamp, AudioFileReader.SAMPLE_RATE, AudioFileReader.SAMPLE_NUM_OF_CHANNEL, Constants.BytesPerSample.TWO_BYTES_PER_SAMPLE, customAudioTrack); Log.i(TAG, "pushExternalAudioFrame times:" + (++pushTimes) + ", ret=" + ret); } @@ -176,11 +174,11 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { @Override public void onDestroy() { super.onDestroy(); - if(customAudioTrack != -1){ + if (customAudioTrack != -1) { engine.destroyCustomAudioTrack(customAudioTrack); customAudioTrack = -1; } - if(audioPushingHelper != null){ + if (audioPushingHelper != null) { audioPushingHelper.stop(); } /**leaveChannel and Destroy the RtcEngine instance*/ @@ -214,19 +212,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> - { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + // 
Permissions Granted + joinChannel(channelId); + } + } + }); } else { joined = false; /**After joining a channel, the user must call the leaveChannel method to end the @@ -252,7 +246,7 @@ public void onClick(View v) { pcm.setEnabled(false); pcm.setChecked(false); mic.setChecked(true); - if(audioPushingHelper != null){ + if (audioPushingHelper != null) { audioPushingHelper.stop(); } audioSeatManager.downAllSeats(); @@ -343,7 +337,7 @@ public void run() { pcm.setEnabled(true); join.setEnabled(true); join.setText(getString(R.string.leave)); - if(audioPushingHelper != null){ + if (audioPushingHelper != null) { pushTimes = 0; audioPushingHelper.start(); } diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelAudio.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelAudio.java index f42034c23..d55349b87 100755 --- a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelAudio.java +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelAudio.java @@ -34,9 +34,6 @@ import androidx.appcompat.app.AlertDialog; import androidx.core.app.NotificationManagerCompat; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.Map; @@ -48,6 +45,7 @@ import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -374,22 +372,17 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, 
Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - audioProfileInput.setEnabled(false); - channelProfileInput.setEnabled(false); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - audioProfileInput.setEnabled(false); - channelProfileInput.setEnabled(false); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + // Permissions Granted + joinChannel(channelId); + audioProfileInput.setEnabled(false); + channelProfileInput.setEnabled(false); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/utils/PermissonUtils.java b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/utils/PermissonUtils.java new file mode 100644 index 000000000..3c647cbbc --- /dev/null +++ b/Android/APIExample-Audio/app/src/main/java/io/agora/api/example/utils/PermissonUtils.java @@ -0,0 +1,51 @@ +package io.agora.api.example.utils; + +import android.Manifest; +import android.content.Context; +import android.content.pm.PackageManager; +import android.os.Build; + +import androidx.core.content.ContextCompat; + +import java.util.ArrayList; +import java.util.List; + +public class PermissonUtils { + private static final String TAG = "PermissonUtils"; + + public static String[] getCommonPermission() { + List permissionList = new ArrayList<>(); + permissionList.add(Manifest.permission.READ_EXTERNAL_STORAGE); + permissionList.add(Manifest.permission.WRITE_EXTERNAL_STORAGE); + permissionList.add(Manifest.permission.RECORD_AUDIO); + if (Build.VERSION.SDK_INT >= 
Build.VERSION_CODES.S) { + permissionList.add(Manifest.permission.READ_PHONE_STATE); + permissionList.add(Manifest.permission.BLUETOOTH_CONNECT); + } + String[] permissionArray = new String[permissionList.size()]; + permissionList.toArray(permissionArray); + return permissionArray; + } + + //check array permission is granted + public static boolean checkPermissions(Context context, String[] permissions) { + for (String permission : permissions) { + if (ContextCompat.checkSelfPermission(context, permission) != PackageManager.PERMISSION_GRANTED) { + return false; + } + } + return true; + } + + //check single permission is granted + public static boolean checkPermission(Context context, String permission) { + return ContextCompat.checkSelfPermission(context, permission) == PackageManager.PERMISSION_GRANTED; + } + + + // Callback interface for permission results + public interface PermissionResultCallback { + void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults); + } + +} diff --git a/Android/APIExample-Audio/app/src/main/res/values-zh/strings.xml b/Android/APIExample-Audio/app/src/main/res/values-zh/strings.xml index 290c739b0..e832b3a33 100644 --- a/Android/APIExample-Audio/app/src/main/res/values-zh/strings.xml +++ b/Android/APIExample-Audio/app/src/main/res/values-zh/strings.xml @@ -32,7 +32,7 @@ 鑷畾涔夐煶棰戞覆鏌 鍘熷闊抽鏁版嵁 绌洪棿闊虫晥 - + 璇锋巿浜堟潈闄 姝ょず渚嬫紨绀哄湪浣跨敤RTC閫氳瘽涓煶棰戣矾鐢卞绗笁鏂规挱鏀惧櫒鐨勫奖鍝嶃 姝ょず渚嬫紨绀轰簡濡備綍浣跨敤SDK鍔犲叆棰戦亾杩涜绾闊抽氳瘽鐨勫姛鑳姐 姝ょず渚嬫紨绀轰簡濡備綍浣跨敤SDK鍔犲叆甯oken鐨勯閬撹繘琛岀函璇煶閫氳瘽鐨勫姛鑳姐 diff --git a/Android/APIExample-Audio/app/src/main/res/values/strings.xml b/Android/APIExample-Audio/app/src/main/res/values/strings.xml index bc36eba2c..ca378d453 100644 --- a/Android/APIExample-Audio/app/src/main/res/values/strings.xml +++ b/Android/APIExample-Audio/app/src/main/res/values/strings.xml @@ -54,7 +54,7 @@ Audio Effects Vol Please insert headphones to experience the spatial audio effect This example shows the behavior of audio router while 
communicating with rtc. - + Please granted the request permissions Rhythm Player Beats per Measure diff --git a/Android/APIExample-Audio/cloud_build.sh b/Android/APIExample-Audio/cloud_build.sh index 812a18d85..c472c0450 100755 --- a/Android/APIExample-Audio/cloud_build.sh +++ b/Android/APIExample-Audio/cloud_build.sh @@ -15,7 +15,24 @@ rm -f app/src/main/res/values/string_configs.xml-e ./gradlew clean || exit 1 ./gradlew :app:assembleRelease || exit 1 + +SDK_VERSION="" +if [ "$1" = "false" ]; then + sdk_version_file="./gradle.properties" + if [[ -f "$sdk_version_file" ]]; then + rtc_sdk_version=$(grep "rtc_sdk_version" "$sdk_version_file" | cut -d'=' -f2) + if [[ -n "$rtc_sdk_version" ]]; then + SDK_VERSION=$(echo "$rtc_sdk_version" | sed 's/^[ \t]*//;s/[ \t]*$//') + else + echo "rtc_sdk_version value not found" + fi +else + echo "file not found: $sdk_version_file" +fi +else + SDK_VERSION=$(echo $sdk_url | cut -d "/" -f 5) +fi + if [ "$WORKSPACE" != "" ]; then -SDK_VERSION=$(echo $sdk_url | cut -d "/" -f 5) cp app/build/outputs/apk/release/*.apk $WORKSPACE/APIExample-Audio_${BUILD_NUMBER}_${SDK_VERSION}_$(date "+%Y%m%d%H%M%S").apk fi \ No newline at end of file diff --git a/Android/APIExample-Audio/gradle.properties b/Android/APIExample-Audio/gradle.properties index c0ae9b619..ae0306701 100644 --- a/Android/APIExample-Audio/gradle.properties +++ b/Android/APIExample-Audio/gradle.properties @@ -19,4 +19,6 @@ android.useAndroidX=true android.enableJetifier=true # read enable simple filter section on README first before set this flag to TRUE -simpleFilter = false \ No newline at end of file +simpleFilter = false + +rtc_sdk_version = 4.5.0 \ No newline at end of file diff --git a/Android/APIExample-Compose/app/build.gradle.kts b/Android/APIExample-Compose/app/build.gradle.kts index fb0485a35..6db4bff1a 100644 --- a/Android/APIExample-Compose/app/build.gradle.kts +++ b/Android/APIExample-Compose/app/build.gradle.kts @@ -8,6 +8,11 @@ plugins { 
alias(libs.plugins.jetbrainsKotlinAndroid) } +val sdkVersionFile = file("../gradle.properties") +val properties = Properties() +properties.load(sdkVersionFile.inputStream()) +val agoraSdkVersion = properties.getProperty("rtc_sdk_version") +println("${rootProject.project.name} agoraSdkVersion: ${agoraSdkVersion}") val localSdkPath = "${rootProject.projectDir.absolutePath}/../../sdk" android { @@ -85,7 +90,7 @@ android { outputs.all { if (this is ApkVariantOutputImpl) { this.outputFileName = - "${rootProject.name}_${libs.versions.agoraSdk.get()}_${ + "${rootProject.name}_${agoraSdkVersion}_${ SimpleDateFormat("yyyyMMddHHmm").format( Date() ) @@ -129,8 +134,10 @@ dependencies { if (File(localSdkPath).exists()) { implementation(fileTree(localSdkPath).include("*.jar", "*.aar")) } else { - implementation(libs.agora.full.sdk) - implementation(libs.agora.full.screen.sharing) + implementation("io.agora.rtc:full-sdk:${agoraSdkVersion}") + implementation("io.agora.rtc:full-screen-sharing:${agoraSdkVersion}") +// implementation(libs.agora.full.sdk) +// implementation(libs.agora.full.screen.sharing) } } \ No newline at end of file diff --git a/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/CustomAudioSource.kt b/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/CustomAudioSource.kt index e9c4e949e..1b11b4605 100644 --- a/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/CustomAudioSource.kt +++ b/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/CustomAudioSource.kt @@ -143,6 +143,7 @@ fun CustomAudioSource() { Toast.makeText(context, "Permission Granted", Toast.LENGTH_LONG).show() option.channelProfile = Constants.CHANNEL_PROFILE_LIVE_BROADCASTING option.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER + option.publishMicrophoneTrack = false TokenUtils.gen(channelName, 0){ rtcEngine.joinChannel(it, channelName, 0, option) } diff --git 
a/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/MediaRecorder.kt b/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/MediaRecorder.kt index c8514fe42..83d41992b 100644 --- a/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/MediaRecorder.kt +++ b/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/MediaRecorder.kt @@ -6,6 +6,7 @@ import android.view.View import android.widget.Toast import androidx.activity.compose.rememberLauncherForActivityResult import androidx.activity.result.contract.ActivityResultContracts +import androidx.compose.foundation.layout.BoxScope import androidx.compose.foundation.layout.Column import androidx.compose.foundation.layout.padding import androidx.compose.material3.AlertDialog @@ -19,6 +20,7 @@ import androidx.compose.runtime.mutableIntStateOf import androidx.compose.runtime.mutableStateMapOf import androidx.compose.runtime.mutableStateOf import androidx.compose.runtime.remember +import androidx.compose.runtime.rememberCoroutineScope import androidx.compose.runtime.saveable.rememberSaveable import androidx.compose.runtime.setValue import androidx.compose.ui.Alignment @@ -29,6 +31,7 @@ import androidx.compose.ui.platform.LocalSoftwareKeyboardController import androidx.compose.ui.res.stringResource import androidx.compose.ui.tooling.preview.Preview import androidx.compose.ui.unit.dp +import androidx.core.content.ContentProviderCompat.requireContext import io.agora.api.example.compose.BuildConfig import io.agora.api.example.compose.R import io.agora.api.example.compose.data.SettingPreferences @@ -47,6 +50,7 @@ import io.agora.rtc2.RtcEngine import io.agora.rtc2.RtcEngineConfig import io.agora.rtc2.video.VideoCanvas import io.agora.rtc2.video.VideoEncoderConfiguration +import kotlinx.coroutines.launch import java.io.File @Composable @@ -173,7 +177,7 @@ fun MediaRecorder() { Toast.makeText(context, "Permission 
Denied", Toast.LENGTH_LONG).show() } } - + val coroutineScope = rememberCoroutineScope() MediaRecorderView( channelName = channelName, isJoined = isJoined, @@ -200,58 +204,81 @@ fun MediaRecorder() { rtcEngine.setupRemoteVideo(VideoCanvas(view, Constants.RENDER_MODE_HIDDEN, id)) } }, - onRecorderClick = { id, isRecording -> - if (isRecording) { - val storagePath: String = - context.externalCacheDir?.absolutePath + File.separator + "media_recorder_" + channelName + "_" + id + ".mp4" - val recorder = rtcEngine.createMediaRecorder(RecorderStreamInfo(channelName, id)) - recorder.setMediaRecorderObserver(object : IMediaRecorderCallback { - override fun onRecorderStateChanged( - channelId: String?, - uid: Int, - state: Int, - reason: Int - ) { - Log.d( - "MediaRecorder", - "LocalMediaRecorder -- onRecorderStateChanged channelId=$channelId, uid=$uid, state=$state, reason=$reason" - ) - if (state == AgoraMediaRecorder.RECORDER_STATE_STOP) { - recorders.remove(uid) - recoderResult = storagePath - } - } + overlay = { _, id -> + var isRecording by remember { mutableStateOf(false) } + Button( + modifier = Modifier + .padding(8.dp) + .align(Alignment.BottomEnd), + onClick = { + isRecording = !isRecording + if (isRecording) { + val storagePath: String = + context.externalCacheDir?.absolutePath + File.separator + "media_recorder_" + channelName + "_" + id + ".mp4" + val recorder = + rtcEngine.createMediaRecorder(RecorderStreamInfo(channelName, id, 0)) + recorder.setMediaRecorderObserver(object : IMediaRecorderCallback { + override fun onRecorderStateChanged( + channelId: String?, + uid: Int, + state: Int, + reason: Int + ) { + Log.d( + "MediaRecorder", + "LocalMediaRecorder -- onRecorderStateChanged channelId=$channelId, uid=$uid, state=$state, reason=$reason" + ) + if (state == AgoraMediaRecorder.RECORDER_STATE_STOP) { + recorders.remove(uid) + recoderResult = storagePath + } else if (state == AgoraMediaRecorder.RECORDER_STATE_ERROR && reason == 
AgoraMediaRecorder.RECORDER_REASON_CONFIG_CHANGED) { + coroutineScope.launch { + isRecording = false + recorders[id]?.let { + it.stopRecording() + it.release() + } + } + } + } - override fun onRecorderInfoUpdated( - channelId: String?, - uid: Int, - info: RecorderInfo? - ) { - info ?: return - Log.d( - "MediaRecorder", - "LocalMediaRecorder -- onRecorderInfoUpdated channelId=" - + channelId + ", uid=" + uid + ", fileName=" + info.fileName - + ", durationMs=" + info.durationMs + ", fileSize=" + info.fileSize + override fun onRecorderInfoUpdated( + channelId: String?, + uid: Int, + info: RecorderInfo? + ) { + info ?: return + Log.d( + "MediaRecorder", + "LocalMediaRecorder -- onRecorderInfoUpdated channelId=" + + channelId + ", uid=" + uid + ", fileName=" + info.fileName + + ", durationMs=" + info.durationMs + ", fileSize=" + info.fileSize + ) + } + }) + recorder.startRecording( + AgoraMediaRecorder.MediaRecorderConfiguration( + storagePath, + AgoraMediaRecorder.CONTAINER_MP4, + AgoraMediaRecorder.STREAM_TYPE_BOTH, + 120000, + 0 + ) ) + recorders[id] = recorder + } else { + recorders[id]?.let { + it.stopRecording() + it.release() + } } - }) - recorder.startRecording( - AgoraMediaRecorder.MediaRecorderConfiguration( - storagePath, - AgoraMediaRecorder.CONTAINER_MP4, - AgoraMediaRecorder.STREAM_TYPE_BOTH, - 120000, - 0 + { + Text( + text = if (!isRecording) stringResource(id = R.string.start_recording) else stringResource( + id = R.string.stop_recording ) ) - recorders[id] = recorder - } else { - recorders[id]?.let { - it.stopRecording() - it.release() - } } }, onCameraSwitchClick = { @@ -284,7 +311,7 @@ private fun MediaRecorderView( videoIdList: List, setupVideo: (View, Int, Boolean) -> Unit, statsMap: Map = emptyMap(), - onRecorderClick: (id: Int, isRecording: Boolean) -> Unit = { _, _ -> }, + overlay: @Composable BoxScope.(index: Int, id: Int) -> Unit? 
= { _, _ -> }, onCameraSwitchClick: () -> Unit = { } ) { Column { @@ -292,22 +319,7 @@ private fun MediaRecorderView( modifier = Modifier.weight(1.0f), videoIdList = videoIdList, setupVideo = setupVideo, - overlay = { _, id -> - var isRecording by rememberSaveable { mutableStateOf(false) } - Button( - modifier = Modifier - .padding(8.dp) - .align(Alignment.BottomEnd), - onClick = { - isRecording = !isRecording - onRecorderClick(id, isRecording) - }) - { - Text(text = if (!isRecording) stringResource(id = R.string.start_recording) else stringResource( - id = R.string.stop_recording - )) - } - } + overlay = overlay ) Button( modifier = Modifier diff --git a/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/PlayAudioFiles.kt b/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/PlayAudioFiles.kt index a567c743e..3b72f57fa 100644 --- a/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/PlayAudioFiles.kt +++ b/Android/APIExample-Compose/app/src/main/java/io/agora/api/example/compose/samples/PlayAudioFiles.kt @@ -201,7 +201,7 @@ private fun PlayAudioFilesView( modifier = Modifier .weight(1.0f) .padding(16.dp, 8.dp) - .clickable { + .clickable(isJoined) { rtcEngine?.startAudioMixing( "/assets/music_1.m4a", false, @@ -218,7 +218,7 @@ private fun PlayAudioFilesView( modifier = Modifier .weight(1.0f) .padding(16.dp, 8.dp) - .clickable { + .clickable(isJoined) { rtcEngine?.resumeAudioMixing() } ) @@ -230,7 +230,7 @@ private fun PlayAudioFilesView( modifier = Modifier .weight(1.0f) .padding(16.dp, 8.dp) - .clickable { + .clickable(isJoined) { rtcEngine?.pauseAudioMixing() } ) @@ -242,7 +242,7 @@ private fun PlayAudioFilesView( modifier = Modifier .weight(1.0f) .padding(16.dp, 8.dp) - .clickable { + .clickable(isJoined) { rtcEngine?.stopAudioMixing() } ) @@ -279,7 +279,7 @@ private fun PlayAudioFilesView( modifier = Modifier .weight(1.0f) .padding(16.dp, 8.dp) - .clickable { + 
.clickable(isJoined) { rtcEngine?.playEffect( EFFECT_SOUND_ID, // The sound ID of the audio effect file to be played. EFFECT_FILE_PATH, // The file path of the audio effect file. @@ -297,7 +297,7 @@ private fun PlayAudioFilesView( modifier = Modifier .weight(1.0f) .padding(16.dp, 8.dp) - .clickable { + .clickable(isJoined) { rtcEngine?.resumeEffect(EFFECT_SOUND_ID) }) Text(text = stringResource(id = R.string.pause), @@ -307,7 +307,7 @@ private fun PlayAudioFilesView( modifier = Modifier .weight(1.0f) .padding(16.dp, 8.dp) - .clickable { + .clickable (isJoined) { rtcEngine?.pauseEffect(EFFECT_SOUND_ID) }) Text(text = stringResource(id = R.string.stop), @@ -317,7 +317,7 @@ private fun PlayAudioFilesView( modifier = Modifier .weight(1.0f) .padding(16.dp, 8.dp) - .clickable { + .clickable(isJoined) { rtcEngine?.stopEffect(EFFECT_SOUND_ID) }) } diff --git a/Android/APIExample-Compose/cloud_build.sh b/Android/APIExample-Compose/cloud_build.sh index 66e456cb4..206416058 100755 --- a/Android/APIExample-Compose/cloud_build.sh +++ b/Android/APIExample-Compose/cloud_build.sh @@ -24,7 +24,24 @@ fi ./gradlew clean || exit 1 ./gradlew :app:assembleRelease || exit 1 + +SDK_VERSION="" +if [ "$1" = "false" ]; then + sdk_version_file="./gradle.properties" + if [[ -f "$sdk_version_file" ]]; then + rtc_sdk_version=$(grep "rtc_sdk_version" "$sdk_version_file" | cut -d'=' -f2) + if [[ -n "$rtc_sdk_version" ]]; then + SDK_VERSION=$(echo "$rtc_sdk_version" | sed 's/^[ \t]*//;s/[ \t]*$//') + else + echo "rtc_sdk_version value not found" + fi +else + echo "file not found: $sdk_version_file" +fi +else + SDK_VERSION=$(echo $sdk_url | cut -d "/" -f 5) +fi + if [ "$WORKSPACE" != "" ]; then -SDK_VERSION=$(echo $sdk_url | cut -d "/" -f 5) cp app/build/outputs/apk/release/*.apk $WORKSPACE/APIExample-Compose_${BUILD_NUMBER}_${SDK_VERSION}_$(date "+%Y%m%d%H%M%S").apk fi \ No newline at end of file diff --git a/Android/APIExample-Compose/gradle.properties 
b/Android/APIExample-Compose/gradle.properties index 20e2a0152..58fe9e1a3 100644 --- a/Android/APIExample-Compose/gradle.properties +++ b/Android/APIExample-Compose/gradle.properties @@ -20,4 +20,6 @@ kotlin.code.style=official # Enables namespacing of each library's R class so that its R class includes only the # resources declared in the library itself and none from the library's dependencies, # thereby reducing the size of the R class for that library -android.nonTransitiveRClass=true \ No newline at end of file +android.nonTransitiveRClass=true + +rtc_sdk_version = 4.5.0 \ No newline at end of file diff --git a/Android/APIExample-Compose/gradle/libs.versions.toml b/Android/APIExample-Compose/gradle/libs.versions.toml index 5e7d7eb30..0a90ebb75 100644 --- a/Android/APIExample-Compose/gradle/libs.versions.toml +++ b/Android/APIExample-Compose/gradle/libs.versions.toml @@ -12,7 +12,7 @@ composeBom = "2023.08.00" loggingInterceptor = "4.10.0" materialIconsExtended = "1.6.0" navigationCompose = "2.7.7" -agoraSdk = "4.4.1" +#agoraSdk = "4.5.0" okhttp = "4.10.0" [libraries] @@ -20,8 +20,8 @@ androidx-core-ktx = { group = "androidx.core", name = "core-ktx", version.ref = androidx-datastore = { module = "androidx.datastore:datastore", version.ref = "datastore" } androidx-datastore-preferences = { module = "androidx.datastore:datastore-preferences", version.ref = "datastore" } androidx-material-icons-extended = { module = "androidx.compose.material:material-icons-extended", version.ref = "materialIconsExtended" } -agora-full-sdk = { module = "io.agora.rtc:full-sdk", version.ref = "agoraSdk" } -agora-full-screen-sharing = { module = "io.agora.rtc:full-screen-sharing", version.ref = "agoraSdk" } +#agora-full-sdk = { module = "io.agora.rtc:full-sdk", version.ref = "agoraSdk" } +#agora-full-screen-sharing = { module = "io.agora.rtc:full-screen-sharing", version.ref = "agoraSdk" } junit = { group = "junit", name = "junit", version.ref = "junit" } androidx-junit = { group = 
"androidx.test.ext", name = "junit", version.ref = "junitVersion" } androidx-espresso-core = { group = "androidx.test.espresso", name = "espresso-core", version.ref = "espressoCore" } diff --git a/Android/APIExample/app/build.gradle b/Android/APIExample/app/build.gradle index efbde21a0..dc855a8a8 100644 --- a/Android/APIExample/app/build.gradle +++ b/Android/APIExample/app/build.gradle @@ -3,7 +3,14 @@ apply plugin: 'kotlin-android' apply from: "${rootDir.absolutePath}/git-hooks.gradle" apply from: 'vendors.gradle' -def agoraSdkVersion = "4.4.1" + +def sdkVersionFile = file("../gradle.properties") +def properties = new Properties() +sdkVersionFile.withInputStream { stream -> + properties.load(stream) +} +def agoraSdkVersion = properties.getProperty("rtc_sdk_version") +println("${rootProject.project.name} agoraSdkVersion: ${agoraSdkVersion}") def localSdkPath= "${rootProject.projectDir.absolutePath}/../../sdk" @@ -134,7 +141,6 @@ dependencies { androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0' implementation 'io.github.luizgrp.sectionedrecyclerviewadapter:sectionedrecyclerviewadapter:1.2.0' - implementation 'com.yanzhenjie:permission:2.0.3' implementation 'de.javagl:obj:0.2.1' implementation "com.squareup.okhttp3:okhttp:4.10.0" diff --git a/Android/APIExample/app/src/main/AndroidManifest.xml b/Android/APIExample/app/src/main/AndroidManifest.xml index 0360c65c0..285bf475b 100644 --- a/Android/APIExample/app/src/main/AndroidManifest.xml +++ b/Android/APIExample/app/src/main/AndroidManifest.xml @@ -3,6 +3,8 @@ xmlns:tools="http://schemas.android.com/tools" package="io.agora.api.example"> + + @@ -58,6 +60,11 @@ + + + \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/ReadyFragment.java b/Android/APIExample/app/src/main/java/io/agora/api/example/ReadyFragment.java index 7ed7dc652..e7b86f429 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/ReadyFragment.java +++ 
b/Android/APIExample/app/src/main/java/io/agora/api/example/ReadyFragment.java @@ -1,8 +1,6 @@ package io.agora.api.example; -import android.Manifest; -import android.annotation.SuppressLint; -import android.os.Build; + import android.os.Bundle; import android.view.LayoutInflater; import android.view.View; @@ -17,15 +15,10 @@ import androidx.navigation.NavDestination; import androidx.navigation.Navigation; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - -import java.util.ArrayList; -import java.util.List; - import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.Constant; import io.agora.api.example.common.model.ExampleBean; +import io.agora.api.example.utils.PermissonUtils; /** * @author cjw @@ -63,52 +56,34 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat tips = view.findViewById(R.id.tips); tips.setText(getString(exampleBean.getTipsId())); view.findViewById(R.id.next).setOnClickListener(v -> { - runOnPermissionGranted(new Runnable() { + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { @Override - public void run() { - NavController navController = Navigation.findNavController(requireView()); - navController.navigate(exampleBean.getActionId()); - navController.addOnDestinationChangedListener(new NavController.OnDestinationChangedListener() { - @Override - public void onDestinationChanged(@NonNull NavController controller, - @NonNull NavDestination destination, - @Nullable Bundle arguments) { - if (destination.getId() == R.id.Ready) { - controller.navigateUp(); - controller.removeOnDestinationChangedListener(this); - } - } - }); + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + navigationDest(); + } else { + showLongToast(getString(R.string.permission)); + } } }); }); } - @SuppressLint("WrongConstant") - private void 
runOnPermissionGranted(@NonNull Runnable runnable) { - List permissionList = new ArrayList<>(); - permissionList.add(Permission.READ_EXTERNAL_STORAGE); - permissionList.add(Permission.WRITE_EXTERNAL_STORAGE); - permissionList.add(Permission.RECORD_AUDIO); - permissionList.add(Permission.CAMERA); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { - permissionList.add(Permission.READ_PHONE_STATE); - permissionList.add(Manifest.permission.BLUETOOTH_CONNECT); - } + private void navigationDest() { + NavController navController = Navigation.findNavController(requireView()); + navController.navigate(exampleBean.getActionId()); + navController.addOnDestinationChangedListener(new NavController.OnDestinationChangedListener() { + @Override + public void onDestinationChanged(@NonNull NavController controller, + @NonNull NavDestination destination, + @Nullable Bundle arguments) { + if (destination.getId() == R.id.Ready) { + controller.navigateUp(); + controller.removeOnDestinationChangedListener(this); + } + } + }); + } - String[] permissionArray = new String[permissionList.size()]; - permissionList.toArray(permissionArray); - if (AndPermission.hasPermissions(this, permissionArray)) { - runnable.run(); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - permissionArray - ).onGranted(permissions -> { - // Permissions Granted - runnable.run(); - }).start(); - } } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/bean/MpOptions.java b/Android/APIExample/app/src/main/java/io/agora/api/example/bean/MpOptions.java new file mode 100644 index 000000000..2796fdb85 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/bean/MpOptions.java @@ -0,0 +1,101 @@ +package io.agora.api.example.bean; + +import android.util.Log; + +import org.json.JSONObject; + +import java.lang.reflect.Field; + +public class MpOptions { + public static final String TAG = "MpOptions"; + + /** + * key: + * makeup_options + *

+ * value: + * bool enable_mu; (true: makeup on; false: makeup off) + * int browStyle; (eyebrow style 0 - 2: off, type 1, type 2) + * int browColor; (eyebrow color 0 - 2: none, black, brown) + * float browStrength; (eyebrow strength 0.0 - 1.0) + * int lashStyle; (eyelash style 0 - 2: off, type 1, type 2) + * int lashColor; (eyelash color 0 - 2: none, black, brown) + * float lashStrength; (eyelash strength 0.0 - 1.0) + * int shadowStyle; (eyeshadow style 0 - 2: off, type 1, type 2) + * int shadowColor; not supported yet + * float shadowStrength; (eyeshadow strength 0.0 - 1.0) + * int pupilStyle; (pupil style 0 - 2: off, type 1, type 2) + * int pupilColor; not supported yet + * float pupilStrength; (pupil strength 0.0 - 1.0) + * int blushStyle; (blush style 0 - 2: off, type 1, type 2) + * int blushColor; (blush color 0 - 5: none, color #1, color #2, color #3, color #4, color #5) + * float blushStrength; (blush strength 0.0 - 1.0) + * int lipStyle; (lipstick style 0 - 2: off, type 1, type 2) + * int lipColor; (lipstick color 0 - 5: none, color #1, color #2, color #3, color #4, color #5) + * float lipStrength; (lipstick strength 0.0 - 1.0) + **/ + + public boolean enable_mu; + + public int browStyle; + public int browColor; + public float browStrength; + + public int lashStyle; + public int lashColor; + public float lashStrength; + + public int shadowStyle; + public float shadowStrength; + + public int pupilStyle; + public float pupilStrength; + + public int blushStyle; + public int blushColor; + public float blushStrength; + + public int lipStyle; + public int lipColor; + public float lipStrength; + + public MpOptions() { + this.enable_mu = false; + this.browStyle = 0; + this.browColor = 0; + this.browStrength = 0.5f; + this.lashStyle = 0; + this.lashColor = 0; + this.lashStrength = 0.5f; + this.shadowStyle = 0; + this.shadowStrength = 0.5f; + this.pupilStyle = 0; + this.pupilStrength = 0.5f; + this.blushStyle = 0; + this.blushColor = 0; + this.blushStrength = 0.5f; + this.lipStyle = 0; + this.lipColor = 0; + this.lipStrength = 0.5f; + } + + public String toJson() { + String json = "{}"; + JSONObject jsonObject = new JSONObject(); + + try { + Field[] fields = MpOptions.class.getDeclaredFields(); + for (Field field : fields) { + 
field.setAccessible(true); + String name = field.getName(); + Object value = field.get(this); + jsonObject.put(name, value); + } + } catch (Exception e) { + Log.e(TAG, "toJson: error:" + e.getMessage()); + } + json = jsonObject.toString(); + Log.d(TAG, "toJson: " + json); + return json; + } +} diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseFragment.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseFragment.java index 68bddbbf3..6d0c899ed 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseFragment.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseFragment.java @@ -1,6 +1,7 @@ package io.agora.api.example.common; import android.content.Context; +import android.content.pm.PackageManager; import android.os.Bundle; import android.os.Handler; import android.os.Looper; @@ -9,12 +10,18 @@ import android.widget.Toast; import androidx.activity.OnBackPressedCallback; +import androidx.activity.result.ActivityResultLauncher; +import androidx.activity.result.contract.ActivityResultContracts; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.appcompat.app.AlertDialog; import androidx.fragment.app.Fragment; import androidx.navigation.Navigation; +import java.util.Map; + +import io.agora.api.example.utils.PermissonUtils; + /** * The type Base fragment. 
*/ @@ -31,6 +38,29 @@ public void handleOnBackPressed() { onBackPressed(); } }; + private String[] permissionArray; + private PermissonUtils.PermissionResultCallback permissionResultCallback; + private ActivityResultLauncher permissionLauncher = registerForActivityResult( + new ActivityResultContracts.RequestMultiplePermissions(), + result -> { + if (permissionResultCallback != null) { + boolean allPermissionsGranted = true; + for (Map.Entry entry : result.entrySet()) { + if (!entry.getValue()) { + allPermissionsGranted = false; + break; + } + } + int[] grantResults = new int[permissionArray.length]; + for (int i = 0; i < permissionArray.length; i++) { + grantResults[i] = result.containsKey(permissionArray[i]) && result.get(permissionArray[i]) ? PackageManager.PERMISSION_GRANTED : PackageManager.PERMISSION_DENIED; + } + if (permissionResultCallback != null) { + permissionResultCallback.onPermissionsResult(allPermissionsGranted, permissionArray, grantResults); + } + } + } + ); @Override public void onCreate(@Nullable Bundle savedInstanceState) { @@ -171,4 +201,34 @@ protected void onBackPressed() { Navigation.findNavController(view).navigateUp(); } } + + /** + * @param permissions + * @param callback + */ + protected void checkOrRequestPermisson(String[] permissions, PermissonUtils.PermissionResultCallback callback) { + if (permissions != null && permissions.length > 0) { + permissionArray = permissions; + permissionResultCallback = callback; + if (PermissonUtils.checkPermissions(getContext(), permissionArray)) { + int[] grantResults = new int[permissionArray.length]; + for (int i = 0; i < permissionArray.length; i++) { + grantResults[i] = PackageManager.PERMISSION_GRANTED; + } + permissionResultCallback.onPermissionsResult(true, permissionArray, grantResults); + } else { + permissionLauncher.launch(permissionArray); + } + } + } + + /** + * request permisson with common permissions + * + * @param callback + */ + protected void 
checkOrRequestPermisson(PermissonUtils.PermissionResultCallback callback) { + checkOrRequestPermisson(PermissonUtils.getCommonPermission(), callback); + } + } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseVbFragment.java b/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseVbFragment.java new file mode 100644 index 000000000..efba51360 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/common/BaseVbFragment.java @@ -0,0 +1,112 @@ +package io.agora.api.example.common; + +import android.content.Context; +import android.os.Bundle; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; + +import androidx.annotation.NonNull; +import androidx.annotation.Nullable; +import androidx.viewbinding.ViewBinding; + +import io.agora.api.example.MainApplication; +import io.agora.api.example.R; +import io.agora.rtc2.Constants; +import io.agora.rtc2.IRtcEngineEventHandler; +import io.agora.rtc2.RtcEngine; +import io.agora.rtc2.RtcEngineConfig; +import io.agora.rtc2.proxy.LocalAccessPointConfiguration; + +public abstract class BaseVbFragment extends BaseFragment { + + protected T binding; + + protected abstract T getViewBinding(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState); + + @Nullable + @Override + public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { + binding = getViewBinding(inflater, container, savedInstanceState); + return binding.getRoot(); + } + + @Override + public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { + super.onViewCreated(view, savedInstanceState); + initView(); + initData(); + } + + protected void initView() { + + } + + protected void initData() { + + } + + protected RtcEngine initRtcEngine(IRtcEngineEventHandler engineEventHandler) { + // Check if the context is valid + Context context = getContext(); + if 
(context == null) { + return null; + } + RtcEngine engine = null; + try { + RtcEngineConfig config = new RtcEngineConfig(); + /* + * The context of Android Activity + */ + config.mContext = context.getApplicationContext(); + /* + * The App ID issued to you by Agora. See How to get the App ID + */ + config.mAppId = getString(R.string.agora_app_id); + /* Sets the channel profile of the Agora RtcEngine. + CHANNEL_PROFILE_COMMUNICATION(0): (Default) The Communication profile. + Use this profile in one-on-one calls or group calls, where all users can talk freely. + CHANNEL_PROFILE_LIVE_BROADCASTING(1): The Live-Broadcast profile. Users in a live-broadcast + channel have a role as either broadcaster or audience. A broadcaster can both send and receive streams; + an audience can only receive streams.*/ + config.mChannelProfile = Constants.CHANNEL_PROFILE_LIVE_BROADCASTING; + /* + * IRtcEngineEventHandler is an abstract class providing default implementation. + * The SDK uses this class to report to the app on SDK runtime events. + */ + config.mEventHandler = engineEventHandler; + config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.DEFAULT); + config.mAreaCode = ((MainApplication) getActivity().getApplication()).getGlobalSettings().getAreaCode(); + engine = RtcEngine.create(config); + /* + * This parameter is for reporting the usages of APIExample to agora background. + * Generally, it is not necessary for you to set this parameter. 
+ */ + engine.setParameters("{" + + "\"rtc.report_app_scenario\":" + + "{" + + "\"appScenario\":" + 100 + "," + + "\"serviceType\":" + 11 + "," + + "\"appVersion\":\"" + RtcEngine.getSdkVersion() + "\"" + + "}" + + "}"); + /* setting the local access point if the private cloud ip was set, otherwise the config will be invalid.*/ + LocalAccessPointConfiguration localAccessPointConfiguration = ((MainApplication) getActivity().getApplication()).getGlobalSettings().getPrivateCloudConfig(); + if (localAccessPointConfiguration != null) { + // This api can only be used in the private media server scenario, otherwise some problems may occur. + engine.setLocalAccessPoint(localAccessPointConfiguration); + } + } catch (Exception e) { + e.printStackTrace(); + getActivity().onBackPressed(); + } + + return engine; + } + + @Override + public void onDestroyView() { + super.onDestroyView(); + binding = null; + } +} diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/EntryFragment.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/EntryFragment.java index bed4e0185..5f0ab8e29 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/EntryFragment.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CDNStreaming/EntryFragment.java @@ -14,12 +14,10 @@ import androidx.annotation.Nullable; import androidx.navigation.Navigation; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; +import io.agora.api.example.utils.PermissonUtils; /** * The type Entry fragment. 
@@ -85,19 +83,15 @@ public void onDestroy() { @Override public void onClick(View v) { // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - join(v); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - join(v); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + join(v); + } + } + }); } private void join(View v) { diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ChannelEncryption.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ChannelEncryption.java index 7f71b021f..61480657b 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ChannelEncryption.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ChannelEncryption.java @@ -25,9 +25,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.lang.reflect.Method; import io.agora.api.example.MainApplication; @@ -35,6 +32,7 @@ import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.Constants; import io.agora.rtc2.IRtcEngineEventHandler; @@ -195,19 +193,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); 
// Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; if (encry_mode.getSelectedItem().toString().equals(getString(R.string.custom))) { diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ContentInspect.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ContentInspect.java index 8a36ffac1..ea09bc805 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ContentInspect.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ContentInspect.java @@ -19,14 +19,12 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -157,19 +155,15 @@ public void onClick(View v) { // call when join button hit String channelId = 
et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java index 2ff5b5161..753964de3 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/CustomRemoteVideoRender.java @@ -17,13 +17,11 @@ import android.widget.Button; import android.widget.EditText; import android.widget.FrameLayout; +import android.widget.Switch; import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.concurrent.Callable; import javax.microedition.khronos.egl.EGLConfig; @@ -33,9 +31,11 @@ import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; +import io.agora.api.example.common.gles.core.EglCore; import 
io.agora.api.example.examples.advanced.videoRender.GLTextureView; import io.agora.api.example.examples.advanced.videoRender.YuvUploader; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.base.TextureBufferHelper; import io.agora.base.VideoFrame; @@ -67,6 +67,7 @@ public class CustomRemoteVideoRender extends BaseFragment implements View.OnClic private FrameLayout fl_local, fl_remote; private Button join; + private Switch switchExEglContext; private EditText et_channel; private RtcEngine engine; private int myUid, remoteUid; @@ -78,6 +79,7 @@ public class CustomRemoteVideoRender extends BaseFragment implements View.OnClic private final GlRectDrawer drawer = new GlRectDrawer(); private final YuvUploader yuvUploader = new YuvUploader(); private final Matrix renderMatrix = new Matrix(); + private EglCore eglCore; @Nullable @Override @@ -94,6 +96,23 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat view.findViewById(R.id.btn_join).setOnClickListener(this); fl_local = view.findViewById(R.id.fl_local); fl_remote = view.findViewById(R.id.fl_remote); + switchExEglContext = view.findViewById(R.id.switch_ex_context); + view.findViewById(R.id.btn_confirm).setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + view.findViewById(R.id.fl_container_egl_select).setVisibility(View.GONE); + //init egl context option + Log.d(TAG, "isChecked =" + switchExEglContext.isChecked()); + if (switchExEglContext.isChecked()) { + if (eglCore == null) { + eglCore = new EglCore(); + } + //once set the external egl context, you should use it until engine destroyed + int ret = engine.setExternalRemoteEglContext(eglCore.getEGLContext()); + Log.d(TAG, "setExternalRemoteEglContext: ret = " + ret); + } + } + }); } @Override @@ -181,19 +200,15 @@ public void onClick(View v) { // call when join button hit String channelId = 
et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + // Permissions Granted + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/HostAcrossChannel.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/HostAcrossChannel.java index 06d6fa0d0..a78260ef7 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/HostAcrossChannel.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/HostAcrossChannel.java @@ -20,14 +20,12 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.Constants; import io.agora.rtc2.IRtcEngineEventHandler; @@ -164,19 +162,15 @@ public void onClick(View v) { // call when join button hit 
String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/InCallReport.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/InCallReport.java index 90c53c9ba..46b737c80 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/InCallReport.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/InCallReport.java @@ -18,14 +18,12 @@ import androidx.annotation.Nullable; import androidx.appcompat.widget.AppCompatTextView; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.model.StatisticsInfo; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.Constants; import io.agora.rtc2.IRtcEngineEventHandler; @@ -180,19 +178,15 @@ public void onClick(View v) { // call 
when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/JoinMultipleChannel.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/JoinMultipleChannel.java index aa49c39d9..0af1dbf5f 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/JoinMultipleChannel.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/JoinMultipleChannel.java @@ -20,9 +20,6 @@ import androidx.annotation.Nullable; import androidx.appcompat.app.AlertDialog; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.io.File; import java.util.Locale; import java.util.Random; @@ -34,6 +31,7 @@ import io.agora.api.example.common.widget.VideoReportLayout; import io.agora.api.example.databinding.DialogLeaveOptionsBinding; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import 
io.agora.rtc2.Constants; @@ -168,19 +166,15 @@ public void onClick(View v) { channel1 = et_channel.getText().toString(); channel2 = channel1 + "-2"; // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channel1); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channel1); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channel1); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/KtvCopyrightMusic.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/KtvCopyrightMusic.java index a5316ae4c..4c9f6bf8b 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/KtvCopyrightMusic.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/KtvCopyrightMusic.java @@ -23,7 +23,7 @@ public class KtvCopyrightMusic extends BaseBrowserFragment { @Override protected String getBrowserUrl() { if (getResources().getConfiguration().locale.getLanguage() == Locale.CHINESE.getLanguage()) { - return "https://doc.shengwang.cn/doc/online-ktv/android/landing-page"; + return "https://doc.shengwang.cn/doc/online-ktv/android/ktv-scenario/landing-page"; } return "https://docs.agora.io/en/interactive-live-streaming/overview/product-overview?platform=android"; } diff --git 
a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LiveStreaming.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LiveStreaming.java index 7ecfb5b7d..3eac74bb1 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LiveStreaming.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/LiveStreaming.java @@ -26,8 +26,6 @@ import androidx.appcompat.app.AlertDialog; import com.google.android.material.bottomsheet.BottomSheetDialog; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; import java.io.File; import java.util.ArrayList; @@ -45,6 +43,7 @@ import io.agora.api.example.databinding.FragmentLiveStreamingVideoTrackingBinding; import io.agora.api.example.utils.CommonUtil; import io.agora.api.example.utils.FileUtils; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.ClientRoleOptions; @@ -56,6 +55,7 @@ import io.agora.rtc2.video.AgoraFocalLengthInfo; import io.agora.rtc2.video.CameraCapturerConfiguration; import io.agora.rtc2.video.ImageTrackOptions; +import io.agora.rtc2.video.SnapshotConfig; import io.agora.rtc2.video.VideoCanvas; import io.agora.rtc2.video.VideoEncoderConfiguration; import io.agora.rtc2.video.WatermarkOptions; @@ -74,15 +74,18 @@ actionId = R.id.action_mainFragment_to_live_streaming, tipsId = R.string.livestreaming ) -public class LiveStreaming extends BaseFragment implements View.OnClickListener { +public class LiveStreaming extends BaseFragment implements View.OnClickListener, SeekBar.OnSeekBarChangeListener, AdapterView.OnItemSelectedListener { private static final String TAG = LiveStreaming.class.getSimpleName(); - + private static final int FPS_DEFAULT = 15; + public Constants.VideoModulePosition position = 
Constants.VideoModulePosition.VIDEO_MODULE_POSITION_PRE_RENDERER; private FragmentLiveStreamingBinding mRootBinding; private FragmentLiveStreamingSettingBinding mSettingBinding; private BottomSheetDialog mSettingDialog; private VideoReportLayout foreGroundVideo, backGroundVideo; private boolean isLocalVideoForeground; + private int localFps = 0; + private int remoteFps = 0; private RtcEngine engine; private int myUid = 0; @@ -112,6 +115,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat mRootBinding.btnJoin.setOnClickListener(this); mRootBinding.btnPreload.setOnClickListener(this); mRootBinding.btnPublish.setOnClickListener(this); + mRootBinding.btnLocalScreenshot.setOnClickListener(this); mRootBinding.btnRemoteScreenshot.setOnClickListener(this); mRootBinding.btnSwitchCamera.setOnClickListener(this); foreGroundVideo.setOnClickListener(this); @@ -234,6 +238,22 @@ public void onNothingSelected(AdapterView parent) { }); mSettingDialog = new BottomSheetDialog(requireContext()); mSettingDialog.setContentView(mSettingBinding.getRoot()); + + //init fps + mRootBinding.sbLocal.setOnSeekBarChangeListener(this); + mRootBinding.sbRemote.setOnSeekBarChangeListener(this); + + mRootBinding.spinnerScenario.setOnItemSelectedListener(this); + mRootBinding.spinnerSnapshot.setOnItemSelectedListener(this); + + mRootBinding.btnLocalScreenshot.setEnabled(false); + } + + private void resetFps() { + localFps = FPS_DEFAULT; + remoteFps = FPS_DEFAULT; + mRootBinding.sbLocal.setProgress(localFps); + mRootBinding.sbRemote.setProgress(remoteFps); } private void updateVideoView() { @@ -361,21 +381,18 @@ public void onClick(View v) { // call when join button hit String channelId = mRootBinding.etChannel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - 
AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { + mRootBinding.btnLocalScreenshot.setEnabled(false); joined = false; + mRootBinding.llContainerFp.setVisibility(View.GONE); isHost = false; isPreloaded = false; mRootBinding.btnJoin.setText(getString(R.string.join)); @@ -468,6 +485,8 @@ public void onClick(View v) { } } else if (v.getId() == R.id.btn_setting) { mSettingDialog.show(); + } else if (v.getId() == R.id.btn_local_screenshot) { + takeSnapshot(myUid); } else if (v.getId() == R.id.btn_remote_screenshot) { takeSnapshot(remoteUid); } else if (v.getId() == R.id.btn_preload) { @@ -498,7 +517,7 @@ private void joinChannel(String channelId) { if (context == null) { return; } - + resetFps(); isLocalVideoForeground = false; // Create render view by RtcEngine SurfaceView surfaceView = new SurfaceView(context); @@ -540,7 +559,7 @@ private void joinChannel(String channelId) { ChannelMediaOptions option = new ChannelMediaOptions(); option.channelProfile = Constants.CHANNEL_PROFILE_LIVE_BROADCASTING; - option.clientRoleType = Constants.CLIENT_ROLE_AUDIENCE; + option.clientRoleType = CLIENT_ROLE_AUDIENCE; option.autoSubscribeAudio = true; option.autoSubscribeVideo = true; int res; @@ -618,15 +637,32 @@ private void enableLowLegacy(boolean enable) { } private void takeSnapshot(int uid) { - if (uid != 0) { - String filePath = requireContext().getExternalCacheDir().getAbsolutePath() + File.separator + "livestreaming_snapshot.png"; - int ret = engine.takeSnapshot(uid, filePath); + if (!joined) { + 
showLongToast(getString(R.string.join_channel_first)); + return; + } + String filePath = new File(requireContext().getExternalCacheDir(), uid + "_livestreaming_snapshot.png").getAbsolutePath(); + SnapshotConfig config = new SnapshotConfig(); + config.filePath = filePath; + if (uid == myUid) { + config.position = position; + int ret = engine.takeSnapshot(0, config); if (ret != Constants.ERR_OK) { - showLongToast("takeSnapshot error code=" + ret + ",msg=" + RtcEngine.getErrorDescription(ret)); + showLongToast("takeSnapshot local error code=" + ret + ",msg=" + RtcEngine.getErrorDescription(ret)); } } else { - showLongToast(getString(R.string.remote_screenshot_tip)); + if (uid != 0) { +// config.position = Constants.VideoModulePosition.VIDEO_MODULE_POSITION_PRE_RENDERER; +// int ret = engine.takeSnapshot(uid, config); + int ret = engine.takeSnapshot(uid, filePath); + if (ret != Constants.ERR_OK) { + showLongToast("takeSnapshot remote error code=" + ret + ",msg=" + RtcEngine.getErrorDescription(ret)); + } + } else { + showLongToast(getString(R.string.remote_screenshot_tip)); + } } + } @@ -671,6 +707,7 @@ public void onJoinChannelSuccess(String channel, int uid, int elapsed) { handler.post(new Runnable() { @Override public void run() { + mRootBinding.llContainerFp.setVisibility(View.VISIBLE); mRootBinding.btnJoin.setEnabled(true); mRootBinding.btnJoin.setText(getString(R.string.leave)); mRootBinding.btnPublish.setEnabled(true); @@ -813,6 +850,11 @@ public void onClientRoleChanged(int oldRole, int newRole, ClientRoleOptions newR Log.i(TAG, String.format("client role changed from state %d to %d", oldRole, newRole)); runOnUIThread(() -> { mRootBinding.btnPublish.setEnabled(true); + if (newRole == Constants.CLIENT_ROLE_BROADCASTER) { + mRootBinding.btnLocalScreenshot.setEnabled(true); + } else { + mRootBinding.btnLocalScreenshot.setEnabled(false); + } }); } @@ -886,4 +928,56 @@ public void onVideoRenderingTracingResult(int uid, Constants.MEDIA_TRACE_EVENT c } }; + + 
@Override + public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { + if (seekBar == mRootBinding.sbLocal) { + localFps = progress; + mRootBinding.tvLocalFpsNum.setText(localFps + ""); + if (engine != null) { + engine.setLocalRenderTargetFps(Constants.VideoSourceType.VIDEO_SOURCE_CAMERA_PRIMARY, localFps); + } + } else if (seekBar == mRootBinding.sbRemote) { + remoteFps = progress; + mRootBinding.tvRemoteFpsNum.setText(remoteFps + ""); + if (engine != null) { + engine.setRemoteRenderTargetFps(remoteFps); + } + } + } + + @Override + public void onStartTrackingTouch(SeekBar seekBar) { + + } + + @Override + public void onStopTrackingTouch(SeekBar seekBar) { + + } + + @Override + public void onItemSelected(AdapterView parent, View view, int position, long id) { + if (parent == mRootBinding.spinnerScenario) { + if (engine != null) { + // Set the video scenario + String name = parent.getSelectedItem().toString(); + Constants.VideoScenario videoScenario = Constants.VideoScenario.valueOf(name); + if (videoScenario != null) { + int ret = engine.setVideoScenario(videoScenario); + Log.d(TAG, "onItemSelected: setVideoScenario ret=" + ret); + } + } + } else if (parent == mRootBinding.spinnerSnapshot) { + if (engine != null) { + String name = parent.getSelectedItem().toString(); + LiveStreaming.this.position = Constants.VideoModulePosition.valueOf(name); + } + } + } + + @Override + public void onNothingSelected(AdapterView parent) { + + } } \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaMetadata.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaMetadata.java index a3458e6e4..9ecd38b78 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaMetadata.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaMetadata.java @@ -23,9 +23,6 @@ import androidx.annotation.NonNull; 
import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; @@ -34,6 +31,7 @@ import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.Constants; import io.agora.rtc2.IMetadataObserver; @@ -179,19 +177,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java index c1131224c..dc7ebb47e 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaPlayer.java @@ -32,9 +32,6 @@ import 
androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; @@ -45,6 +42,7 @@ import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.mediaplayer.IMediaPlayer; import io.agora.mediaplayer.IMediaPlayerObserver; @@ -219,19 +217,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java index 6f77922e1..98fdb4136 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java +++ 
b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MediaRecorder.java @@ -4,10 +4,8 @@ import static io.agora.rtc2.Constants.RENDER_MODE_HIDDEN; import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; -import android.Manifest; import android.annotation.SuppressLint; import android.content.Context; -import android.os.Build; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; @@ -23,13 +21,8 @@ import androidx.annotation.Nullable; import androidx.appcompat.app.AlertDialog; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.io.File; -import java.util.ArrayList; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; @@ -41,6 +34,7 @@ import io.agora.api.example.common.widget.VideoReportLayout; import io.agora.api.example.examples.basic.JoinChannelVideo; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.AgoraMediaRecorder; import io.agora.rtc2.ChannelMediaOptions; @@ -152,6 +146,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { // This api can only be used in the private media server scenario, otherwise some problems may occur. 
engine.setLocalAccessPoint(localAccessPointConfiguration); } + initLocalPreview(); } catch (Exception e) { e.printStackTrace(); getActivity().onBackPressed(); @@ -179,29 +174,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - List permissionList = new ArrayList<>(); - permissionList.add(Permission.READ_EXTERNAL_STORAGE); - permissionList.add(Permission.WRITE_EXTERNAL_STORAGE); - permissionList.add(Permission.RECORD_AUDIO); - permissionList.add(Permission.CAMERA); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { - permissionList.add(Manifest.permission.BLUETOOTH_CONNECT); - } - - String[] permissionArray = new String[permissionList.size()]; - permissionList.toArray(permissionArray); - - if (AndPermission.hasPermissions(this, permissionArray)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - permissionArray - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + // Permissions Granted + joinChannel(channelId); + } + } + }); } else { joined = false; stopAllMediaRecorder(); @@ -229,17 +210,18 @@ public void onClick(View v) { resetLayoutRecording(value); } remoteViews.clear(); - fl_local.removeAllViews(); - resetLayoutRecording(fl_local); + //exclude local +// fl_local.removeAllViews(); +// resetLayoutRecording(fl_local); } } else if (v.getId() == switch_camera.getId()) { - if (engine != null && joined) { + if (engine != null) { engine.switchCamera(); } } } - private void joinChannel(String channelId) { + private void initLocalPreview() { // Check if the context is valid Context context = getContext(); if (context == null) { @@ -256,6 +238,9 @@ 
private void joinChannel(String channelId) { setupLayoutRecording(fl_local, () -> startLocalMediaRecorder(channelId), this::stopLocalMediaRecorder); // Setup local video to render your local camera preview engine.setupLocalVideo(new VideoCanvas(surfaceView, RENDER_MODE_HIDDEN, 0)); + engine.startPreview(); + engine.startRecordingDeviceTest(0); + // Set audio route to microPhone engine.setDefaultAudioRoutetoSpeakerphone(true); @@ -270,6 +255,9 @@ private void joinChannel(String channelId) { STANDARD_BITRATE, VideoEncoderConfiguration.ORIENTATION_MODE.valueOf(((MainApplication) getActivity().getApplication()).getGlobalSettings().getVideoEncodingOrientation()) )); + } + + private void joinChannel(String channelId) { ChannelMediaOptions option = new ChannelMediaOptions(); option.autoSubscribeAudio = true; @@ -301,7 +289,8 @@ private void joinChannel(String channelId) { } private void stopAllMediaRecorder() { - stopLocalMediaRecorder(); + //exclude local +// stopLocalMediaRecorder(); Set remoteUidList = remoteMediaRecorders.keySet(); for (Integer uid : remoteUidList) { stopRemoteMediaRecorder(uid); @@ -326,7 +315,7 @@ private void startRemoteMediaRecorder(String channelId, int uid) { AgoraMediaRecorder mediaRecorder = remoteMediaRecorders.get(uid); String storagePath = requireContext().getExternalCacheDir().getAbsolutePath() + File.separator + "media_recorder_" + channelId + "_" + uid + ".mp4"; if (mediaRecorder == null) { - mediaRecorder = engine.createMediaRecorder(new RecorderStreamInfo(channelId, uid)); + mediaRecorder = engine.createMediaRecorder(new RecorderStreamInfo(channelId, uid, 0)); // Before starting recoding, you must call setMediaRecorderObserver firstly. Otherwise, recoding will fail with code -4. 
mediaRecorder.setMediaRecorderObserver(new IMediaRecorderCallback() { @Override @@ -380,7 +369,7 @@ private void startLocalMediaRecorder(String channelId) { String storagePath = requireContext().getExternalCacheDir().getAbsolutePath() + File.separator + "media_recorder_" + channelId + "_local.mp4"; if (localMediaRecorder == null) { - localMediaRecorder = engine.createMediaRecorder(new RecorderStreamInfo(channelId, myUid)); + localMediaRecorder = engine.createMediaRecorder(new RecorderStreamInfo(channelId, myUid, joined ? 0 : 1)); // Before starting recoding, you must call setMediaRecorderObserver firstly. Otherwise, recoding will fail with code -4. localMediaRecorder.setMediaRecorderObserver(new IMediaRecorderCallback() { @Override diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java index 627300696..72d0c1d9b 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/MultiVideoSourceTracks.java @@ -22,9 +22,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; @@ -41,6 +38,7 @@ import io.agora.api.example.common.widget.VideoReportLayout; import io.agora.api.example.examples.advanced.videoRender.YuvFboProgram; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.api.example.utils.VideoFileReader; import io.agora.base.JavaI420Buffer; @@ -199,19 +197,15 @@ public void onClick(View v) { // call when join button hit String channelId = 
et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; join.setText(getString(R.string.join)); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PictureInPicture.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PictureInPicture.java index 13821dd41..4c21e0082 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PictureInPicture.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PictureInPicture.java @@ -4,7 +4,6 @@ import static io.agora.rtc2.Constants.RENDER_MODE_HIDDEN; import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; -import android.Manifest; import android.annotation.SuppressLint; import android.app.AppOpsManager; import android.app.PictureInPictureParams; @@ -29,12 +28,6 @@ import androidx.appcompat.app.AppCompatActivity; import androidx.fragment.app.FragmentActivity; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - -import java.util.ArrayList; -import java.util.List; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; @@ -43,6 +36,7 @@ import 
io.agora.api.example.common.floatwindow.FloatWindowHelper; import io.agora.api.example.common.widget.VideoReportLayout; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -178,27 +172,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - List permissionList = new ArrayList<>(); - permissionList.add(Permission.READ_EXTERNAL_STORAGE); - permissionList.add(Permission.WRITE_EXTERNAL_STORAGE); - permissionList.add(Permission.RECORD_AUDIO); - permissionList.add(Permission.CAMERA); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { - permissionList.add(Manifest.permission.BLUETOOTH_CONNECT); - } - - String[] permissionArray = new String[permissionList.size()]; - permissionList.toArray(permissionArray); - - if (AndPermission.hasPermissions(this, permissionArray)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission(permissionArray).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the @@ -559,8 +541,8 @@ private boolean isFloatWindowShowing() { } - private boolean checkPipSupported(){ - if(Build.VERSION.SDK_INT < 26){ + private boolean checkPipSupported() { + if (Build.VERSION.SDK_INT < 26) { return false; } return requireActivity().getPackageManager().hasSystemFeature(PackageManager.FEATURE_PICTURE_IN_PICTURE); @@ -577,7 +559,7 @@ 
private boolean checkPipEnabled() { == AppOpsManager.MODE_ALLOWED; } - private void enterPip(){ + private void enterPip() { if (android.os.Build.VERSION.SDK_INT < 26) { return; } @@ -585,7 +567,7 @@ private void enterPip(){ .setAspectRatio(new Rational(video_layout_container.getWidth(), video_layout_container.getHeight())) .build()); - ((AppCompatActivity)requireActivity()).getSupportActionBar().hide(); + ((AppCompatActivity) requireActivity()).getSupportActionBar().hide(); ll_join.setVisibility(View.GONE); btn_pip.setVisibility(View.GONE); switch_float_window.setVisibility(View.GONE); @@ -595,7 +577,7 @@ private void enterPip(){ public void onPictureInPictureModeChanged(boolean isInPictureInPictureMode) { super.onPictureInPictureModeChanged(isInPictureInPictureMode); if (!isInPictureInPictureMode) { - ((AppCompatActivity)requireActivity()).getSupportActionBar().show(); + ((AppCompatActivity) requireActivity()).getSupportActionBar().show(); ll_join.setVisibility(View.VISIBLE); btn_pip.setVisibility(View.VISIBLE); switch_float_window.setVisibility(View.VISIBLE); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PlayAudioFiles.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PlayAudioFiles.java index 947e2024f..964dd692a 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PlayAudioFiles.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PlayAudioFiles.java @@ -19,9 +19,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.LinkedHashMap; import java.util.Map; @@ -32,6 +29,7 @@ import io.agora.api.example.common.Constant; import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import 
io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -250,18 +248,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java index 3013553a8..ec4d0dd86 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessAudioRawData.java @@ -17,9 +17,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.io.IOException; import java.io.InputStream; import java.nio.ByteBuffer; @@ -30,6 +27,7 @@ import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.CommonUtil; +import 
io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -226,18 +224,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java index e516a89cd..24f680374 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ProcessRawData.java @@ -26,9 +26,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.io.File; import java.io.OutputStream; import java.nio.ByteBuffer; @@ -38,6 +35,7 @@ import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import 
io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.api.example.utils.YUVUtils; import io.agora.base.NV21Buffer; @@ -177,19 +175,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; engine.registerVideoFrameObserver(null); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideo.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideo.java index c9d82a71a..4154dcb48 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideo.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideo.java @@ -26,9 +26,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.io.IOException; import java.util.concurrent.Callable; @@ -39,6 +36,7 @@ import io.agora.api.example.common.gles.core.EglCore; import io.agora.api.example.common.gles.core.GlUtil; import io.agora.api.example.utils.CommonUtil; +import 
io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.base.TextureBufferHelper; import io.agora.base.VideoFrame; @@ -228,19 +226,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { fl_local.setVisibility(View.GONE); getActivity().onBackPressed(); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java index b4d13a9f4..1bcb3a580 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/PushExternalVideoYUV.java @@ -18,13 +18,11 @@ import android.widget.EditText; import android.widget.FrameLayout; import android.widget.Spinner; +import android.widget.Switch; import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.nio.ByteBuffer; import java.util.concurrent.Callable; @@ -33,8 +31,10 @@ import 
io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.gles.GLThread; +import io.agora.api.example.common.gles.core.EglCore; import io.agora.api.example.examples.advanced.videoRender.YuvFboProgram; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.api.example.utils.VideoFileReader; import io.agora.base.JavaI420Buffer; @@ -70,6 +70,7 @@ public class PushExternalVideoYUV extends BaseFragment implements View.OnClickLi private FrameLayout fl_local, fl_remote; private Button join; + private Switch switchExEglContext; private EditText et_channel; private RtcEngineEx engine; private Spinner sp_push_buffer_type; @@ -80,7 +81,7 @@ public class PushExternalVideoYUV extends BaseFragment implements View.OnClickLi private YuvFboProgram yuvFboProgram; private TextureBufferHelper textureBufferHelper; - + private EglCore eglCore; @Nullable @Override @@ -98,6 +99,24 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat fl_local = view.findViewById(R.id.fl_local); fl_remote = view.findViewById(R.id.fl_remote); sp_push_buffer_type = view.findViewById(R.id.sp_buffer_type); + + switchExEglContext = view.findViewById(R.id.switch_ex_context); + view.findViewById(R.id.btn_confirm).setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View v) { + view.findViewById(R.id.fl_container_egl_select).setVisibility(View.GONE); + //init egl context option + Log.d(TAG, "isChecked =" + switchExEglContext.isChecked()); + if (switchExEglContext.isChecked()) { + if (eglCore == null) { + eglCore = new EglCore(); + } + //once set the external egl context, you should use it until engine destroyed + int ret = engine.setExternalRemoteEglContext(eglCore.getEGLContext()); + Log.d(TAG, "setExternalRemoteEglContext: ret = " + ret); + } + } + }); } @Override @@ -164,7 +183,6 @@ 
public void onDestroy() { if (videoFileReader != null) { videoFileReader.stop(); } - /*leaveChannel and Destroy the RtcEngine instance*/ if (engine != null) { /*After joining a channel, the user must call the leaveChannel method to end the @@ -215,19 +233,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + // Permissions Granted + joinChannel(channelId); + } + } + }); } else { joined = false; join.setText(getString(R.string.join)); @@ -287,7 +301,6 @@ private void joinChannel(String channelId) { fl_local.addView(textureView, new FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)); engine.startPreview(Constants.VideoSourceType.VIDEO_SOURCE_CUSTOM); - /*Please configure accessToken in the string_config file. * A temporary token generated in Console. A temporary token is valid for 24 hours. 
For details, see * https://docs.agora.io/en/Agora%20Platform/token?platform=All%20Platforms#get-a-temporary-token diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/RTMPStreaming.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/RTMPStreaming.java index 30cf799c7..46a20a4b7 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/RTMPStreaming.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/RTMPStreaming.java @@ -22,9 +22,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.ArrayList; import io.agora.api.example.MainApplication; @@ -32,6 +29,7 @@ import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -181,19 +179,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + 
joinChannel(channelId); + } + } + }); } else { if (publishing) { stopPublish(); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/RhythmPlayer.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/RhythmPlayer.java index a2bc2a308..3a31b7cb3 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/RhythmPlayer.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/RhythmPlayer.java @@ -18,14 +18,12 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -166,18 +164,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the 
user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java index 5bb3b867d..32bbd6352 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/ScreenSharing.java @@ -6,7 +6,12 @@ import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; import static io.agora.rtc2.video.VideoEncoderConfiguration.VD_640x360; +import android.app.Activity; import android.content.Context; +import android.content.Intent; +import android.media.projection.MediaProjection; +import android.media.projection.MediaProjectionManager; +import android.os.Build; import android.os.Bundle; import android.util.DisplayMetrics; import android.util.Log; @@ -23,17 +28,18 @@ import android.widget.Spinner; import android.widget.Switch; +import androidx.activity.result.ActivityResultLauncher; +import androidx.activity.result.contract.ActivityResultContracts; import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; +import io.agora.api.example.service.MediaProjectionService; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -64,16 +70,35 @@ public class ScreenSharing extends BaseFragment implements View.OnClickListener, private static final int DEFAULT_SHARE_FRAME_RATE = 15; private FrameLayout fl_local, 
fl_remote; private Button join; - private Switch screenAudio, screenPreview; + private Switch screenAudio, screenPreview, externalMediaPro; private SeekBar screenAudioVolume; private EditText et_channel; private int myUid, remoteUid = -1; private boolean joined = false; private RtcEngineEx engine; private final ScreenCaptureParameters screenCaptureParameters = new ScreenCaptureParameters(); + private String channelId; private Spinner screenScenarioType; + private MediaProjectionManager mediaProjectionManager; + private MediaProjection[] mediaProjection = new MediaProjection[1]; + private final ActivityResultLauncher mediaProjectionLauncher = registerForActivityResult( + new ActivityResultContracts.StartActivityForResult(), + result -> { + Log.d(TAG, "result-------------------result.getResultCode(): " + result.getResultCode()); + if (result.getResultCode() == Activity.RESULT_OK) { + try { + mediaProjection[0] = mediaProjectionManager + .getMediaProjection(result.getResultCode(), result.getData()); + joinChannel(); + } catch (Exception e) { + Log.e(TAG, "error msg: " + e.getMessage()); + } + } + } + ); + @Nullable @Override public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) { @@ -84,6 +109,7 @@ public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup c @Override public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); + mediaProjectionManager = (MediaProjectionManager) getContext().getSystemService(Context.MEDIA_PROJECTION_SERVICE); join = view.findViewById(R.id.btn_join); et_channel = view.findViewById(R.id.et_channel); @@ -92,6 +118,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat join.setOnClickListener(this); screenPreview = view.findViewById(R.id.screen_preview); + externalMediaPro = view.findViewById(R.id.media_projection_external); screenAudio = 
view.findViewById(R.id.screen_audio); screenAudioVolume = view.findViewById(R.id.screen_audio_volume); screenScenarioType = view.findViewById(R.id.spinner_screen_scenario_type); @@ -99,6 +126,7 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat screenScenarioType.setOnItemSelectedListener(this); screenPreview.setOnCheckedChangeListener(this); screenAudio.setOnCheckedChangeListener(this); + externalMediaPro.setOnCheckedChangeListener(this); screenAudioVolume.setOnSeekBarChangeListener(this); } @@ -161,6 +189,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { @Override public void onDestroy() { + stopService(); /*leaveChannel and Destroy the RtcEngine instance*/ if (engine != null) { engine.leaveChannel(); @@ -190,6 +219,7 @@ public void onCheckedChanged(CompoundButton compoundButton, boolean checked) { } screenCaptureParameters.captureAudio = checked; engine.updateScreenCaptureParameters(screenCaptureParameters); + } else if (compoundButton == externalMediaPro) { } } @@ -199,21 +229,20 @@ public void onClick(View v) { if (!joined) { CommonUtil.hideInputBoard(getActivity(), et_channel); // call when join button hit - String channelId = et_channel.getText().toString(); + channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + if (externalMediaPro.isChecked()) { + 
requestScreenCapture(); + } else { + joinChannel(); + } + } + } + }); } else { leaveChannel(); } @@ -249,8 +278,30 @@ private void stopScreenSharePreview() { engine.stopPreview(Constants.VideoSourceType.VIDEO_SOURCE_SCREEN_PRIMARY); } + private void startService() { +// if (joined) { + Intent intent = new Intent(requireContext(), MediaProjectionService.class); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) { + requireContext().startForegroundService(intent); + } else { + requireContext().startService(intent); + } +// } + } + + private void stopService() { + Intent serviceIntent = new Intent(getContext(), MediaProjectionService.class); + getContext().stopService(serviceIntent); + } - private void joinChannel(String channelId) { + private void requestScreenCapture() { + startService(); + Intent intent = mediaProjectionManager.createScreenCaptureIntent(); + mediaProjectionLauncher.launch(intent); + } + + + private void joinChannel() { engine.setParameters("{\"che.video.mobile_1080p\":true}"); engine.setClientRole(Constants.CLIENT_ROLE_BROADCASTER); @@ -274,6 +325,11 @@ private void joinChannel(String channelId) { screenCaptureParameters.videoCaptureParameters.framerate = DEFAULT_SHARE_FRAME_RATE; screenCaptureParameters.captureAudio = screenAudio.isChecked(); screenCaptureParameters.audioCaptureParameters.captureSignalVolume = screenAudioVolume.getProgress(); + if (externalMediaPro.isChecked()) { + engine.setExternalMediaProjection(mediaProjection[0]); + } else { + engine.setExternalMediaProjection(null); + } engine.startScreenCapture(screenCaptureParameters); if (screenPreview.isChecked()) { @@ -340,6 +396,7 @@ public void onJoinChannelSuccess(String channel, int uid, int elapsed) { myUid = uid; joined = true; handler.post(() -> { + externalMediaPro.setEnabled(false); join.setEnabled(true); join.setText(getString(R.string.leave)); }); @@ -461,6 +518,8 @@ public void onUserOffline(int uid, int reason) { }; private void leaveChannel() { + 
externalMediaPro.setEnabled(true); + stopService(); joined = false; join.setText(getString(R.string.join)); fl_local.removeAllViews(); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SendDataStream.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SendDataStream.java index c5db3c444..9f27425d4 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SendDataStream.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SendDataStream.java @@ -18,10 +18,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; - -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.nio.charset.Charset; import java.util.Date; @@ -30,6 +26,7 @@ import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -163,19 +160,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + 
joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SimpleExtension.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SimpleExtension.java index bd3438dee..e026253da 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SimpleExtension.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SimpleExtension.java @@ -21,9 +21,6 @@ import androidx.annotation.Nullable; import androidx.appcompat.app.AlertDialog; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import org.json.JSONException; import org.json.JSONObject; @@ -32,6 +29,7 @@ import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -281,18 +279,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } 
else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java index c43715dcc..70cae78c4 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/SwitchCameraScreenShare.java @@ -24,15 +24,13 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.Random; import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -232,19 +230,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + 
} + } + }); } else { joined = false; join.setText(getString(R.string.join)); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/TransparentRendering.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/TransparentRendering.java index 1583744b8..556dd440f 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/TransparentRendering.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/TransparentRendering.java @@ -4,10 +4,8 @@ import static io.agora.rtc2.Constants.RENDER_MODE_FIT; import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; -import android.Manifest; import android.annotation.SuppressLint; import android.content.Context; -import android.os.Build; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; @@ -22,11 +20,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - -import java.util.ArrayList; -import java.util.List; import java.util.Random; import io.agora.api.example.MainApplication; @@ -35,6 +28,7 @@ import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.VideoReportLayout; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.mediaplayer.IMediaPlayer; import io.agora.mediaplayer.data.MediaPlayerSource; @@ -165,29 +159,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - List permissionList = new ArrayList<>(); - permissionList.add(Permission.READ_EXTERNAL_STORAGE); - permissionList.add(Permission.WRITE_EXTERNAL_STORAGE); - permissionList.add(Permission.RECORD_AUDIO); - permissionList.add(Permission.CAMERA); - if 
(Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { - permissionList.add(Manifest.permission.BLUETOOTH_CONNECT); - } - - String[] permissionArray = new String[permissionList.size()]; - permissionList.toArray(permissionArray); - - if (AndPermission.hasPermissions(this, permissionArray)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - permissionArray - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; stopPlaying(); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/UrlLiveStream.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/UrlLiveStream.java new file mode 100644 index 000000000..a9a006e7c --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/UrlLiveStream.java @@ -0,0 +1,294 @@ +package io.agora.api.example.examples.advanced; + +import static io.agora.api.example.common.model.Examples.ADVANCED; + +import android.annotation.SuppressLint; +import android.os.Bundle; +import android.os.Handler; +import android.util.Log; +import android.view.LayoutInflater; +import android.view.View; +import android.view.ViewGroup; + +import io.agora.api.example.R; +import io.agora.api.example.annotation.Example; +import io.agora.api.example.common.BaseVbFragment; +import io.agora.api.example.databinding.FragmentUrlLiveStreamBinding; +import io.agora.api.example.utils.CommonUtil; +import io.agora.rte.Canvas; +import io.agora.rte.CanvasConfig; +import io.agora.rte.CanvasInitialConfig; +import io.agora.rte.Config; +import io.agora.rte.Constants; +import 
io.agora.rte.Error; +import io.agora.rte.Player; +import io.agora.rte.PlayerInfo; +import io.agora.rte.PlayerObserver; +import io.agora.rte.Rte; +import io.agora.rte.ViewConfig; +import io.agora.rte.exception.RteException; + +/** + * This demo demonstrates how to make a live stream with url + */ +@Example( + index = 26, + group = ADVANCED, + name = R.string.ultra_live_streaming_with_url, + actionId = R.id.action_mainFragment_to_url_live_stream, + tipsId = R.string.tip_ultra_live_streaming_with_url +) +public class UrlLiveStream extends BaseVbFragment implements View.OnClickListener { + private static final String TAG = UrlLiveStream.class.getSimpleName(); + private Handler mHandler = new Handler(); + private Rte mRte; + private Player mPlayer; + private Canvas mCanvas; + private PlayerObserver mPlayerObserver; + //player init flag + private boolean isInitSuccess; + //prepare resource flag + private boolean isPrepareSuccess; + private String playerStateDesc = ""; + private String initPlayerStateDesc = ""; + + @Override + protected FragmentUrlLiveStreamBinding getViewBinding(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { + return FragmentUrlLiveStreamBinding.inflate(inflater, container, false); + } + + @Override + protected void initView() { + binding.btnStart.setOnClickListener(this); + binding.btnStop.setOnClickListener(this); + binding.btnPause.setOnClickListener(this); + binding.btnPlay.setOnClickListener(this); + } + + @Override + protected void initData() { + binding.etRteUrl.setText("rte://" + getString(R.string.agora_app_id)); + try { + mRte = new Rte(null); + Config config = new Config(); + config.setAppId(getContext().getString(R.string.agora_app_id)); + mRte.setConfigs(config); + mRte.initMediaEngine((Error error) -> { + io.agora.rte.Constants.ErrorCode errCode = error.code(); + if (io.agora.rte.Constants.ErrorCode.OK != errCode) { + Log.e(TAG, "initData: initMediaEngine errCode=" + errCode); + } else { + mHandler.post(new 
Runnable() { + @Override + public void run() { + try { + mPlayer = new Player(mRte, null); + mPlayerObserver = new RtePlayerObserver(); + mPlayer.registerObserver(mPlayerObserver); + CanvasConfig canvasConfig = new CanvasConfig(); + canvasConfig.setVideoRenderMode(io.agora.rte.Constants.VideoRenderMode.FIT); + CanvasInitialConfig canvasInitialConfig = new CanvasInitialConfig(); + mCanvas = new Canvas(mRte, canvasInitialConfig); + mCanvas.setConfigs(canvasConfig); + binding.surfaceView.setZOrderMediaOverlay(true); + // Add to the local container + mCanvas.addView(binding.surfaceView, new ViewConfig()); + mPlayer.setCanvas(mCanvas); + isInitSuccess = true; + initPlayerStateDesc = "init player success"; + Log.d(TAG, initPlayerStateDesc); + updatePlayButtonState(); + //init success + } catch (RteException e) { + initPlayerStateDesc = "init player error = " + e.getMessage(); + Log.e(TAG, initPlayerStateDesc); + isInitSuccess = false; + updatePlayButtonState(); + } + } + }); + } + + }); + } catch (Exception e) { + initPlayerStateDesc = "init rte error = " + e.getMessage(); + Log.e(TAG, initPlayerStateDesc); + isInitSuccess = false; + updatePlayButtonState(); + } + } + + private void startWithUrl(String url) { + if (isInitSuccess) { + mPlayer.openWithUrl(url, 0, (Error error) -> { + Constants.ErrorCode code = error.code(); + if (code == Constants.ErrorCode.OK) { + // open resource success + initPlayerStateDesc = "open resurce success"; + Log.d(TAG, initPlayerStateDesc); + isPrepareSuccess = true; + updatePlayButtonState(); + } else { + initPlayerStateDesc = "open resurce failed error = " + code.name(); + Log.e(TAG, initPlayerStateDesc); + isPrepareSuccess = false; + updatePlayButtonState(); + stopPlay(); + } + }); + } + } + + public int stopPlay() { + try { + if (mPlayer != null) { + mPlayer.stop(); + } + } catch (RteException e) { + Log.e(TAG, "stopPlay: fail =" + e.getMessage()); + } + return 0; + } + + public void play() { + try { + if (mPlayer != null && !isPlaying()) 
{ + mPlayer.play(); + } + } catch (RteException e) { + Log.e(TAG, "play fail =" + e.getMessage()); + } + } + + public void pause() { + try { + if (mPlayer != null && isPlaying()) { + mPlayer.pause(); + } + } catch (RteException e) { + Log.e(TAG, "pause fail =" + e.getMessage()); + } + } + + public boolean isPlaying() { + try { + PlayerInfo playerInfo = new PlayerInfo(); + mPlayer.getInfo(playerInfo); + return Constants.PlayerState.PLAYING == Constants.PlayerState.fromInt(playerInfo.state()); + } catch (RteException e) { + Log.e(TAG, "isPlaying fail =" + e.getMessage()); + } + return false; + } + + public void release() { + isInitSuccess = false; + isPrepareSuccess = false; + try { + if (mPlayer != null) { + mPlayer.unregisterObserver(mPlayerObserver); + } + } catch (RteException exception) { + Log.e(TAG, "release: faile = " + exception.getMessage()); + } + mPlayer = null; + mPlayerObserver = null; + mCanvas = null; + mRte = null; + } + + private void updatePlayButtonState() { + runOnUIThread(new Runnable() { + @Override + public void run() { + binding.tvInitPlayerState.setText("init state = " + initPlayerStateDesc); + binding.tvPlayerState.setText("player state = " + playerStateDesc); + if (isInitSuccess) { + if (isPrepareSuccess) { + binding.btnStart.setVisibility(View.GONE); + binding.llContainerPlayPause.setVisibility(View.VISIBLE); + binding.btnStop.setVisibility(View.VISIBLE); + } else { + binding.btnStart.setVisibility(View.VISIBLE); + binding.llContainerPlayPause.setVisibility(View.GONE); + binding.btnStop.setVisibility(View.GONE); + } + } else { + binding.llContainerPlayPause.setVisibility(View.GONE); + binding.btnStop.setVisibility(View.GONE); + binding.btnStart.setVisibility(View.VISIBLE); + } + } + }); + } + + @Override + public void onDestroy() { + super.onDestroy(); + release(); + } + + @SuppressLint("WrongConstant") + @Override + public void onClick(View v) { + if (v == binding.btnStart) { + String url = binding.etRteUrl.getText().toString(); + 
startWithUrl(url); + CommonUtil.hideInputBoard(getActivity(), binding.etRteUrl); + } else if (v == binding.btnStop) { + stopPlay(); + CommonUtil.hideInputBoard(getActivity(), binding.etRteUrl); + + } else if (v == binding.btnPlay) { + play(); + } else if (v == binding.btnPause) { + pause(); + } + } + + private class RtePlayerObserver extends PlayerObserver { + @Override + public void onStateChanged(int old_state, int new_state, Error error) { + Constants.PlayerState playerState = Constants.PlayerState.fromInt(new_state); + playerStateDesc = playerState.name(); + switch (playerState) { + case IDLE: + Log.d(TAG, "onStateChanged: IDLE"); + break; + case OPENING: + Log.d(TAG, "onStateChanged: OPENING"); + break; + case OPEN_COMPLETED: + Log.d(TAG, "onStateChanged: OPEN_COMPLETED"); + break; + case PLAYING: + Log.d(TAG, "onStateChanged: PLAYING"); + break; + case PAUSED: + Log.d(TAG, "onStateChanged: PAUSED"); + break; + case PLAYBACK_COMPLETED: + Log.d(TAG, "onStateChanged: PLAYBACK_COMPLETED"); + break; + case STOPPED: + Log.d(TAG, "onStateChanged: STOPPED"); + isPrepareSuccess = false; + break; + case FAILED: + Constants.ErrorCode code = error.code(); + playerStateDesc = playerStateDesc + " error code = " + code.name(); + Log.d(TAG, "onStateChanged: FAILED error code = " + Constants.ErrorCode.getValue(code)); + isPrepareSuccess = false; + stopPlay(); + break; + } + updatePlayButtonState(); + } + + @Override + public void onEvent(int event) { + Constants.PlayerEvent currentEvent = Constants.PlayerEvent.fromInt(event); + Log.d(TAG, "onEvent: currentEvent = " + currentEvent); + } + } +} diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java index b01a5c673..21144de08 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java +++ 
b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoProcessExtension.java @@ -11,6 +11,7 @@ import android.view.SurfaceView; import android.view.View; import android.view.ViewGroup; +import android.widget.AdapterView; import android.widget.Button; import android.widget.CompoundButton; import android.widget.EditText; @@ -18,21 +19,21 @@ import android.widget.LinearLayout; import android.widget.RadioGroup; import android.widget.SeekBar; +import android.widget.Spinner; import android.widget.Switch; import android.widget.Toast; import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; +import io.agora.api.example.bean.MpOptions; import io.agora.api.example.common.BaseFragment; import io.agora.api.example.utils.CommonUtil; import io.agora.api.example.utils.FileUtils; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -42,6 +43,9 @@ import io.agora.rtc2.proxy.LocalAccessPointConfiguration; import io.agora.rtc2.video.BeautyOptions; import io.agora.rtc2.video.ColorEnhanceOptions; +import io.agora.rtc2.video.FaceShapeAreaOptions; +import io.agora.rtc2.video.FaceShapeBeautyOptions; +import io.agora.rtc2.video.FilterEffectOptions; import io.agora.rtc2.video.LowLightEnhanceOptions; import io.agora.rtc2.video.SegmentationProperty; import io.agora.rtc2.video.VideoCanvas; @@ -59,20 +63,31 @@ actionId = R.id.action_mainFragment_video_enhancement, tipsId = R.string.videoEnhancement ) -public class VideoProcessExtension extends BaseFragment implements View.OnClickListener, CompoundButton.OnCheckedChangeListener, SeekBar.OnSeekBarChangeListener { +public class VideoProcessExtension extends 
BaseFragment implements View.OnClickListener, CompoundButton.OnCheckedChangeListener, SeekBar.OnSeekBarChangeListener, AdapterView.OnItemSelectedListener { private static final String TAG = VideoProcessExtension.class.getSimpleName(); private FrameLayout fl_local, fl_remote; private LinearLayout controlPanel; private Button join; - private Switch beauty, virtualBackground, lightness2, colorful2, noiseReduce2; - private SeekBar seek_lightness, seek_redness, seek_sharpness, seek_smoothness, seek_strength, seek_skin; + private Switch shapeBeauty, makeUp, beauty, virtualBackground, lightness2, colorful2, noiseReduce2; + private SeekBar seek_lightness, seek_redness, seek_sharpness, seek_videoEnhance, seek_smoothness, seek_strength, seek_skin; + //缇庡 + private SeekBar sbBrowStrength, sbLashStrength, sbShadowStrength, sbPupilStrength, sbBlushStrength, sbLipStrength; + private Spinner spinnerBrowStyle, spinnerLashStyle, spinnerShadowStyle, spinnerPupilStyle, spinnerBlushStyle, spinnerLipStyle; + private Spinner spinnerBrowColor, spinnerLashColor, spinnerShadowColor, spinnerPupilColor, spinnerBlushColor, spinnerLipColor; + //缇庡瀷 + private SeekBar sbShapeBeautifyAreaIntensity, sbShapeBeautifyStyleIntensity; + private Spinner spinnerShapeBeautyArea, spinnerShapeBeautifyStyle; private EditText et_channel; private RadioGroup virtualBgType; private RtcEngine engine; private int myUid; private boolean joined = false; private BeautyOptions beautyOptions = new BeautyOptions(); + private FilterEffectOptions filterEffectOptions = new FilterEffectOptions(); + private MpOptions makeUpOptions = new MpOptions(); + private FaceShapeBeautyOptions faceShapeBeautyOptions = new FaceShapeBeautyOptions(); + private FaceShapeAreaOptions faceShapeAreaOptions = new FaceShapeAreaOptions(); private double skinProtect = 1.0; private double strength = 0.5; private VirtualBackgroundSource virtualBackgroundSource = new VirtualBackgroundSource(); @@ -93,6 +108,10 @@ public void onViewCreated(@NonNull 
View view, @Nullable Bundle savedInstanceStat fl_local = view.findViewById(R.id.fl_local); fl_remote = view.findViewById(R.id.fl_remote); controlPanel = view.findViewById(R.id.controlPanel); + shapeBeauty = view.findViewById(R.id.switch_face_shape_beautify); + shapeBeauty.setOnCheckedChangeListener(this); + makeUp = view.findViewById(R.id.switch_face_makeup); + makeUp.setOnCheckedChangeListener(this); beauty = view.findViewById(R.id.switch_face_beautify); beauty.setOnCheckedChangeListener(this); lightness2 = view.findViewById(R.id.switch_lightness2); @@ -107,6 +126,8 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat seek_lightness.setOnSeekBarChangeListener(this); seek_redness = view.findViewById(R.id.redness); seek_redness.setOnSeekBarChangeListener(this); + seek_videoEnhance = view.findViewById(R.id.sb_video_enhance); + seek_videoEnhance.setOnSeekBarChangeListener(this); seek_sharpness = view.findViewById(R.id.sharpness); seek_sharpness.setOnSeekBarChangeListener(this); seek_smoothness = view.findViewById(R.id.smoothness); @@ -116,6 +137,60 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat seek_skin = view.findViewById(R.id.skinProtect); seek_skin.setOnSeekBarChangeListener(this); + //缇庡瀷 + sbShapeBeautifyAreaIntensity = view.findViewById(R.id.sb_shape_beautify_area_intensity); + sbShapeBeautifyAreaIntensity.setOnSeekBarChangeListener(this); + sbShapeBeautifyStyleIntensity = view.findViewById(R.id.sb_shape_beautify_style_intensity); + sbShapeBeautifyStyleIntensity.setOnSeekBarChangeListener(this); + + spinnerShapeBeautyArea = view.findViewById(R.id.spinner_shape_beauty_area); + spinnerShapeBeautyArea.setOnItemSelectedListener(this); + spinnerShapeBeautifyStyle = view.findViewById(R.id.spinner_shape_beautify_style); + spinnerShapeBeautifyStyle.setOnItemSelectedListener(this); + + //缇庡 + sbBrowStrength = view.findViewById(R.id.sb_brow_strength); + sbBrowStrength.setOnSeekBarChangeListener(this); + 
sbLashStrength = view.findViewById(R.id.sb_lash_strength); + sbLashStrength.setOnSeekBarChangeListener(this); + sbShadowStrength = view.findViewById(R.id.sb_shadow_strength); + sbShadowStrength.setOnSeekBarChangeListener(this); + sbPupilStrength = view.findViewById(R.id.sb_pupil_strength); + sbPupilStrength.setOnSeekBarChangeListener(this); + sbBlushStrength = view.findViewById(R.id.sb_blush_strength); + sbBlushStrength.setOnSeekBarChangeListener(this); + sbLipStrength = view.findViewById(R.id.sb_lip_strength); + sbLipStrength.setOnSeekBarChangeListener(this); + + spinnerBrowStyle = view.findViewById(R.id.spinner_brow_style); + spinnerLashStyle = view.findViewById(R.id.spinner_lash_style); + spinnerShadowStyle = view.findViewById(R.id.spinner_shadow_style); + spinnerPupilStyle = view.findViewById(R.id.spinner_pupil_style); + spinnerBlushStyle = view.findViewById(R.id.spinner_blush_style); + spinnerLipStyle = view.findViewById(R.id.spinner_lip_style); + + spinnerBrowColor = view.findViewById(R.id.spinner_brow_color); + spinnerLashColor = view.findViewById(R.id.spinner_lash_color); + spinnerShadowColor = view.findViewById(R.id.spinner_shadow_color); + spinnerPupilColor = view.findViewById(R.id.spinner_pupil_color); + spinnerBlushColor = view.findViewById(R.id.spinner_blush_color); + spinnerLipColor = view.findViewById(R.id.spinner_lip_color); + + spinnerBrowStyle.setOnItemSelectedListener(this); + spinnerLashStyle.setOnItemSelectedListener(this); + spinnerShadowStyle.setOnItemSelectedListener(this); + spinnerPupilStyle.setOnItemSelectedListener(this); + spinnerBlushStyle.setOnItemSelectedListener(this); + spinnerLipStyle.setOnItemSelectedListener(this); + + spinnerBrowColor.setOnItemSelectedListener(this); + spinnerLashColor.setOnItemSelectedListener(this); + spinnerShadowColor.setOnItemSelectedListener(this); + spinnerPupilColor.setOnItemSelectedListener(this); + spinnerBlushColor.setOnItemSelectedListener(this); + spinnerLipColor.setOnItemSelectedListener(this); + 
+ virtualBgType = view.findViewById(R.id.virtual_bg_type); virtualBgType.setOnCheckedChangeListener((group, checkedId) -> { resetVirtualBackground(); @@ -205,12 +280,33 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { } engine.enableExtension("agora_video_filters_clear_vision", "clear_vision", true); + updateExtensionProperty(); + updateFaceShapeBeautyStyleOptions(); } catch (Exception e) { e.printStackTrace(); getActivity().onBackPressed(); } } + private void updateFaceShapeBeautyAreaOptions() { + if (engine != null) { + engine.setFaceShapeAreaOptions(faceShapeAreaOptions); + } + } + + + private void updateFaceShapeBeautyStyleOptions() { + if (engine != null) { + engine.setFaceShapeBeautyOptions(shapeBeauty.isChecked(), faceShapeBeautyOptions); + } + } + + private void updateExtensionProperty() { + if (engine != null) { + engine.setExtensionProperty("agora_video_filters_clear_vision", "clear_vision", "makeup_options", makeUpOptions.toJson(), Constants.MediaSourceType.PRIMARY_CAMERA_SOURCE); + } + } + @Override public void onDestroy() { super.onDestroy(); @@ -289,19 +385,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE, - Permission.Group.CAMERA - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a 
channel, the user must call the leaveChannel method to end the @@ -328,9 +420,81 @@ public void onClick(View v) { } } + @Override + public void onItemSelected(AdapterView parent, View view, int position, long id) { + + switch (parent.getId()) { + case R.id.spinner_shape_beauty_area: + faceShapeAreaOptions.shapeArea = position - 1; + //get origin beauty option params + FaceShapeAreaOptions originOptions = engine.getFaceShapeAreaOptions(faceShapeAreaOptions.shapeArea); + if (originOptions != null) { + faceShapeAreaOptions.shapeIntensity = originOptions.shapeIntensity; + sbShapeBeautifyAreaIntensity.setProgress(originOptions.shapeIntensity); + } + updateFaceShapeBeautyAreaOptions(); + return; + case R.id.spinner_shape_beautify_style: + faceShapeBeautyOptions.shapeStyle = position; + updateFaceShapeBeautyStyleOptions(); + return; + case R.id.spinner_brow_style: + makeUpOptions.browStyle = position; + break; + case R.id.spinner_lash_style: + makeUpOptions.lashStyle = position; + break; + case R.id.spinner_shadow_style: + makeUpOptions.shadowStyle = position; + break; + case R.id.spinner_pupil_style: + makeUpOptions.pupilStyle = position; + break; + case R.id.spinner_blush_style: + makeUpOptions.blushStyle = position; + break; + case R.id.spinner_lip_style: + makeUpOptions.lipStyle = position; + break; + case R.id.spinner_brow_color: + makeUpOptions.browColor = position; + break; + case R.id.spinner_lash_color: + makeUpOptions.lashColor = position; + break; + case R.id.spinner_blush_color: + makeUpOptions.blushColor = position; + break; + case R.id.spinner_lip_color: + makeUpOptions.lipColor = position; + break; + } + updateExtensionProperty(); + } + + @Override + public void onNothingSelected(AdapterView parent) { + + } + @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { - if (buttonView.getId() == beauty.getId()) { + if (buttonView.getId() == shapeBeauty.getId()) { + if (isChecked && 
!engine.isFeatureAvailableOnDevice(Constants.FEATURE_VIDEO_BEAUTY_EFFECT)) { + buttonView.setChecked(false); + Toast.makeText(requireContext(), R.string.feature_unavailable, Toast.LENGTH_SHORT).show(); + return; + } + updateFaceShapeBeautyStyleOptions(); + } else if (buttonView.getId() == makeUp.getId()) { + if (isChecked && !engine.isFeatureAvailableOnDevice(Constants.FEATURE_VIDEO_BEAUTY_EFFECT)) { + buttonView.setChecked(false); + Toast.makeText(requireContext(), R.string.feature_unavailable, Toast.LENGTH_SHORT).show(); + return; + } + makeUpOptions.enable_mu = isChecked; + updateExtensionProperty(); + } else if (buttonView.getId() == beauty.getId()) { if (isChecked && !engine.isFeatureAvailableOnDevice(Constants.FEATURE_VIDEO_BEAUTY_EFFECT)) { buttonView.setChecked(false); Toast.makeText(requireContext(), R.string.feature_unavailable, Toast.LENGTH_SHORT).show(); @@ -338,6 +502,7 @@ public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { } engine.setBeautyEffectOptions(isChecked, beautyOptions); + engine.setFilterEffectOptions(isChecked, filterEffectOptions); } else if (buttonView.getId() == lightness2.getId()) { LowLightEnhanceOptions options = new LowLightEnhanceOptions(); options.lowlightEnhanceLevel = LowLightEnhanceOptions.LOW_LIGHT_ENHANCE_LEVEL_FAST; @@ -370,7 +535,31 @@ private void setColorEnhance(boolean isChecked) { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { float value = ((float) progress) / 10; - if (seekBar.getId() == seek_lightness.getId()) { + if (seekBar.getId() == sbShapeBeautifyAreaIntensity.getId()) { + faceShapeAreaOptions.shapeIntensity = progress; + updateFaceShapeBeautyAreaOptions(); + } else if (seekBar.getId() == sbShapeBeautifyStyleIntensity.getId()) { + faceShapeBeautyOptions.styleIntensity = progress; + updateFaceShapeBeautyStyleOptions(); + } else if (seekBar.getId() == sbBrowStrength.getId()) { + makeUpOptions.browStrength = value; + updateExtensionProperty(); + } 
else if (seekBar.getId() == sbLashStrength.getId()) { + makeUpOptions.lashStrength = value; + updateExtensionProperty(); + } else if (seekBar.getId() == sbShadowStrength.getId()) { + makeUpOptions.shadowStrength = value; + updateExtensionProperty(); + } else if (seekBar.getId() == sbPupilStrength.getId()) { + makeUpOptions.pupilStrength = value; + updateExtensionProperty(); + } else if (seekBar.getId() == sbBlushStrength.getId()) { + makeUpOptions.blushStrength = value; + updateExtensionProperty(); + } else if (seekBar.getId() == sbLipStrength.getId()) { + makeUpOptions.lipStrength = value; + updateExtensionProperty(); + } else if (seekBar.getId() == seek_lightness.getId()) { beautyOptions.lighteningLevel = value; engine.setBeautyEffectOptions(beauty.isChecked(), beautyOptions); } else if (seekBar.getId() == seek_redness.getId()) { @@ -379,6 +568,11 @@ public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { } else if (seekBar.getId() == seek_sharpness.getId()) { beautyOptions.sharpnessLevel = value; engine.setBeautyEffectOptions(beauty.isChecked(), beautyOptions); + } else if (seekBar.getId() == seek_videoEnhance.getId()) { + filterEffectOptions.strength = value; + filterEffectOptions.path = "built_in_whiten_filter"; + int ret = engine.setFilterEffectOptions(beauty.isChecked(), filterEffectOptions); + Log.d(TAG, "onProgressChanged: ret=" + ret); } else if (seekBar.getId() == seek_smoothness.getId()) { beautyOptions.smoothnessLevel = value; engine.setBeautyEffectOptions(beauty.isChecked(), beautyOptions); @@ -586,4 +780,5 @@ public void run() { }); } }; + } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoQuickSwitch.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoQuickSwitch.java index 79b25a570..44830dd19 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoQuickSwitch.java +++ 
b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VideoQuickSwitch.java @@ -27,9 +27,6 @@ import androidx.viewpager.widget.PagerAdapter; import androidx.viewpager.widget.ViewPager; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.ArrayList; import java.util.List; @@ -37,6 +34,7 @@ import io.agora.api.example.R; import io.agora.api.example.annotation.Example; import io.agora.api.example.common.BaseFragment; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -234,18 +232,15 @@ public void onPageScrollStateChanged(int state) { public void onActivityCreated(@Nullable Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelList.get(0)); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.READ_EXTERNAL_STORAGE, - Permission.WRITE_EXTERNAL_STORAGE - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelList.get(0)); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelList.get(0)); + } + } + }); } @Override diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java index 418c06a63..270ec862c 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java +++ 
b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/VoiceEffects.java @@ -67,9 +67,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.Arrays; import java.util.LinkedHashMap; import java.util.List; @@ -81,6 +78,7 @@ import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -310,18 +308,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; resetControlLayoutByJoined(); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java index 2e05ce96b..c6806b9b6 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java +++ 
b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java @@ -6,6 +6,7 @@ import android.view.View; import android.view.ViewGroup; import android.view.ViewParent; +import android.widget.Toast; import androidx.annotation.NonNull; import androidx.annotation.Nullable; @@ -82,7 +83,15 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat ByteDanceBeautySDK.INSTANCE.getRenderManager(), new EventCallback(beautyStats -> null, () -> { - ByteDanceBeautySDK.INSTANCE.initEffect(requireContext()); + boolean authSuccess = ByteDanceBeautySDK.INSTANCE.initEffect(requireContext()); + if(!authSuccess){ + runOnUIThread(new Runnable() { + @Override + public void run() { + Toast.makeText(getContext(), "auth failed", Toast.LENGTH_SHORT).show(); + } + }); + } return null; }, () -> { diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt index 8a28ab81f..ca3833ecc 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt @@ -3,7 +3,7 @@ package io.agora.api.example.examples.advanced.beauty import android.content.Context import android.util.Log import com.effectsar.labcv.effectsdk.RenderManager -import io.agora.api.example.utils.FileUtils +import io.agora.api.example.examples.advanced.beauty.utils.FileUtils import io.agora.beautyapi.bytedance.ByteDanceBeautyAPI import java.io.File @@ -11,7 +11,7 @@ object ByteDanceBeautySDK { private const val TAG = "ByteDanceBeautySDK" - private val LICENSE_NAME = "Agora_test_20240412_20240712_io.agora.entfull_4.5.0_1443.licbag" + private val LICENSE_NAME = "Agora_test_20241014_20241214_io.agora.entfull_4.5.0_2060.licbag" private var 
storagePath = "" private var assetsPath = "" private var licensePath = "" @@ -37,21 +37,20 @@ object ByteDanceBeautySDK { assetsPath = "beauty_bytedance" // copy license - licensePath = "$storagePath/beauty_bytedance/LicenseBag.bundle" - FileUtils.copyFilesFromAssets(context, "$assetsPath/LicenseBag.bundle", licensePath) - licensePath += "/$LICENSE_NAME" + licensePath = "$storagePath/beauty_bytedance/LicenseBag.bundle/$LICENSE_NAME" + FileUtils.copyAssets(context, "$assetsPath/LicenseBag.bundle/$LICENSE_NAME", licensePath) if (!File(licensePath).exists()) { return false } // copy models modelsPath = "$storagePath/beauty_bytedance/ModelResource.bundle" - FileUtils.copyFilesFromAssets(context, "$assetsPath/ModelResource.bundle", modelsPath) + FileUtils.copyAssets(context, "$assetsPath/ModelResource.bundle", modelsPath) // copy beauty node beautyNodePath = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/beauty_Android_lite" - FileUtils.copyFilesFromAssets( + FileUtils.copyAssets( context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/beauty_Android_lite", beautyNodePath @@ -60,7 +59,7 @@ object ByteDanceBeautySDK { // copy beauty 4items node beauty4ItemsNodePath = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/beauty_4Items" - FileUtils.copyFilesFromAssets( + FileUtils.copyAssets( context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/beauty_4Items", beauty4ItemsNodePath @@ -69,7 +68,7 @@ object ByteDanceBeautySDK { // copy resharp node reSharpNodePath = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/reshape_lite" - FileUtils.copyFilesFromAssets( + FileUtils.copyAssets( context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/reshape_lite", reSharpNodePath @@ -77,19 +76,19 @@ object ByteDanceBeautySDK { // copy stickers stickerPath = "$storagePath/beauty_bytedance/StickerResource.bundle/stickers" - FileUtils.copyFilesFromAssets(context, "$assetsPath/StickerResource.bundle/stickers", stickerPath) + 
FileUtils.copyAssets(context, "$assetsPath/StickerResource.bundle/stickers", stickerPath) return true } // GL Thread - fun initEffect(context: Context) { + fun initEffect(context: Context) : Boolean{ val ret = renderManager.init( context, modelsPath, licensePath, false, false, 0 ) if (!checkResult("RenderManager init ", ret)) { - return + return false } renderManager.useBuiltinSensor(true) renderManager.set3Buffer(false) @@ -99,6 +98,7 @@ object ByteDanceBeautySDK { ) renderManager.loadResourceWithTimeout(-1) beautyConfig.resume() + return true } // GL Thread @@ -139,7 +139,7 @@ object ByteDanceBeautySDK { } internal fun setBeautyAPI(beautyAPI: ByteDanceBeautyAPI?) { - this.beautyAPI = beautyAPI + ByteDanceBeautySDK.beautyAPI = beautyAPI } private fun runOnBeautyThread(run: () -> Unit) { @@ -411,7 +411,7 @@ object ByteDanceBeautySDK { if (value != null) { val nodePath = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/style_makeup/${value.style}" - FileUtils.copyFilesFromAssets( + FileUtils.copyAssets( value.context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/style_makeup/${value.style}", nodePath diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt index 57bb76b5f..7d78db409 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt @@ -31,6 +31,8 @@ object FaceUnityBeautySDK { private var beautyAPI: FaceUnityBeautyAPI? 
= null + private var authSuccess = false + fun initBeauty(context: Context): Boolean { val auth = try { getAuth() @@ -45,13 +47,14 @@ object FaceUnityBeautySDK { override fun onSuccess(code: Int, msg: String) { Log.i(TAG, "FURenderManager onSuccess -- code=$code, msg=$msg") if (code == OPERATE_SUCCESS_AUTH) { - faceunity.fuSetUseTexAsync(1) + authSuccess = true + faceunity.fuSetUseTexAsync(0) FUAIKit.getInstance() .loadAIProcessor(BUNDLE_AI_FACE, FUAITypeEnum.FUAITYPE_FACEPROCESSOR) - FUAIKit.getInstance().loadAIProcessor( - BUNDLE_AI_HUMAN, - FUAITypeEnum.FUAITYPE_HUMAN_PROCESSOR - ) + // FUAIKit.getInstance().loadAIProcessor( + // BUNDLE_AI_HUMAN, + // FUAITypeEnum.FUAITYPE_HUMAN_PROCESSOR + // ) } } @@ -63,9 +66,14 @@ object FaceUnityBeautySDK { return true } + fun isAuthSuccess(): Boolean { + return authSuccess + } + fun unInitBeauty() { beautyAPI = null beautyConfig.reset() + authSuccess = false FUAIKit.getInstance().releaseAllAIProcessor() FURenderKit.getInstance().release() } @@ -77,8 +85,9 @@ object FaceUnityBeautySDK { return aMethod.invoke(null) as? ByteArray } - internal fun setBeautyAPI(beautyAPI: FaceUnityBeautyAPI) { - this.beautyAPI = beautyAPI + internal fun setBeautyAPI(beautyAPI: FaceUnityBeautyAPI?) 
{ + FaceUnityBeautySDK.beautyAPI = beautyAPI + beautyConfig.resume() } private fun runOnBeautyThread(run: () -> Unit) { @@ -312,6 +321,28 @@ object FaceUnityBeautySDK { sticker = null } + fun resume(){ + smooth = smooth + whiten = whiten + thinFace = thinFace + enlargeEye = enlargeEye + redden = redden + shrinkCheekbone = shrinkCheekbone + shrinkJawbone = shrinkJawbone + whiteTeeth = whiteTeeth + hairlineHeight = hairlineHeight + narrowNose = narrowNose + mouthSize = mouthSize + chinLength = chinLength + brightEye = brightEye + darkCircles = darkCircles + nasolabialFolds = nasolabialFolds + faceThree = faceThree + + makeUp = makeUp + sticker = sticker + } + } data class MakeUpItem( diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt index 1e8a3bb4d..42bcd0985 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt @@ -8,7 +8,7 @@ import com.softsugar.stmobile.STMobileEffectNative import com.softsugar.stmobile.STMobileEffectParams import com.softsugar.stmobile.STMobileHumanActionNative import com.softsugar.stmobile.params.STEffectBeautyType -import io.agora.api.example.utils.FileUtils +import io.agora.api.example.examples.advanced.beauty.utils.FileUtils import io.agora.beautyapi.sensetime.SenseTimeBeautyAPI object SenseTimeBeautySDK { @@ -55,16 +55,25 @@ object SenseTimeBeautySDK { private var beautyAPI: SenseTimeBeautyAPI? 
= null + private var authSuccess = false + fun initBeautySDK(context: Context): Boolean { if (checkLicense(context)) { initHumanAction(context) + authSuccess = true return true } + initHumanAction(context) return false } + fun isAuthSuccess(): Boolean { + return authSuccess + } + fun unInitBeautySDK() { beautyAPI = null + authSuccess = false unInitHumanActionNative() beautyConfig.reset() } @@ -78,6 +87,7 @@ object SenseTimeBeautySDK { _mobileEffectNative?.createInstance(context, STMobileEffectNative.EFFECT_CONFIG_NONE) _mobileEffectNative?.setParam(STMobileEffectParams.EFFECT_PARAM_QUATERNION_SMOOTH_FRAME, 5f) Log.d(TAG, "SenseTime >> STMobileEffectNative create result : $result") + beautyConfig.resume() } fun unInitMobileEffect() { @@ -98,8 +108,8 @@ object SenseTimeBeautySDK { license, license.length ) - Log.d(TAG, "SenseTime >> checkLicense successfully! activeCode=$activeCode") - return true + Log.d(TAG, "SenseTime >> checkLicense activeCode=$activeCode") + return activeCode.isNotEmpty() } private fun initHumanAction(context: Context) { @@ -147,8 +157,8 @@ object SenseTimeBeautySDK { } - internal fun setBeautyAPI(beautyAPI: SenseTimeBeautyAPI){ - this.beautyAPI = beautyAPI + internal fun setBeautyAPI(beautyAPI: SenseTimeBeautyAPI?){ + SenseTimeBeautySDK.beautyAPI = beautyAPI beautyConfig.resume() } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/utils/FileUtils.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/utils/FileUtils.kt new file mode 100644 index 000000000..0da7f3323 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/utils/FileUtils.kt @@ -0,0 +1,113 @@ +/* + * MIT License + * + * Copyright (c) 2023 Agora Community + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without 
restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package io.agora.api.example.examples.advanced.beauty.utils + +import android.content.Context +import android.util.Log +import java.io.BufferedInputStream +import java.io.BufferedOutputStream +import java.io.BufferedReader +import java.io.File +import java.io.FileOutputStream +import java.io.IOException +import java.io.InputStream +import java.io.InputStreamReader +import java.io.OutputStream + +object FileUtils { + val TAG = "FileUtils" + + fun getAssetsString(context: Context, path: String): String { + val sb = StringBuilder() + var isr: InputStreamReader? = null + var br: BufferedReader? = null + try { + isr = InputStreamReader(context.resources.assets.open(path)) + br = BufferedReader(isr) + var line: String? 
= null + while (br.readLine().also { line = it } != null) { + sb.append(line).append("\n") + } + } catch (e: IOException) { + Log.e(TAG, "getAssetsString error: $e") + } finally { + if (isr != null) { + try { + isr.close() + } catch (e: IOException) { + e.printStackTrace() + } + } + if (br != null) { + try { + br.close() + } catch (e: IOException) { + e.printStackTrace() + } + } + } + return sb.toString() + } + + fun copyAssets(context: Context, assetsPath: String, targetPath: String) { + val fileNames = context.resources.assets.list(assetsPath) + if (fileNames?.isNotEmpty() == true) { + val targetFile = File(targetPath) + if (!targetFile.exists() && !targetFile.mkdirs()) { + return + } + for (fileName in fileNames) { + copyAssets( + context, + "$assetsPath/$fileName", + "$targetPath/$fileName" + ) + } + } else { + copyAssetsFile(context, assetsPath, targetPath) + } + } + + private fun copyAssetsFile(context: Context, assetsFile: String, targetPath: String) { + val dest = File(targetPath) + dest.parentFile?.mkdirs() + var input: InputStream? = null + var output: OutputStream? 
= null + try { + input = BufferedInputStream(context.assets.open(assetsFile)) + output = BufferedOutputStream(FileOutputStream(dest)) + val buffer = ByteArray(1024) + var length = 0 + while (input.read(buffer).also { length = it } != -1) { + output.write(buffer, 0, length) + } + } catch (e: Exception) { + Log.e(TAG, "copyAssetsFile", e) + } finally { + output?.close() + input?.close() + } + } +} \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java index 479854f9a..1124f07a6 100755 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioRender.java @@ -17,9 +17,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.nio.ByteBuffer; import io.agora.api.example.MainApplication; @@ -28,6 +25,7 @@ import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -183,15 +181,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission(Permission.Group.STORAGE, 
Permission.Group.MICROPHONE).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java index cf4024189..ae9ab30e1 100755 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/customaudio/CustomAudioSource.java @@ -17,9 +17,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import io.agora.api.example.MainApplication; import io.agora.api.example.R; import io.agora.api.example.annotation.Example; @@ -27,6 +24,7 @@ import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.AudioFileReader; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -211,15 +209,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - 
return; - } - // Request permission - AndPermission.with(this).runtime().permission(Permission.Group.STORAGE, Permission.Group.MICROPHONE).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelAudio.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelAudio.java index 618d18362..44a564abe 100755 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelAudio.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelAudio.java @@ -1,6 +1,8 @@ package io.agora.api.example.examples.basic; import static io.agora.api.example.common.model.Examples.BASIC; +import static io.agora.rtc2.Constants.AudioSourceType.AUDIO_SOURCE_MICROPHONE; +import static io.agora.rtc2.Constants.AudioSourceType.AUDIO_SOURCE_REMOTE_CHANNEL; import android.app.Notification; import android.app.NotificationChannel; @@ -18,13 +20,16 @@ import android.os.Handler; import android.os.IBinder; import android.provider.Settings; +import android.text.TextUtils; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.Button; +import android.widget.CompoundButton; import android.widget.EditText; +import android.widget.LinearLayout; import android.widget.SeekBar; import android.widget.Spinner; import android.widget.Switch; @@ -34,12 +39,11 @@ import 
androidx.appcompat.app.AlertDialog; import androidx.core.app.NotificationManagerCompat; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - import java.util.ArrayList; import java.util.LinkedHashMap; +import java.util.List; import java.util.Map; +import java.util.Random; import io.agora.api.example.MainActivity; import io.agora.api.example.MainApplication; @@ -48,12 +52,17 @@ import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.AudioSeatManager; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; import io.agora.rtc2.IRtcEngineEventHandler; +import io.agora.rtc2.LeaveChannelOptions; +import io.agora.rtc2.LocalAudioMixerConfiguration; +import io.agora.rtc2.RtcConnection; import io.agora.rtc2.RtcEngine; import io.agora.rtc2.RtcEngineConfig; +import io.agora.rtc2.RtcEngineEx; import io.agora.rtc2.proxy.LocalAccessPointConfiguration; /** @@ -78,11 +87,18 @@ public class JoinChannelAudio extends BaseFragment implements View.OnClickListen private Button mute, join; private SeekBar record, playout, inear; private Switch inEarSwitch; - private RtcEngine engine; + private LinearLayout exllContainer; + private EditText exEtChannel; + private Switch exSwitch; + private RtcEngineEx engine; private int myUid; private boolean joined = false; private AudioSeatManager audioSeatManager; + private String channelId = ""; + private String exChannelName = ""; + private RtcConnection rtcConnection2 = new RtcConnection(); + private SeekBar.OnSeekBarChangeListener seekBarChangeListener = new SeekBar.OnSeekBarChangeListener() { @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { @@ -214,6 +230,7 @@ record = view.findViewById(R.id.recordingVol); if (savedInstanceState != null) { joined = 
savedInstanceState.getBoolean("joined"); + setExChannelState(); if (joined) { myUid = savedInstanceState.getInt("myUid"); ArrayList seatRemoteUidList = savedInstanceState.getIntegerArrayList("seatRemoteUidList"); @@ -231,6 +248,36 @@ record = view.findViewById(R.id.recordingVol); } } } + + exllContainer = view.findViewById(R.id.ll_join2); + exEtChannel = view.findViewById(R.id.et_channel2); + exSwitch = view.findViewById(R.id.btn_audio_mixer); + exSwitch.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() { + @Override + public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { + exChannelName = exEtChannel.getText().toString(); + if (TextUtils.isEmpty(exChannelName)) { + exSwitch.setChecked(false); + showShortToast(getString(R.string.dest_channel_empty)); + return; + } + if (isChecked) { + joinSecondChannelAndStartAudioMixer(); + } else { + leaveSecondChannelAndStopAudioMixer(); + } + } + }); + } + + private void setExChannelState() { + exSwitch.setEnabled(joined); + if (!joined) { + exSwitch.setChecked(false); + exChannelName = ""; + rtcConnection2.channelId = ""; + rtcConnection2.localUid = -1; + } } @Override @@ -265,7 +312,7 @@ public void onActivityCreated(@Nullable Bundle savedInstanceState) { config.mEventHandler = iRtcEngineEventHandler; config.mAudioScenario = Constants.AudioScenario.getValue(Constants.AudioScenario.valueOf(audioScenarioInput.getSelectedItem().toString())); config.mAreaCode = ((MainApplication) getActivity().getApplication()).getGlobalSettings().getAreaCode(); - engine = RtcEngine.create(config); + engine = (RtcEngineEx) RtcEngine.create(config); /* * This parameter is for reporting the usages of APIExample to agora background. * Generally, it is not necessary for you to set this parameter. 
@@ -371,26 +418,22 @@ public void onClick(View v) { if (!joined) { CommonUtil.hideInputBoard(getActivity(), et_channel); // call when join button hit - String channelId = et_channel.getText().toString(); + channelId = et_channel.getText().toString(); // Check permission - if (AndPermission.hasPermissions(this, Permission.Group.STORAGE, Permission.Group.MICROPHONE, Permission.Group.CAMERA)) { - joinChannel(channelId); - audioProfileInput.setEnabled(false); - channelProfileInput.setEnabled(false); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - Permission.Group.STORAGE, - Permission.Group.MICROPHONE - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - audioProfileInput.setEnabled(false); - channelProfileInput.setEnabled(false); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + if (allPermissionsGranted) { + // Permissions Granted + joinChannel(channelId); + audioProfileInput.setEnabled(false); + channelProfileInput.setEnabled(false); + } + } + }); } else { joined = false; + setExChannelState(); /*After joining a channel, the user must call the leaveChannel method to end the * call before joining another channel. This method returns 0 if the user leaves the * channel and releases all resources related to the call. 
This method call is @@ -430,6 +473,46 @@ public void onClick(View v) { } } + private void joinSecondChannelAndStartAudioMixer() { + ChannelMediaOptions mediaOptions = new ChannelMediaOptions(); + mediaOptions.clientRoleType = Constants.CLIENT_ROLE_BROADCASTER; + mediaOptions.publishMicrophoneTrack = false; + mediaOptions.publishMixedAudioTrack = true; + mediaOptions.enableAudioRecordingOrPlayout = false; + mediaOptions.autoSubscribeAudio = false; + rtcConnection2.channelId = exChannelName; + rtcConnection2.localUid = new Random().nextInt(512) + 512; + Log.d(TAG, "joinSecondChannelAndStartAudioMixer: " + rtcConnection2.channelId + " " + rtcConnection2.localUid); + TokenUtils.gen(requireContext(), rtcConnection2.channelId, rtcConnection2.localUid, new TokenUtils.OnTokenGenCallback() { + @Override + public void onTokenGen(String token) { + int res = engine.joinChannelEx(token, rtcConnection2, mediaOptions, secondHandler); + if (res != Constants.ERR_OK) { + showAlert(RtcEngine.getErrorDescription(Math.abs(res))); + } else { + LocalAudioMixerConfiguration config = new LocalAudioMixerConfiguration(); + config.syncWithLocalMic = false; + config.audioInputStreams.clear(); + LocalAudioMixerConfiguration.MixedAudioStream remoteStream = new LocalAudioMixerConfiguration.MixedAudioStream(); + remoteStream.sourceType = AUDIO_SOURCE_REMOTE_CHANNEL; + remoteStream.channelId = channelId; + config.audioInputStreams.add(remoteStream); + + LocalAudioMixerConfiguration.MixedAudioStream remoteStream2 = new LocalAudioMixerConfiguration.MixedAudioStream(); + remoteStream2.sourceType = AUDIO_SOURCE_MICROPHONE; + remoteStream2.channelId = channelId; + config.audioInputStreams.add(remoteStream2); + engine.startLocalAudioMixer(config); + } + } + }); + } + + private void leaveSecondChannelAndStopAudioMixer() { + engine.stopAudioMixing(); + engine.leaveChannelEx(rtcConnection2, new LeaveChannelOptions()); + } + /** * @param channelId Specify the channel name that you want to join. 
* Users that input the same channel name join the same channel. @@ -486,6 +569,95 @@ private void joinChannel(String channelId) { } + private final IRtcEngineEventHandler secondHandler = new IRtcEngineEventHandler() { + + public static final String LABLE = "secondHandler"; + private List remoteUidList = new ArrayList<>(); + + @Override + public void onError(int error) { + Log.w(TAG, String.format(LABLE + " onError code %d message %s", error, RtcEngine.getErrorDescription(error))); + } + + @Override + public void onLeaveChannel(RtcStats stats) { + super.onLeaveChannel(stats); + Log.i(TAG, String.format(LABLE + " local user %d leaveChannel!", myUid)); + for (Integer i : remoteUidList) { + audioSeatManager.downSeat(i); + } + remoteUidList.clear(); + } + + @Override + public void onJoinChannelSuccess(String channel, int uid, int elapsed) { + Log.i(TAG, String.format(LABLE + " onJoinChannelSuccess channel %s uid %d", channel, uid)); + remoteUidList.clear(); + } + + @Override + public void onRemoteAudioStateChanged(int uid, int state, int reason, int elapsed) { + super.onRemoteAudioStateChanged(uid, state, reason, elapsed); + Log.i(TAG, LABLE + " onRemoteAudioStateChanged->" + uid + ", state->" + state + ", reason->" + reason); + } + + @Override + public void onUserJoined(int uid, int elapsed) { + super.onUserJoined(uid, elapsed); + Log.i(TAG, LABLE + " onUserJoined->" + uid); + runOnUIThread(() -> { + audioSeatManager.upRemoteSeat(uid); + remoteUidList.add(uid); + }); + } + + @Override + public void onUserOffline(int uid, int reason) { + Log.i(TAG, String.format(LABLE + " user %d offline! 
reason:%d", uid, reason)); + runOnUIThread(() -> { + audioSeatManager.downSeat(uid); + if (remoteUidList.contains(uid)) { + remoteUidList.remove((Integer) uid); + Log.d(TAG, "After removing UID, remoteUidList: " + remoteUidList); + } else { + Log.w(TAG, "UID not found in remoteUidList: " + uid); + } + }); + } + + @Override + public void onLocalAudioStats(LocalAudioStats stats) { + super.onLocalAudioStats(stats); + runOnUIThread(() -> { + Map _stats = new LinkedHashMap<>(); + _stats.put("sentSampleRate", stats.sentSampleRate + ""); + _stats.put("sentBitrate", stats.sentBitrate + " kbps"); + _stats.put("internalCodec", stats.internalCodec + ""); + _stats.put("audioDeviceDelay", stats.audioDeviceDelay + " ms"); + audioSeatManager.getLocalSeat().updateStats(_stats); + }); + } + + @Override + public void onRemoteAudioStats(RemoteAudioStats stats) { + super.onRemoteAudioStats(stats); + runOnUIThread(() -> { + Map _stats = new LinkedHashMap<>(); + _stats.put("numChannels", stats.numChannels + ""); + _stats.put("receivedBitrate", stats.receivedBitrate + " kbps"); + _stats.put("audioLossRate", stats.audioLossRate + ""); + _stats.put("jitterBufferDelay", stats.jitterBufferDelay + " ms"); + audioSeatManager.getRemoteSeat(stats.uid).updateStats(_stats); + }); + } + + @Override + public void onAudioRouteChanged(int routing) { + super.onAudioRouteChanged(routing); + Log.d(TAG, "secondHandler onAudioRouteChanged : " + routing); + } + }; + /** * IRtcEngineEventHandler is an abstract class providing default implementation. * The SDK uses this class to report to the app on SDK runtime events. 
@@ -524,6 +696,7 @@ public void onJoinChannelSuccess(String channel, int uid, int elapsed) { myUid = uid; joined = true; runOnUIThread(() -> { + setExChannelState(); mute.setEnabled(true); join.setEnabled(true); join.setText(getString(R.string.leave)); diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelVideo.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelVideo.java index f6e8e8ea2..89c2d1149 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelVideo.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/basic/JoinChannelVideo.java @@ -4,10 +4,8 @@ import static io.agora.rtc2.Constants.RENDER_MODE_HIDDEN; import static io.agora.rtc2.video.VideoEncoderConfiguration.STANDARD_BITRATE; -import android.Manifest; import android.annotation.SuppressLint; import android.content.Context; -import android.os.Build; import android.os.Bundle; import android.util.Log; import android.view.LayoutInflater; @@ -21,11 +19,6 @@ import androidx.annotation.NonNull; import androidx.annotation.Nullable; -import com.yanzhenjie.permission.AndPermission; -import com.yanzhenjie.permission.runtime.Permission; - -import java.util.ArrayList; -import java.util.List; import java.util.Map; import java.util.Random; import java.util.concurrent.ConcurrentHashMap; @@ -36,6 +29,7 @@ import io.agora.api.example.common.BaseFragment; import io.agora.api.example.common.widget.VideoReportLayout; import io.agora.api.example.utils.CommonUtil; +import io.agora.api.example.utils.PermissonUtils; import io.agora.api.example.utils.TokenUtils; import io.agora.rtc2.ChannelMediaOptions; import io.agora.rtc2.Constants; @@ -166,29 +160,15 @@ public void onClick(View v) { // call when join button hit String channelId = et_channel.getText().toString(); // Check permission - List permissionList = new ArrayList<>(); - permissionList.add(Permission.READ_EXTERNAL_STORAGE); 
- permissionList.add(Permission.WRITE_EXTERNAL_STORAGE); - permissionList.add(Permission.RECORD_AUDIO); - permissionList.add(Permission.CAMERA); - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { - permissionList.add(Manifest.permission.BLUETOOTH_CONNECT); - } - - String[] permissionArray = new String[permissionList.size()]; - permissionList.toArray(permissionArray); - - if (AndPermission.hasPermissions(this, permissionArray)) { - joinChannel(channelId); - return; - } - // Request permission - AndPermission.with(this).runtime().permission( - permissionArray - ).onGranted(permissions -> { - // Permissions Granted - joinChannel(channelId); - }).start(); + checkOrRequestPermisson(new PermissonUtils.PermissionResultCallback() { + @Override + public void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults) { + // Permissions Granted + if (allPermissionsGranted) { + joinChannel(channelId); + } + } + }); } else { joined = false; /*After joining a channel, the user must call the leaveChannel method to end the diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/service/MediaProjectionService.java b/Android/APIExample/app/src/main/java/io/agora/api/example/service/MediaProjectionService.java new file mode 100644 index 000000000..16ff7bdda --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/service/MediaProjectionService.java @@ -0,0 +1,90 @@ +package io.agora.api.example.service;// MediaProjectionService.java + +import android.app.Notification; +import android.app.NotificationChannel; +import android.app.NotificationManager; +import android.app.Service; +import android.content.Context; +import android.content.Intent; +import android.content.pm.ApplicationInfo; +import android.content.pm.ServiceInfo; +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.os.Build; +import android.os.IBinder; +import android.util.Log; + +import 
androidx.annotation.Nullable; + +import io.agora.api.example.R; + +public class MediaProjectionService extends Service { + private static final int NOTIFICATION_ID = 98779; + public static final String CHANNEL_ID = "MediaProjectionServiceChannel"; + private static final String TAG = MediaProjectionService.class.getSimpleName(); + + @Override + public void onCreate() { + super.onCreate(); + + } + + @Override + public int onStartCommand(Intent intent, int flags, int startId) { + Notification notification = getDefaultNotification(); + + try { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) { + this.startForeground(NOTIFICATION_ID, notification, ServiceInfo.FOREGROUND_SERVICE_TYPE_MEDIA_PROJECTION); + } else { + this.startForeground(NOTIFICATION_ID, notification); + } + } catch (Exception ex) { + Log.e(TAG, "", ex); + } + return START_STICKY; + } + + @Nullable + @Override + public IBinder onBind(Intent intent) { + return null; + } + + private Notification getDefaultNotification() { + ApplicationInfo appInfo = this.getApplicationContext().getApplicationInfo(); + String name = this.getApplicationContext().getPackageManager().getApplicationLabel(appInfo).toString(); + int icon = appInfo.icon; + + try { + Bitmap iconBitMap = BitmapFactory.decodeResource(this.getApplicationContext().getResources(), icon); + if (iconBitMap == null || iconBitMap.getByteCount() == 0) { + Log.w(TAG, "Couldn't load icon from icon of applicationInfo, use android default"); + icon = R.mipmap.ic_launcher; + } + } catch (Exception ex) { + Log.w(TAG, "Couldn't load icon from icon of applicationInfo, use android default"); + icon = R.mipmap.ic_launcher; + } + + if (Build.VERSION.SDK_INT >= 26) { + NotificationChannel mChannel = new NotificationChannel(CHANNEL_ID, name, NotificationManager.IMPORTANCE_DEFAULT); + NotificationManager mNotificationManager = (NotificationManager) this.getSystemService(Context.NOTIFICATION_SERVICE); + mNotificationManager.createNotificationChannel(mChannel); + } + + + 
Notification.Builder builder = new Notification.Builder(this) + .setContentText("Screen Sharing ...") + .setOngoing(true) + .setPriority(Notification.PRIORITY_HIGH) + .setSmallIcon(icon) + .setTicker(name) + .setWhen(System.currentTimeMillis()); + if (Build.VERSION.SDK_INT >= 26) { + builder.setChannelId(CHANNEL_ID); + } + + return builder.build(); + } +} \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/utils/PermissonUtils.java b/Android/APIExample/app/src/main/java/io/agora/api/example/utils/PermissonUtils.java new file mode 100644 index 000000000..603bbafac --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/utils/PermissonUtils.java @@ -0,0 +1,52 @@ +package io.agora.api.example.utils; + +import android.Manifest; +import android.content.Context; +import android.content.pm.PackageManager; +import android.os.Build; + +import androidx.core.content.ContextCompat; + +import java.util.ArrayList; +import java.util.List; + +public class PermissonUtils { + private static final String TAG = "PermissonUtils"; + + public static String[] getCommonPermission() { + List permissionList = new ArrayList<>(); + permissionList.add(Manifest.permission.READ_EXTERNAL_STORAGE); + permissionList.add(Manifest.permission.WRITE_EXTERNAL_STORAGE); + permissionList.add(Manifest.permission.RECORD_AUDIO); + permissionList.add(Manifest.permission.CAMERA); + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.S) { + permissionList.add(Manifest.permission.READ_PHONE_STATE); + permissionList.add(Manifest.permission.BLUETOOTH_CONNECT); + } + String[] permissionArray = new String[permissionList.size()]; + permissionList.toArray(permissionArray); + return permissionArray; + } + + //check array permission is granted + public static boolean checkPermissions(Context context, String[] permissions) { + for (String permission : permissions) { + if (ContextCompat.checkSelfPermission(context, permission) != 
PackageManager.PERMISSION_GRANTED) { + return false; + } + } + return true; + } + + //check single permission is granted + public static boolean checkPermission(Context context, String permission) { + return ContextCompat.checkSelfPermission(context, permission) == PackageManager.PERMISSION_GRANTED; + } + + + // Callback interface for permission results + public interface PermissionResultCallback { + void onPermissionsResult(boolean allPermissionsGranted, String[] permissions, int[] grantResults); + } + +} diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt index b3022ab34..aff0a8971 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt @@ -31,7 +31,7 @@ import io.agora.base.VideoFrame import io.agora.rtc2.Constants import io.agora.rtc2.RtcEngine -const val VERSION = "1.0.6" +const val VERSION = "1.0.7" enum class CaptureMode{ Agora, // 浣跨敤澹扮綉鍐呴儴鐨勭ゼ鏁版嵁鎺ュ彛杩涜澶勭悊 diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt index e5b35bcc2..14bf60a10 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt @@ -36,6 +36,8 @@ import io.agora.base.VideoFrame.I420Buffer import io.agora.base.VideoFrame.TextureBuffer import io.agora.base.internal.video.RendererCommon import io.agora.base.internal.video.YuvHelper +import io.agora.beautyapi.bytedance.utils.APIReporter +import io.agora.beautyapi.bytedance.utils.APIType import io.agora.beautyapi.bytedance.utils.AgoraImageHelper import 
io.agora.beautyapi.bytedance.utils.ImageUtil import io.agora.beautyapi.bytedance.utils.LogUtils @@ -51,8 +53,6 @@ import java.util.concurrent.Executors class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { private val TAG = "ByteDanceBeautyAPIImpl" - private val reportId = "scenarioAPI" - private val reportCategory = "beauty_android_$VERSION" private var beautyMode = 0 // 0: 鑷姩鏍规嵁buffer绫诲瀷鍒囨崲锛1锛氬浐瀹氫娇鐢∣ES绾圭悊锛2锛氬浐瀹氫娇鐢╥420 @@ -75,6 +75,9 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>()) private var frameWidth = 0 private var frameHeight = 0 + private val apiReporter by lazy { + APIReporter(APIType.BEAUTY, VERSION, config!!.rtcEngine) + } private enum class BeautyProcessType{ UNKNOWN, TEXTURE_OES, TEXTURE_2D, I420 @@ -95,7 +98,17 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { } LogUtils.i(TAG, "initialize >> config = $config") LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${RenderManager.getSDKVersion()}") - config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "$config", 0) + apiReporter.reportFuncEvent( + "initialize", + mapOf( + "captureMode" to config.captureMode, + "statsDuration" to config.statsDuration, + "statsEnable" to config.statsEnable, + "cameraConfig" to config.cameraConfig, + ), + emptyMap() + ) + apiReporter.startDurationEvent("initialize-release") return ErrorCode.ERROR_OK.value } @@ -114,7 +127,11 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { LogUtils.i(TAG, "enable >> skipFrame = $skipFrame") } this.enable = enable - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "$enable", 0) + apiReporter.reportFuncEvent( + "enable", + mapOf("enable" to enable), + emptyMap() + ) return ErrorCode.ERROR_OK.value } @@ -125,7 +142,11 @@ class ByteDanceBeautyAPIImpl : 
ByteDanceBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value } LogUtils.i(TAG, "setupLocalVideo >> view=$view, renderMode=$renderMode") - rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "view=$view, renderMode=$renderMode", 0) + apiReporter.reportFuncEvent( + "setupLocalVideo", + mapOf("view" to view, "renderMode" to renderMode), + emptyMap() + ) if (view is TextureView || view is SurfaceView) { val canvas = VideoCanvas(view, renderMode, 0) canvas.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED @@ -180,7 +201,15 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { } LogUtils.i(TAG, "setBeautyPreset >> preset = $preset") - conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "preset=$preset, beautyNodePath=$beautyNodePath, beauty4ItemNodePath=$beauty4ItemNodePath, reSharpNodePath=$reSharpNodePath", 0) + apiReporter.reportFuncEvent( + "setBeautyPreset", + mapOf( + "preset" to preset, + "beautyNodePath" to beautyNodePath, + "beauty4ItemNodePath" to beauty4ItemNodePath, + "reSharpNodePath" to reSharpNodePath + ), + emptyMap()) runOnProcessThread { val renderManager = @@ -247,6 +276,7 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { } override fun setParameters(key: String, value: String) { + apiReporter.reportFuncEvent("setParameters", mapOf("key" to key, "value" to value), emptyMap()) when (key) { "beauty_mode" -> beautyMode = value.toInt() } @@ -273,7 +303,11 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { override fun updateCameraConfig(config: CameraConfig): Int { LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config") cameraConfig = CameraConfig(config.frontMirror, config.backMirror) - this.config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "updateCameraConfig", "config=$config", 0) + apiReporter.reportFuncEvent( + "updateCameraConfig", + mapOf("config" to 
config), + emptyMap() + ) return ErrorCode.ERROR_OK.value } @@ -293,8 +327,9 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { if (conf.captureMode == CaptureMode.Agora) { conf.rtcEngine.registerVideoFrameObserver(null) } - conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0) LogUtils.i(TAG, "release") + apiReporter.reportFuncEvent("release", emptyMap(), emptyMap()) + apiReporter.endDurationEvent("initialize-release", emptyMap()) isReleased = true workerThreadExecutor.shutdown() textureBufferHelper?.let { @@ -364,7 +399,11 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { if(renderMirror) Constants.VIDEO_MIRROR_MODE_ENABLED else Constants.VIDEO_MIRROR_MODE_DISABLED ) } - skipFrame = 2 + textureBufferHelper?.invoke { + skipFrame = 2 + imageUtils?.release() + } + apiReporter.startDurationEvent("first_beauty_frame") return false } @@ -432,6 +471,8 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return false } + apiReporter.endDurationEvent("first_beauty_frame", emptyMap()) + val processBuffer: TextureBuffer = textureBufferHelper?.wrapTextureBuffer( videoFrame.rotatedWidth, videoFrame.rotatedHeight, @@ -520,7 +561,7 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { videoFrame.timestampNs ) if (!success) { - return@Callable -1 + return@Callable srcTexture } return@Callable dstTexture }) @@ -583,7 +624,7 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return@Callable if (success) { dstTexture } else { - -1 + srcTexture } }) } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/APIReporter.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/APIReporter.kt new file mode 100644 index 000000000..156978e8c --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/APIReporter.kt @@ -0,0 +1,139 @@ +package 
io.agora.beautyapi.bytedance.utils + +import android.util.Log +import io.agora.rtc2.Constants +import io.agora.rtc2.RtcEngine +import org.json.JSONObject + +enum class APIType(val value: Int) { + KTV(1), // K姝 + CALL(2), // 鍛煎彨杩為害 + BEAUTY(3), // 缇庨 + VIDEO_LOADER(4), // 绉掑紑绉掑垏 + PK(5), // 鍥㈡垬 + VIRTUAL_SPACE(6), // + SCREEN_SPACE(7), // 灞忓箷鍏变韩 + AUDIO_SCENARIO(8) // 闊抽 +} + +enum class ApiEventType(val value: Int) { + API(0), + COST(1), + CUSTOM(2) +} + +object ApiEventKey { + const val TYPE = "type" + const val DESC = "desc" + const val API_VALUE = "apiValue" + const val TIMESTAMP = "ts" + const val EXT = "ext" +} + +object ApiCostEvent { + const val CHANNEL_USAGE = "channelUsage" //棰戦亾浣跨敤鑰楁椂 + const val FIRST_FRAME_ACTUAL = "firstFrameActual" //棣栧抚瀹為檯鑰楁椂 + const val FIRST_FRAME_PERCEIVED = "firstFramePerceived" //棣栧抚鎰熷畼鑰楁椂 +} + +class APIReporter( + private val type: APIType, + private val version: String, + private val rtcEngine: RtcEngine +) { + private val tag = "APIReporter" + private val messageId = "agora:scenarioAPI" + private val durationEventStartMap = HashMap() + private val category = "${type.value}_Android_$version" + + init { + configParameters() + } + + // 涓婃姤鏅氬満鏅寲API + fun reportFuncEvent(name: String, value: Map, ext: Map) { + Log.d(tag, "reportFuncEvent: $name value: $value ext: $ext") + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.API.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.API_VALUE to value, ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0) + } + + fun startDurationEvent(name: String) { + Log.d(tag, "startDurationEvent: $name") + durationEventStartMap[name] = getCurrentTs() + } + + fun endDurationEvent(name: String, ext: Map) { + Log.d(tag, "endDurationEvent: $name") + val beginTs = durationEventStartMap[name] ?: 
return + durationEventStartMap.remove(name) + val ts = getCurrentTs() + val cost = (ts - beginTs).toInt() + + innerReportCostEvent(ts, name, cost, ext) + } + + // 涓婃姤鑰楁椂鎵撶偣淇℃伅 + fun reportCostEvent(name: String, cost: Int, ext: Map) { + durationEventStartMap.remove(name) + innerReportCostEvent( + ts = getCurrentTs(), + name = name, + cost = cost, + ext = ext + ) + } + + // 涓婃姤鑷畾涔変俊鎭 + fun reportCustomEvent(name: String, ext: Map) { + Log.d(tag, "reportCustomEvent: $name ext: $ext") + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.CUSTOM.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0) + } + + fun writeLog(content: String, level: Int) { + rtcEngine.writeLog(level, content) + } + + fun cleanCache() { + durationEventStartMap.clear() + } + + // ---------------------- private ---------------------- + + private fun configParameters() { + //rtcEngine.setParameters("{\"rtc.qos_for_test_purpose\": true}") //娴嬭瘯鐜浣跨敤 + // 鏁版嵁涓婃姤 + rtcEngine.setParameters("{\"rtc.direct_send_custom_event\": true}") + // 鏃ュ織鍐欏叆 + rtcEngine.setParameters("{\"rtc.log_external_input\": true}") + } + + private fun getCurrentTs(): Long { + return System.currentTimeMillis() + } + + private fun innerReportCostEvent(ts: Long, name: String, cost: Int, ext: Map) { + Log.d(tag, "reportCostEvent: $name cost: $cost ms ext: $ext") + writeLog("reportCostEvent: $name cost: $cost ms", Constants.LOG_LEVEL_INFO) + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.COST.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.TIMESTAMP to ts, ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, cost) + } + + 
private fun convertToJSONString(dictionary: Map): String? { + return try { + JSONObject(dictionary).toString() + } catch (e: Exception) { + writeLog("[$tag]convert to json fail: $e dictionary: $dictionary", Constants.LOG_LEVEL_WARNING) + null + } + } +} \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt index 9580a6071..813e16ee8 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt @@ -56,9 +56,9 @@ class AgoraImageHelper { GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer.frameBufferId) if(texType == VideoFrame.TextureBuffer.Type.OES){ - drawer.drawOes(texId, 0, transform, width, height, 0, 0, width, height, 0) + drawer.drawOes(texId,0, transform, width, height, 0, 0, width, height,0) }else{ - drawer.drawRgb(texId, 0, transform, width, height, 0, 0, width, height, 0) + drawer.drawRgb(texId,0, transform, width, height, 0, 0, width, height,0) } GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0) GLES20.glFinish() diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt index 1058ea229..4da67d5a5 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt @@ -31,7 +31,7 @@ import io.agora.base.VideoFrame import io.agora.rtc2.Constants import io.agora.rtc2.RtcEngine -const val VERSION = "1.0.6" +const val VERSION = "1.0.7" enum class CaptureMode{ Agora, // 浣跨敤澹扮綉鍐呴儴鐨勭ゼ鏁版嵁鎺ュ彛杩涜澶勭悊 diff --git 
a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt index dd51417aa..710b392c3 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt @@ -47,6 +47,8 @@ import io.agora.base.VideoFrame.SourceType import io.agora.base.VideoFrame.TextureBuffer import io.agora.base.internal.video.EglBase import io.agora.base.internal.video.YuvHelper +import io.agora.beautyapi.faceunity.utils.APIReporter +import io.agora.beautyapi.faceunity.utils.APIType import io.agora.beautyapi.faceunity.utils.FuDeviceUtils import io.agora.beautyapi.faceunity.utils.LogUtils import io.agora.beautyapi.faceunity.utils.StatsHelper @@ -63,13 +65,10 @@ import java.util.concurrent.Callable class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { private val TAG = "FaceUnityBeautyAPIImpl" - private val reportId = "scenarioAPI" - private val reportCategory = "beauty_android_$VERSION" private var beautyMode = 0 // 0: 鑷姩鏍规嵁buffer绫诲瀷鍒囨崲锛1锛氬浐瀹氫娇鐢∣ES绾圭悊锛2锛氬浐瀹氫娇鐢╥420锛3: 鍗曠汗鐞嗘ā寮 - private var enableTextureAsync = true // 鏄惁寮鍚汗鐞+寮傛缂撳瓨澶勭悊銆傚浜嶨PU鎬ц兘濂界殑鎵嬫満鍙互鍑忓皬缇庨澶勭悊鑰楁椂锛屽浜庝腑绔満寮鍚悗鏁堟灉涔熶笉鏄庢樉銆 + private var enableTextureAsync = false // 鏄惁寮鍚汗鐞+寮傛缂撳瓨澶勭悊銆傚浜嶨PU鎬ц兘濂界殑鎵嬫満鍙互鍑忓皬缇庨澶勭悊鑰楁椂锛屽浜庝腑绔満寮鍚悗鏁堟灉涔熶笉鏄庢樉銆 - private var textureBufferHelper: TextureBufferHelper? = null - private var wrapTextureBufferHelper: TextureBufferHelper? = null + private var beautyTextureBufferHelper: TextureBufferHelper? = null private var byteBuffer: ByteBuffer? = null private var byteArray: ByteArray? = null private var config: Config? 
= null @@ -79,7 +78,6 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { private var captureMirror = false private var renderMirror = false private val identityMatrix = Matrix() - private var mTextureProcessHelper: TextureProcessHelper? = null private var statsHelper: StatsHelper? = null private var skipFrame = 0 private enum class ProcessSourceType{ @@ -97,6 +95,13 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>()) private val transformGLFrameBuffer = GLFrameBuffer() + private val outGLFrameBuffer = GLFrameBuffer() + private val apiReporter by lazy { + APIReporter(APIType.BEAUTY, VERSION, config!!.rtcEngine) + } + + private var asyncTextureProcessHelper: TextureProcessHelper? = null + private var asyncTextureBufferHelper: TextureBufferHelper? = null override fun initialize(config: Config): Int { if (this.config != null) { @@ -123,7 +128,17 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } } LogUtils.i(TAG, "initialize >> FuDeviceUtils deviceLevel=$deviceLevel") - config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "config=$config, deviceLevel=$deviceLevel", 0) + apiReporter.reportFuncEvent( + "initialize", + mapOf( + "captureMode" to config.cameraConfig, + "statsDuration" to config.statsDuration, + "statsEnable" to config.statsEnable, + "cameraConfig" to config.cameraConfig, + ), + emptyMap() + ) + apiReporter.startDurationEvent("initialize-release") return ErrorCode.ERROR_OK.value } @@ -141,8 +156,11 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { skipFrame = 2 LogUtils.i(TAG, "enable >> skipFrame = $skipFrame") } - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "enable=$enable", 0) - + apiReporter.reportFuncEvent( + "enable", + mapOf("enable" to 
enable), + emptyMap() + ) if(this.enable != enable){ this.enable = enable enableChange = true @@ -159,7 +177,11 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } LogUtils.i(TAG, "setupLocalVideo >> view=$view, renderMode=$renderMode") localVideoRenderMode = renderMode - rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "view=$view, renderMode=$renderMode", 0) + apiReporter.reportFuncEvent( + "setupLocalVideo", + mapOf("view" to view, "renderMode" to renderMode), + emptyMap() + ) if (view is TextureView || view is SurfaceView) { val canvas = VideoCanvas(view, renderMode, 0) canvas.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED @@ -193,8 +215,11 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { override fun updateCameraConfig(config: CameraConfig): Int { LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config") cameraConfig = CameraConfig(config.frontMirror, config.backMirror) - this.config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "updateCameraConfig", "config=$config", 0) - + apiReporter.reportFuncEvent( + "updateCameraConfig", + mapOf("config" to config), + emptyMap() + ) return ErrorCode.ERROR_OK.value } @@ -207,10 +232,10 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { LogUtils.e(TAG, "runOnProcessThread >> The beauty api has been released!") return } - if (textureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) { + if (beautyTextureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) { run.invoke() - } else if (textureBufferHelper != null) { - textureBufferHelper?.handler?.post(run) + } else if (beautyTextureBufferHelper != null) { + beautyTextureBufferHelper?.handler?.post(run) } else { pendingProcessRunList.add(run) } @@ -219,6 +244,10 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { override fun isFrontCamera() = isFrontCamera override fun 
setParameters(key: String, value: String) { + apiReporter.reportFuncEvent("setParameters", + mapOf("key" to key, "value" to value), + emptyMap() + ) when(key){ "beauty_mode" -> beautyMode = value.toInt() "enableTextureAsync" -> enableTextureAsync = value.toBoolean() @@ -237,8 +266,10 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } LogUtils.i(TAG, "setBeautyPreset >> preset = $preset") - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "preset=$preset", 0) - + apiReporter.reportFuncEvent("setBeautyPreset", + mapOf("preset" to preset), + emptyMap() + ) val recommendFaceBeauty = FaceBeauty(FUBundleData("graphics" + File.separator + "face_beautification.bundle")) if (preset == BeautyPreset.DEFAULT) { recommendFaceBeauty.filterName = FaceBeautyFilterEnum.FENNEN_1 @@ -304,24 +335,27 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { if (conf.captureMode == CaptureMode.Agora) { conf.rtcEngine.registerVideoFrameObserver(null) } - conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0) + apiReporter.reportFuncEvent("release", emptyMap(), emptyMap()) + apiReporter.endDurationEvent("initialize-release", emptyMap()) isReleased = true - textureBufferHelper?.let { - textureBufferHelper = null + beautyTextureBufferHelper?.let { + beautyTextureBufferHelper = null it.handler.removeCallbacksAndMessages(null) it.invoke { fuRenderer.release() - mTextureProcessHelper?.release() - mTextureProcessHelper = null transformGLFrameBuffer.release() + outGLFrameBuffer.release() null } - // it.handler.looper.quit() it.dispose() } - wrapTextureBufferHelper?.let { - wrapTextureBufferHelper = null + asyncTextureBufferHelper?.let { + asyncTextureBufferHelper = null + it.invoke { + asyncTextureProcessHelper?.release() + asyncTextureProcessHelper = null + } it.dispose() } statsHelper?.reset() @@ -378,10 +412,13 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { 
if(renderMirror) Constants.VIDEO_MIRROR_MODE_ENABLED else Constants.VIDEO_MIRROR_MODE_DISABLED ) } - textureBufferHelper?.invoke { - mTextureProcessHelper?.reset() + asyncTextureBufferHelper?.invoke { + asyncTextureProcessHelper?.reset() + } + beautyTextureBufferHelper?.invoke { + skipFrame = 2 + outGLFrameBuffer.resetTexture() } - skipFrame = 2 return false } @@ -394,8 +431,8 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { if(enableChange){ enableChange = false - textureBufferHelper?.invoke { - mTextureProcessHelper?.reset() + asyncTextureBufferHelper?.invoke { + asyncTextureProcessHelper?.reset() } return false } @@ -404,12 +441,12 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return true } - if (textureBufferHelper == null) { - textureBufferHelper = TextureBufferHelper.create( + if (beautyTextureBufferHelper == null) { + beautyTextureBufferHelper = TextureBufferHelper.create( "FURender", EglBaseProvider.instance().rootEglBase.eglBaseContext ) - textureBufferHelper?.invoke { + beautyTextureBufferHelper?.invoke { synchronized(pendingProcessRunList){ val iterator = pendingProcessRunList.iterator() while (iterator.hasNext()){ @@ -419,13 +456,7 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } } LogUtils.i(TAG, "processBeauty >> create texture buffer, beautyMode=$beautyMode") - } - if (wrapTextureBufferHelper == null) { - wrapTextureBufferHelper = TextureBufferHelper.create( - "FURenderWrap", - EglBaseProvider.instance().rootEglBase.eglBaseContext - ) - LogUtils.i(TAG, "processBeauty >> create texture buffer wrap, beautyMode=$beautyMode") + apiReporter.startDurationEvent("first_beauty_frame") } val startTime = System.currentTimeMillis() val processTexId = when (beautyMode) { @@ -456,7 +487,16 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return false } - val processBuffer: TextureBuffer = wrapTextureBufferHelper?.wrapTextureBuffer( + 
apiReporter.endDurationEvent("first_beauty_frame", + mapOf( + "width" to videoFrame.rotatedWidth, + "height" to videoFrame.rotatedHeight, + "camera_facing" to videoFrame.sourceType.name, + "buffer_type" to videoFrame.buffer::class.java.simpleName, + ) + ) + + val processBuffer: TextureBuffer = beautyTextureBufferHelper?.wrapTextureBuffer( videoFrame.rotatedWidth, videoFrame.rotatedHeight, TextureBuffer.Type.RGB, @@ -481,16 +521,21 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } private fun processBeautySingleTextureAsync(videoFrame: VideoFrame): Int { - val texBufferHelper = wrapTextureBufferHelper ?: return -1 + val texBufferHelper = beautyTextureBufferHelper ?: return -1 val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1 + if (asyncTextureBufferHelper == null) { + asyncTextureBufferHelper = TextureBufferHelper.create( + "FURenderAsync", + EglBaseProvider.instance().rootEglBase.eglBaseContext + ) + LogUtils.i(TAG, "processBeauty >> create texture buffer wrap, beautyMode=$beautyMode") + } + when(textureBuffer.type){ TextureBuffer.Type.OES -> { if(currProcessSourceType != ProcessSourceType.TEXTURE_OES_ASYNC){ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES_ASYNC}") - if (currProcessSourceType != ProcessSourceType.UNKNOWN) { - skipFrame = 3 - } currProcessSourceType = ProcessSourceType.TEXTURE_OES_ASYNC return -1 } @@ -498,19 +543,15 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { else -> { if(currProcessSourceType != ProcessSourceType.TEXTURE_2D_ASYNC){ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D_ASYNC}") - if (currProcessSourceType != ProcessSourceType.UNKNOWN) { - skipFrame = 3 - } currProcessSourceType = ProcessSourceType.TEXTURE_2D_ASYNC - skipFrame = 6 return -1 } } } - if(mTextureProcessHelper == null) { - mTextureProcessHelper = 
TextureProcessHelper() - mTextureProcessHelper?.setFilter { frame -> + if(asyncTextureProcessHelper == null) { + asyncTextureProcessHelper = TextureProcessHelper() + asyncTextureProcessHelper?.setFilter { frame -> val fuRenderKit = config?.fuRenderKit ?: return@setFilter -1 val input = FURenderInputData(frame.width, frame.height) @@ -522,22 +563,22 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { input.renderConfig.let { if (isFront) { it.cameraFacing = CameraFacingEnum.CAMERA_FRONT - it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 - it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 - it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL - it.deviceOrientation = 270 + it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.outputMatrix = FUTransformMatrixEnum.CCROT0 + it.deviceOrientation = 90 } else { it.cameraFacing = CameraFacingEnum.CAMERA_BACK - it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 - it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 - it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL - it.deviceOrientation = 270 + it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.outputMatrix = FUTransformMatrixEnum.CCROT0 + it.deviceOrientation = 90 } } if (isReleased) { return@setFilter -1 } - val ret = textureBufferHelper?.invoke { + val ret = texBufferHelper.invoke { synchronized(EglBase.lock){ return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1 } @@ -546,12 +587,12 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } } - return texBufferHelper.invoke { + return asyncTextureBufferHelper?.invoke { if(isReleased){ return@invoke -1 } - return@invoke mTextureProcessHelper?.process( + return@invoke asyncTextureProcessHelper?.process( textureBuffer.textureId, when (textureBuffer.type) { 
TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES @@ -564,20 +605,17 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { isFrontCamera, (isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror) )?: -1 - } + } ?: -1 } private fun processBeautySingleTexture(videoFrame: VideoFrame): Int { - val texBufferHelper = textureBufferHelper ?: return -1 + val texBufferHelper = beautyTextureBufferHelper ?: return -1 val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1 when(textureBuffer.type){ TextureBuffer.Type.OES -> { if(currProcessSourceType != ProcessSourceType.TEXTURE_OES){ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES}") - if (currProcessSourceType != ProcessSourceType.UNKNOWN) { - skipFrame = 3 - } currProcessSourceType = ProcessSourceType.TEXTURE_OES return -1 } @@ -585,11 +623,7 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { else -> { if(currProcessSourceType != ProcessSourceType.TEXTURE_2D){ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D}") - if (currProcessSourceType != ProcessSourceType.UNKNOWN) { - skipFrame = 3 - } currProcessSourceType = ProcessSourceType.TEXTURE_2D - skipFrame = 6 return -1 } } @@ -627,34 +661,35 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { input.renderConfig.let { if (isFront) { it.cameraFacing = CameraFacingEnum.CAMERA_FRONT - it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 - it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 - it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL - it.deviceOrientation = 270 + it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.outputMatrix = FUTransformMatrixEnum.CCROT0 + it.deviceOrientation = 90 } else { it.cameraFacing = 
CameraFacingEnum.CAMERA_BACK - it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 - it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 - it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL - it.deviceOrientation = 270 + it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.outputMatrix = FUTransformMatrixEnum.CCROT0 + it.deviceOrientation = 90 } } if (isReleased) { return@invoke -1 } + var fuTexId = -1 synchronized(EglBase.lock){ - return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1 + fuTexId = fuRenderKit.renderWithInput(input).texture?.texId ?: -1 } + outGLFrameBuffer.setSize(videoFrame.rotatedWidth, videoFrame.rotatedHeight) + outGLFrameBuffer.resetTransform() + return@invoke outGLFrameBuffer.process(fuTexId, GLES20.GL_TEXTURE_2D) } } private fun processBeautySingleBuffer(videoFrame: VideoFrame): Int { - val texBufferHelper = textureBufferHelper ?: return -1 + val texBufferHelper = beautyTextureBufferHelper ?: return -1 if(currProcessSourceType != ProcessSourceType.I420){ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.I420}") - if (currProcessSourceType != ProcessSourceType.UNKNOWN) { - skipFrame = 3 - } currProcessSourceType = ProcessSourceType.I420 return -1 } @@ -748,15 +783,20 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } } - mTextureProcessHelper?.let { - if(it.size() > 0){ - it.reset() - return@Callable -1 + if ((asyncTextureProcessHelper?.size() ?: 0) > 0) { + asyncTextureBufferHelper?.invoke { + asyncTextureProcessHelper?.reset() } + return@Callable -1 } - synchronized(EglBase.lock){ - return@Callable fuRenderKit.renderWithInput(input).texture?.texId ?: -1 + + var fuTexId = -1 + synchronized(EglBase.lock) { + fuTexId = fuRenderKit.renderWithInput(input).texture?.texId ?: -1 } + outGLFrameBuffer.setSize(videoFrame.rotatedWidth, 
videoFrame.rotatedHeight) + outGLFrameBuffer.resetTransform() + return@Callable outGLFrameBuffer.process(fuTexId, GLES20.GL_TEXTURE_2D); }) } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/APIReporter.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/APIReporter.kt new file mode 100644 index 000000000..6df300520 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/APIReporter.kt @@ -0,0 +1,139 @@ +package io.agora.beautyapi.faceunity.utils + +import android.util.Log +import io.agora.rtc2.Constants +import io.agora.rtc2.RtcEngine +import org.json.JSONObject + +enum class APIType(val value: Int) { + KTV(1), // K姝 + CALL(2), // 鍛煎彨杩為害 + BEAUTY(3), // 缇庨 + VIDEO_LOADER(4), // 绉掑紑绉掑垏 + PK(5), // 鍥㈡垬 + VIRTUAL_SPACE(6), // + SCREEN_SPACE(7), // 灞忓箷鍏变韩 + AUDIO_SCENARIO(8) // 闊抽 +} + +enum class ApiEventType(val value: Int) { + API(0), + COST(1), + CUSTOM(2) +} + +object ApiEventKey { + const val TYPE = "type" + const val DESC = "desc" + const val API_VALUE = "apiValue" + const val TIMESTAMP = "ts" + const val EXT = "ext" +} + +object ApiCostEvent { + const val CHANNEL_USAGE = "channelUsage" //棰戦亾浣跨敤鑰楁椂 + const val FIRST_FRAME_ACTUAL = "firstFrameActual" //棣栧抚瀹為檯鑰楁椂 + const val FIRST_FRAME_PERCEIVED = "firstFramePerceived" //棣栧抚鎰熷畼鑰楁椂 +} + +class APIReporter( + private val type: APIType, + private val version: String, + private val rtcEngine: RtcEngine +) { + private val tag = "APIReporter" + private val messageId = "agora:scenarioAPI" + private val durationEventStartMap = HashMap() + private val category = "${type.value}_Android_$version" + + init { + configParameters() + } + + // 涓婃姤鏅氬満鏅寲API + fun reportFuncEvent(name: String, value: Map, ext: Map) { + Log.d(tag, "reportFuncEvent: $name value: $value ext: $ext") + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.API.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.API_VALUE to value, 
ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0) + } + + fun startDurationEvent(name: String) { + Log.d(tag, "startDurationEvent: $name") + durationEventStartMap[name] = getCurrentTs() + } + + fun endDurationEvent(name: String, ext: Map) { + Log.d(tag, "endDurationEvent: $name") + val beginTs = durationEventStartMap[name] ?: return + durationEventStartMap.remove(name) + val ts = getCurrentTs() + val cost = (ts - beginTs).toInt() + + innerReportCostEvent(ts, name, cost, ext) + } + + // 涓婃姤鑰楁椂鎵撶偣淇℃伅 + fun reportCostEvent(name: String, cost: Int, ext: Map) { + durationEventStartMap.remove(name) + innerReportCostEvent( + ts = getCurrentTs(), + name = name, + cost = cost, + ext = ext + ) + } + + // 涓婃姤鑷畾涔変俊鎭 + fun reportCustomEvent(name: String, ext: Map) { + Log.d(tag, "reportCustomEvent: $name ext: $ext") + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.CUSTOM.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0) + } + + fun writeLog(content: String, level: Int) { + rtcEngine.writeLog(level, content) + } + + fun cleanCache() { + durationEventStartMap.clear() + } + + // ---------------------- private ---------------------- + + private fun configParameters() { + //rtcEngine.setParameters("{\"rtc.qos_for_test_purpose\": true}") //娴嬭瘯鐜浣跨敤 + // 鏁版嵁涓婃姤 + rtcEngine.setParameters("{\"rtc.direct_send_custom_event\": true}") + // 鏃ュ織鍐欏叆 + rtcEngine.setParameters("{\"rtc.log_external_input\": true}") + } + + private fun getCurrentTs(): Long { + return System.currentTimeMillis() + } + + private fun innerReportCostEvent(ts: Long, name: String, 
cost: Int, ext: Map) { + Log.d(tag, "reportCostEvent: $name cost: $cost ms ext: $ext") + writeLog("reportCostEvent: $name cost: $cost ms", Constants.LOG_LEVEL_INFO) + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.COST.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.TIMESTAMP to ts, ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, cost) + } + + private fun convertToJSONString(dictionary: Map): String? { + return try { + JSONObject(dictionary).toString() + } catch (e: Exception) { + writeLog("[$tag]convert to json fail: $e dictionary: $dictionary", Constants.LOG_LEVEL_WARNING) + null + } + } +} \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java index 7b1c1dd86..5815b4e78 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java @@ -115,9 +115,9 @@ public int process(int textureId, int textureType) { synchronized (EglBase.lock){ if(textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES){ - drawer.drawOes(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight, 0); + drawer.drawOes(textureId,0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight,0); }else{ - drawer.drawRgb(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight, 0); + drawer.drawRgb(textureId,0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight,0); } } @@ -201,4 +201,7 @@ private void bindFramebuffer(int textureId) { GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE); } + public void resetTexture() { + deleteTexture(); + } } diff --git 
a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt index 1451750b4..439a185cc 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt @@ -173,7 +173,7 @@ class TextureProcessHelper( future = futureQueue.poll() } glTextureBufferQueueIn.reset() -// glFrameBuffer.release() + glFrameBuffer.resetTexture() executeSync { glTextureBufferQueueOut.reset() } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt index 0ecec0df9..2606617ff 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt @@ -32,7 +32,7 @@ import io.agora.base.VideoFrame import io.agora.rtc2.Constants import io.agora.rtc2.RtcEngine -const val VERSION = "1.0.6" +const val VERSION = "1.0.7" enum class CaptureMode{ Agora, // 浣跨敤澹扮綉鍐呴儴鐨勭ゼ鏁版嵁鎺ュ彛杩涜澶勭悊 diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt index ec346087a..efcdf6757 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt @@ -40,7 +40,10 @@ import io.agora.base.VideoFrame.I420Buffer import io.agora.base.VideoFrame.SourceType import io.agora.base.VideoFrame.TextureBuffer import io.agora.base.internal.video.RendererCommon +import io.agora.base.internal.video.YuvConverter import 
io.agora.base.internal.video.YuvHelper +import io.agora.beautyapi.sensetime.utils.APIReporter +import io.agora.beautyapi.sensetime.utils.APIType import io.agora.beautyapi.sensetime.utils.LogUtils import io.agora.beautyapi.sensetime.utils.StatsHelper import io.agora.beautyapi.sensetime.utils.processor.IBeautyProcessor @@ -57,8 +60,6 @@ import java.util.concurrent.Executors class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { private val TAG = "SenseTimeBeautyAPIImpl" - private val reportId = "scenarioAPI" - private val reportCategory = "beauty_android_$VERSION" private var beautyMode = 0 // 0: 鑷姩鏍规嵁buffer绫诲瀷鍒囨崲锛1锛氬浐瀹氫娇鐢∣ES绾圭悊锛2锛氬浐瀹氫娇鐢╥420 private var textureBufferHelper: TextureBufferHelper? = null @@ -77,6 +78,7 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { private var cameraConfig = CameraConfig() private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>()) + private val apiReporter by lazy { APIReporter(APIType.BEAUTY, VERSION, config!!.rtcEngine) } private enum class ProcessSourceType{ UNKNOWN, @@ -104,8 +106,17 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { cameraConfig = CameraConfig(config.cameraConfig.frontMirror, config.cameraConfig.backMirror) LogUtils.i(TAG, "initialize >> config = $config") LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${STCommonNative.getVersion()}") - // config.rtcEngine.setParameters("{\"rtc.qos_for_test_purpose\":101}") // 瀹炴椂涓婃姤 - config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "config=$config", 0) + apiReporter.reportFuncEvent( + "initialize", + mapOf( + "captureMode" to config.captureMode, + "statsDuration" to config.statsDuration, + "statsEnable" to config.statsEnable, + "cameraConfig" to config.cameraConfig, + ), + emptyMap() + ) + apiReporter.startDurationEvent("initialize-release") return 
ErrorCode.ERROR_OK.value } @@ -123,7 +134,11 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { skipFrame = 2 LogUtils.i(TAG, "enable >> skipFrame = $skipFrame") } - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "enable=$enable", 0) + apiReporter.reportFuncEvent( + "enable", + mapOf("enable" to enable), + emptyMap() + ) if(this.enable != enable){ this.enable = enable @@ -142,7 +157,11 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { } LogUtils.i(TAG, "setupLocalVideo >> view=$view, renderMode=$renderMode") localVideoRenderMode = renderMode - rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "view=$view, renderMode=$renderMode", 0) + apiReporter.reportFuncEvent( + "setupLocalVide", + mapOf("view" to view, "renderMode" to renderMode), + emptyMap() + ) if(view is TextureView || view is SurfaceView){ val canvas = VideoCanvas(view, renderMode, 0) canvas.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED @@ -184,7 +203,11 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_HAS_RELEASED.value } LogUtils.i(TAG, "setBeautyPreset >> preset = $preset") - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "preset=$preset", 0) + apiReporter.reportFuncEvent( + "setBeautyPreset", + mapOf("preset" to preset), + emptyMap() + ) val enable = preset == BeautyPreset.DEFAULT workerThreadExecutor.submit { @@ -316,7 +339,11 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { override fun updateCameraConfig(config: CameraConfig): Int { LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config") cameraConfig = CameraConfig(config.frontMirror, config.backMirror) - this.config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "updateCameraConfig", "config=$config", 0) + apiReporter.reportFuncEvent( + "updateCameraConfig", + mapOf("config" to 
config), + emptyMap() + ) return ErrorCode.ERROR_OK.value } @@ -324,6 +351,7 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { override fun isFrontCamera() = isFrontCamera override fun setParameters(key: String, value: String) { + apiReporter.reportFuncEvent("setParameters", mapOf("key" to key, "value" to value), emptyMap()) when(key){ "beauty_mode" -> beautyMode = value.toInt() } @@ -342,7 +370,8 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { if (conf.captureMode == CaptureMode.Agora) { conf.rtcEngine.registerVideoFrameObserver(null) } - conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0) + apiReporter.reportFuncEvent("release", emptyMap(), emptyMap()) + apiReporter.endDurationEvent("initialize-release", emptyMap()) LogUtils.i(TAG, "release") isReleased = true @@ -414,6 +443,7 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { textureBufferHelper?.invoke { beautyProcessor?.reset() } + apiReporter.startDurationEvent("first_beauty_frame") return false } @@ -475,6 +505,8 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { return false } + apiReporter.endDurationEvent("first_beauty_frame", emptyMap()) + val processBuffer: TextureBuffer = textureBufferHelper?.wrapTextureBuffer( videoFrame.rotatedWidth, videoFrame.rotatedHeight, @@ -632,6 +664,8 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { private fun getNV21Buffer(videoFrame: VideoFrame) : ByteArray? { val buffer = videoFrame.buffer + YuvConverter.setEnablePboOpt(true) + YuvConverter.setEnableConvertPerLog(true) val i420Buffer = buffer as? 
I420Buffer ?: buffer.toI420() val width = i420Buffer.width val height = i420Buffer.height diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/APIReporter.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/APIReporter.kt new file mode 100644 index 000000000..bbef8261c --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/APIReporter.kt @@ -0,0 +1,139 @@ +package io.agora.beautyapi.sensetime.utils + +import android.util.Log +import io.agora.rtc2.Constants +import io.agora.rtc2.RtcEngine +import org.json.JSONObject + +enum class APIType(val value: Int) { + KTV(1), // K姝 + CALL(2), // 鍛煎彨杩為害 + BEAUTY(3), // 缇庨 + VIDEO_LOADER(4), // 绉掑紑绉掑垏 + PK(5), // 鍥㈡垬 + VIRTUAL_SPACE(6), // + SCREEN_SPACE(7), // 灞忓箷鍏变韩 + AUDIO_SCENARIO(8) // 闊抽 +} + +enum class ApiEventType(val value: Int) { + API(0), + COST(1), + CUSTOM(2) +} + +object ApiEventKey { + const val TYPE = "type" + const val DESC = "desc" + const val API_VALUE = "apiValue" + const val TIMESTAMP = "ts" + const val EXT = "ext" +} + +object ApiCostEvent { + const val CHANNEL_USAGE = "channelUsage" //棰戦亾浣跨敤鑰楁椂 + const val FIRST_FRAME_ACTUAL = "firstFrameActual" //棣栧抚瀹為檯鑰楁椂 + const val FIRST_FRAME_PERCEIVED = "firstFramePerceived" //棣栧抚鎰熷畼鑰楁椂 +} + +class APIReporter( + private val type: APIType, + private val version: String, + private val rtcEngine: RtcEngine +) { + private val tag = "APIReporter" + private val messageId = "agora:scenarioAPI" + private val durationEventStartMap = HashMap() + private val category = "${type.value}_Android_$version" + + init { + configParameters() + } + + // 涓婃姤鏅氬満鏅寲API + fun reportFuncEvent(name: String, value: Map, ext: Map) { + Log.d(tag, "reportFuncEvent: $name value: $value ext: $ext") + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.API.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.API_VALUE to value, ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to 
ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0) + } + + fun startDurationEvent(name: String) { + Log.d(tag, "startDurationEvent: $name") + durationEventStartMap[name] = getCurrentTs() + } + + fun endDurationEvent(name: String, ext: Map) { + Log.d(tag, "endDurationEvent: $name") + val beginTs = durationEventStartMap[name] ?: return + durationEventStartMap.remove(name) + val ts = getCurrentTs() + val cost = (ts - beginTs).toInt() + + innerReportCostEvent(ts, name, cost, ext) + } + + // 涓婃姤鑰楁椂鎵撶偣淇℃伅 + fun reportCostEvent(name: String, cost: Int, ext: Map) { + durationEventStartMap.remove(name) + innerReportCostEvent( + ts = getCurrentTs(), + name = name, + cost = cost, + ext = ext + ) + } + + // 涓婃姤鑷畾涔変俊鎭 + fun reportCustomEvent(name: String, ext: Map) { + Log.d(tag, "reportCustomEvent: $name ext: $ext") + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.CUSTOM.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0) + } + + fun writeLog(content: String, level: Int) { + rtcEngine.writeLog(level, content) + } + + fun cleanCache() { + durationEventStartMap.clear() + } + + // ---------------------- private ---------------------- + + private fun configParameters() { + //rtcEngine.setParameters("{\"rtc.qos_for_test_purpose\": true}") //娴嬭瘯鐜浣跨敤 + // 鏁版嵁涓婃姤 + rtcEngine.setParameters("{\"rtc.direct_send_custom_event\": true}") + // 鏃ュ織鍐欏叆 + rtcEngine.setParameters("{\"rtc.log_external_input\": true}") + } + + private fun getCurrentTs(): Long { + return System.currentTimeMillis() + } + + private fun innerReportCostEvent(ts: Long, name: String, cost: Int, ext: Map) { + Log.d(tag, "reportCostEvent: $name 
cost: $cost ms ext: $ext") + writeLog("reportCostEvent: $name cost: $cost ms", Constants.LOG_LEVEL_INFO) + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.COST.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.TIMESTAMP to ts, ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, cost) + } + + private fun convertToJSONString(dictionary: Map): String? { + return try { + JSONObject(dictionary).toString() + } catch (e: Exception) { + writeLog("[$tag]convert to json fail: $e dictionary: $dictionary", Constants.LOG_LEVEL_WARNING) + null + } + } +} \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java index a73ff9287..d2f245c83 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java @@ -113,9 +113,9 @@ public int process(int textureId, int textureType) { float[] matrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transform); if(textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES){ - drawer.drawOes(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight, 0); + drawer.drawOes(textureId, 0,matrix, mWidth, mHeight, 0, 0, mWidth, mHeight,0); }else{ - drawer.drawRgb(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight, 0); + drawer.drawRgb(textureId, 0,matrix, mWidth, mHeight, 0, 0, mWidth, mHeight,0); } GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0); GLES20.glFinish(); diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt 
b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt index 164406442..36bca0b0f 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt @@ -443,13 +443,12 @@ class BeautyProcessor : IBeautyProcessor { if (isReleased) { return -1 } - mSTMobileEffectNative.render( + val ret = mSTMobileEffectNative.render( sTEffectRenderInParam, stEffectRenderOutParam, false ) - if (event == mCustomEvent) { mCustomEvent = 0 } @@ -457,12 +456,18 @@ class BeautyProcessor : IBeautyProcessor { if (isReleased) { return -1 } + + var finalTextId = stEffectRenderOutParam.texture?.id ?: 0 + if(ret < 0){ + finalTextId = textureId + } + glFrameBuffer.setSize(width, height) glFrameBuffer.resetTransform() glFrameBuffer.setFlipV(true) glFrameBuffer.textureId = finalOutTextureId glFrameBuffer.process( - stEffectRenderOutParam.texture?.id ?: 0, + finalTextId, GLES20.GL_TEXTURE_2D ) GLES20.glFinish() diff --git a/Android/APIExample/app/src/main/res/drawable/ic_scale_full.png b/Android/APIExample/app/src/main/res/drawable/ic_scale_full.png new file mode 100644 index 000000000..9eb0b99ed Binary files /dev/null and b/Android/APIExample/app/src/main/res/drawable/ic_scale_full.png differ diff --git a/Android/APIExample/app/src/main/res/layout/fragment_custom_remoterender.xml b/Android/APIExample/app/src/main/res/layout/fragment_custom_remoterender.xml index 2d1ac930b..dc14aee22 100644 --- a/Android/APIExample/app/src/main/res/layout/fragment_custom_remoterender.xml +++ b/Android/APIExample/app/src/main/res/layout/fragment_custom_remoterender.xml @@ -3,6 +3,7 @@ xmlns:tools="http://schemas.android.com/tools" android:layout_width="match_parent" android:layout_height="match_parent" + xmlns:app="http://schemas.android.com/apk/res-auto" android:fitsSystemWindows="true" 
tools:context=".examples.basic.JoinChannelVideo"> @@ -43,4 +44,58 @@ + + + + + + + + + + + + - + @@ -42,14 +43,30 @@ + + + + + + + - + + + + + + - @@ -69,20 +86,20 @@ - - + + - + + - @@ -94,4 +111,9 @@ + + + + + diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift index 2679f0521..9b4bcb416 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePush/CustomVideoSourcePush.swift @@ -31,11 +31,16 @@ class CustomVideoSourcePushEntry: UIViewController { @IBOutlet weak var joinButton: AGButton! @IBOutlet weak var channelTextField: AGTextField! let identifier = "CustomVideoSourcePush" + private var isHDR: Bool = false override func viewDidLoad() { super.viewDidLoad() } + @IBAction func onHDRAction(sender: UISwitch) { + isHDR = sender.isOn + } + @IBAction func doJoinPressed(sender: AGButton) { guard let channelName = channelTextField.text else { return } // resign channel text field @@ -46,7 +51,7 @@ class CustomVideoSourcePushEntry: UIViewController { guard let newViewController = storyBoard.instantiateViewController(withIdentifier: identifier) as? BaseViewController else { return } newViewController.title = channelName - newViewController.configs = ["channelName": channelName] + newViewController.configs = ["channelName": channelName, "isHDR": isHDR] navigationController?.pushViewController(newViewController, animated: true) } } @@ -55,9 +60,10 @@ class CustomVideoSourcePushMain: BaseViewController { var localVideo = Bundle.loadView(fromNib: "VideoViewSampleBufferDisplayView", withType: SampleBufferDisplayView.self) var remoteVideo = Bundle.loadView(fromNib: "VideoView", withType: VideoView.self) var customCamera: AgoraYUVImageSourcePush? 
+ private var isHDR: Bool = false @IBOutlet weak var container: AGEVideoContainer! - var agoraKit: AgoraRtcEngineKit! + var agoraKit: AgoraRtcEngineKit? // indicate if current instance has joined channel var isJoined: Bool = false @@ -67,6 +73,7 @@ class CustomVideoSourcePushMain: BaseViewController { // layout render view remoteVideo.setPlaceholder(text: "Remote Host".localized) container.layoutStream(views: [localVideo, remoteVideo]) + localVideo.backgroundColor = .black // set up agora instance when view loaded let config = AgoraRtcEngineConfig() @@ -74,12 +81,28 @@ class CustomVideoSourcePushMain: BaseViewController { config.areaCode = GlobalSettings.shared.area config.channelProfile = .liveBroadcasting agoraKit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: self) + + guard let agoraKit = agoraKit else { + return + } + + // Turn off quick start for hardware decoding + agoraKit.setParameters("{\"rtc.video.enable_hwdec_quickly_start\": false}") + // Configuring Privatization Parameters Util.configPrivatization(agoraKit: agoraKit) agoraKit.setLogFile(LogUtils.sdkLogPath()) // get channel name from configs guard let channelName = configs["channelName"] as? String else {return} + isHDR = configs["isHDR"] as? Bool ?? false + + let cap1 = agoraKit.queryHDRCapability(.hardwareEncoder) == .supported ? true : false + if !cap1 { + showAlert(title: "Error", message: "Current device does not support HDR") + self.navigationController?.popViewController(animated: true) + return + } // make myself a broadcaster // agoraKit.setClientRole(.broadcaster) @@ -91,15 +114,17 @@ class CustomVideoSourcePushMain: BaseViewController { // setup my own camera as custom video source // note setupLocalVideo is not working when using pushExternalVideoFrame // so you will have to prepare the preview yourself - customCamera = AgoraYUVImageSourcePush(size: CGSize(width: 320, height: 180), - fileName: "sample", - frameRate: 15) + let size = isHDR ? 
CGSize(width: 1280, height: 720) : CGSize(width: 320, height: 180) + customCamera = AgoraYUVImageSourcePush(size: size, + fileName: isHDR ? "hlg-hdr" : "sample", + frameRate: 15, + isHDR: isHDR) customCamera?.delegate = self customCamera?.startSource() customCamera?.trackId = 0 agoraKit.setExternalVideoSource(true, useTexture: true, sourceType: .videoFrame) - let resolution = (GlobalSettings.shared.getSetting(key: "resolution")?.selectedOption().value as? CGSize) ?? .zero + let resolution = size //(GlobalSettings.shared.getSetting(key: "resolution")?.selectedOption().value as? CGSize) ?? .zero let fps = (GlobalSettings.shared.getSetting(key: "fps")?.selectedOption().value as? AgoraVideoFrameRate) ?? .fps15 let orientation = (GlobalSettings.shared.getSetting(key: "orientation")? .selectedOption().value as? AgoraVideoOutputOrientationMode) ?? .fixedPortrait @@ -122,14 +147,15 @@ class CustomVideoSourcePushMain: BaseViewController { option.publishCustomAudioTrack = GlobalSettings.shared.getUserRole() == .broadcaster option.publishCustomVideoTrack = GlobalSettings.shared.getUserRole() == .broadcaster option.clientRoleType = GlobalSettings.shared.getUserRole() - NetworkManager.shared.generateToken(channelName: channelName, success: { token in - let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) + NetworkManager.shared.generateToken(channelName: channelName, success: {[weak self] token in + guard let self = self else {return} + let result = self.agoraKit?.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) if result != 0 { // Usually happens with invalid parameters // Error code description can be found at: // en: https://api-ref.agora.io/en/video-sdk/ios/4.x/documentation/agorartckit/agoraerrorcode // cn: https://doc.shengwang.cn/api-ref/rtc/ios/error-code - self.showAlert(title: "Error", message: "joinChannel call failed: \(result), please check your params") + self.showAlert(title: 
"Error", message: "joinChannel call failed: \(result ?? 0), please check your params") } }) } @@ -140,17 +166,18 @@ class CustomVideoSourcePushMain: BaseViewController { customCamera?.stopSource() // leave channel when exiting the view if isJoined { - agoraKit.disableAudio() - agoraKit.disableVideo() + agoraKit?.disableAudio() + agoraKit?.disableVideo() let option = AgoraRtcChannelMediaOptions() option.publishCustomAudioTrack = false option.publishCustomVideoTrack = false - agoraKit.updateChannel(with: option) - agoraKit.leaveChannel { (stats) -> Void in + agoraKit?.updateChannel(with: option) + agoraKit?.leaveChannel { (stats) -> Void in LogUtils.log(message: "left channel, duration: \(stats.duration)", level: .info) } agoraKit = nil } + AgoraRtcEngineKit.destroy() } } } @@ -196,8 +223,8 @@ extension CustomVideoSourcePushMain: AgoraRtcEngineDelegate { videoCanvas.uid = uid // the view to be binded videoCanvas.view = remoteVideo.videoView - videoCanvas.renderMode = .hidden - agoraKit.setupRemoteVideo(videoCanvas) + videoCanvas.renderMode = .fit + agoraKit?.setupRemoteVideo(videoCanvas) } /// callback when a remote user is leaving the channel, note audience in live broadcast mode will NOT trigger this event @@ -212,10 +239,7 @@ extension CustomVideoSourcePushMain: AgoraRtcEngineDelegate { // you will need to remove the EAGL sublayer from your binded view let videoCanvas = AgoraRtcVideoCanvas() videoCanvas.uid = uid - // the view to be binded - videoCanvas.view = nil - videoCanvas.renderMode = .hidden - agoraKit.setupRemoteVideo(videoCanvas) + agoraKit?.setupRemoteVideo(videoCanvas) } } @@ -223,21 +247,22 @@ extension CustomVideoSourcePushMain: AgoraRtcEngineDelegate { extension CustomVideoSourcePushMain: AgoraYUVImageSourcePushDelegate { func onVideoFrame(_ buffer: CVPixelBuffer, size: CGSize, trackId: UInt, rotation: Int32) { let videoFrame = AgoraVideoFrame() - /** Video format: - * - 1: I420 - * - 2: BGRA - * - 3: NV21 - * - 4: RGBA - * - 5: IMC2 - * - 7: ARGB - 
* - 8: NV12 - * - 12: iOS texture (CVPixelBufferRef) - */ - videoFrame.format = 12 + + if isHDR { + let colorSpace = AgoraColorSpace() + colorSpace.rangeID = .limited + colorSpace.transferID = .IDARIB_STD_B67 + colorSpace.matrixID = .IDBT2020_NCL + colorSpace.primaryID = .IDBT2020 + videoFrame.colorSpace = colorSpace + videoFrame.format = AgoraVideoFormat.cvPixelP010.rawValue + } else { + videoFrame.format = AgoraVideoFormat.cvPixelNV12.rawValue + } videoFrame.textureBuf = buffer videoFrame.rotation = Int32(rotation) // once we have the video frame, we can push to agora sdk - agoraKit.pushExternalVideoFrame(videoFrame, videoTrackId: trackId) + agoraKit?.pushExternalVideoFrame(videoFrame, videoTrackId: trackId) let outputVideoFrame = AgoraOutputVideoFrame() outputVideoFrame.width = Int32(size.width) diff --git a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift index 6d40f4563..6da255f13 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/CustomVideoSourcePushMulti/CustomVideoSourcePushMulti.swift @@ -14,10 +14,27 @@ class UserModel { var uid: UInt = 0 var canvasView: SampleBufferDisplayView? var trackId: UInt32 = 0 - var isJoin: Bool = false - var customSource: AgoraYUVImageSourcePush? + var isJoin: Bool = false { + didSet { + self.canvasView?.videoView.isHidden = !isJoin + self.canvasView?.videoView.reset() + } + } var isEncode: Bool = false + var customSource: AgoraYUVImageSourcePush? var customEncodeSource: KFMP4Demuxer? 
+ + func reset() { + self.customSource?.stopSource() + self.customSource = nil + self.customEncodeSource?.cancelReading() + self.customEncodeSource = nil + + self.uid = UInt(Int.random(in: 10000...99999)) + self.trackId = 0 + self.isJoin = false + self.isEncode = false + } } class CustomVideoSourcePushMultiEntry: UIViewController { @@ -46,16 +63,14 @@ class CustomVideoSourcePushMultiEntry: UIViewController { } class CustomVideoSourcePushMultiMain: BaseViewController { - var localVideo = Bundle.loadView(fromNib: "VideoViewSampleBufferDisplayView", - withType: SampleBufferDisplayView.self) - lazy var remoteVideos: [UserModel] = (0..<3).map({ _ in + private let localUid: UInt = UInt(Int.random(in: 10000...99999)) + lazy var remoteVideos: [UserModel] = (0..<4).map({ _ in let model = UserModel() - model.uid = UInt(Int.random(in: 10000...99999)) model.canvasView = Bundle.loadView(fromNib: "VideoViewSampleBufferDisplayView", withType: SampleBufferDisplayView.self) + model.reset() return model }) - var customCamera: AgoraYUVImageSourcePush? @IBOutlet weak var container: AGEVideoContainer! var agoraKit: AgoraRtcEngineKit! 
@@ -77,7 +92,7 @@ class CustomVideoSourcePushMultiMain: BaseViewController { Util.configPrivatization(agoraKit: agoraKit) agoraKit.setLogFile(LogUtils.sdkLogPath()) - container.layoutStream2x2(views: [localVideo] + remoteVideos.compactMap({ $0.canvasView })) + container.layoutStream2x2(views: remoteVideos.compactMap({ $0.canvasView })) // make myself a broadcaster // agoraKit.setClientRole(.broadcaster) @@ -85,17 +100,6 @@ class CustomVideoSourcePushMultiMain: BaseViewController { agoraKit.enableVideo() agoraKit.enableAudio() - // setup my own camera as custom video source - // note setupLocalVideo is not working when using pushExternalVideoFrame - // so you will have to prepare the preview yourself - customCamera = AgoraYUVImageSourcePush(size: CGSize(width: 320, height: 180), - fileName: "sample", - frameRate: 15) - customCamera?.trackId = agoraKit.createCustomVideoTrack() - customCamera?.delegate = self - customCamera?.startSource() - agoraKit.setExternalVideoSource(true, useTexture: true, sourceType: .videoFrame) - let resolution = (GlobalSettings.shared.getSetting(key: "resolution")?.selectedOption().value as? CGSize) ?? .zero let fps = (GlobalSettings.shared.getSetting(key: "fps")?.selectedOption().value as? AgoraVideoFrameRate) ?? .fps15 let orientation = (GlobalSettings.shared.getSetting(key: "orientation")? @@ -115,29 +119,39 @@ class CustomVideoSourcePushMultiMain: BaseViewController { // 2. If app certificate is turned on at dashboard, token is needed // when joining channel. The channel name and uid used to calculate // the token has to match the ones used for channel join - joinChannel(uid: 999, - trackId: customCamera?.trackId ?? 0, + joinChannel(uid: localUid, + trackId: nil, publishEncodedVideoTrack: false) } - private func joinChannel(uid: UInt, trackId: UInt32, publishEncodedVideoTrack: Bool) { + private func joinChannel(uid: UInt, trackId: UInt32?, publishEncodedVideoTrack: Bool) { guard let channelName = configs["channelName"] as? 
String else {return} let option = AgoraRtcChannelMediaOptions() - option.publishCustomVideoTrack = !publishEncodedVideoTrack option.publishMicrophoneTrack = false - option.autoSubscribeAudio = true - option.autoSubscribeVideo = true - option.customVideoTrackId = Int(trackId) - option.publishEncodedVideoTrack = publishEncodedVideoTrack - option.clientRoleType = GlobalSettings.shared.getUserRole() - let connection = AgoraRtcConnection() - connection.localUid = uid - connection.channelId = channelName + option.publishCameraTrack = false + + let delegate: AgoraRtcEngineDelegate? = self + if let trackId = trackId { + option.publishCustomVideoTrack = !publishEncodedVideoTrack + option.customVideoTrackId = Int(trackId) + option.publishEncodedVideoTrack = publishEncodedVideoTrack + option.clientRoleType = .broadcaster + option.autoSubscribeAudio = false + option.autoSubscribeVideo = false +// delegate = nil + } else { + option.clientRoleType = .audience + option.autoSubscribeAudio = true + option.autoSubscribeVideo = true + } NetworkManager.shared.generateToken(channelName: channelName, uid: uid) { token in + let connection = AgoraRtcConnection() + connection.localUid = uid + connection.channelId = channelName let result = self.agoraKit.joinChannelEx(byToken: token, - connection: connection, - delegate: self, - mediaOptions: option, - joinSuccess: nil) + connection: connection, + delegate: delegate, + mediaOptions: option, + joinSuccess: nil) if result != 0 { // Usually happens with invalid parameters // Error code description can be found at: @@ -195,8 +209,9 @@ class CustomVideoSourcePushMultiMain: BaseViewController { private func createVideoTrack(userModel: UserModel) { let customCamera = AgoraYUVImageSourcePush(size: CGSize(width: 320, height: 180), - fileName: "sample", - frameRate: 15) + fileName: "sample", + frameRate: 15, + isHDR: false) customCamera.trackId = userModel.trackId customCamera.delegate = self userModel.isJoin = true @@ -209,51 +224,61 @@ class 
CustomVideoSourcePushMultiMain: BaseViewController { @IBAction func onDestoryVideoTrack(_ sender: Any) { guard let channelName = configs["channelName"] as? String else {return} - let userModel = remoteVideos.filter({ $0.isJoin == true }).last - userModel?.isJoin = false - userModel?.customSource?.stopSource() - userModel?.customEncodeSource?.cancelReading() - userModel?.customEncodeSource = nil - userModel?.canvasView?.videoView.reset() - userModel?.customSource = nil + guard let userModel = remoteVideos.filter({ $0.trackId != 0 }).last else { return } + let trackId = UInt(userModel.trackId) let connection = AgoraRtcConnection() - connection.localUid = userModel?.uid ?? 0 + connection.localUid = userModel.uid connection.channelId = channelName - agoraKit.destroyCustomVideoTrack(UInt(userModel?.trackId ?? 0)) - agoraKit.destroyCustomEncodedVideoTrack(UInt(userModel?.trackId ?? 0)) - userModel?.trackId = 0 - userModel?.isEncode = false - userModel?.uid = UInt(Int.random(in: 10000...99999)) + agoraKit.destroyCustomVideoTrack(trackId) + agoraKit.destroyCustomEncodedVideoTrack(trackId) agoraKit.leaveChannelEx(connection) { state in - LogUtils.log(message: "warning: \(state.description)", level: .info) + LogUtils.log(message: "leaveChannelEx: \(state.description)", level: .info) } + + cleanCanvas(uid: userModel.uid) } override func willMove(toParent parent: UIViewController?) { if parent == nil { - // stop capture - customCamera?.stopSource() // leave channel when exiting the view if isJoined, let channelName = configs["channelName"] as? 
String { remoteVideos.forEach({ let connection = AgoraRtcConnection() connection.localUid = $0.uid connection.channelId = channelName - $0.customSource?.stopSource() agoraKit.leaveChannelEx(connection) { state in - LogUtils.log(message: "warning: \(state.description)", level: .info) + LogUtils.log(message: "leaveChannelEx: \(state.description)", level: .info) } + $0.reset() }) let connection = AgoraRtcConnection() - connection.localUid = 999 + connection.localUid = localUid connection.channelId = channelName agoraKit.leaveChannelEx(connection) { state in - LogUtils.log(message: "warning: \(state.description)", level: .info) + LogUtils.log(message: "leaveChannelEx: \(state.description)", level: .info) } - AgoraRtcEngineKit.destroy() } + AgoraRtcEngineKit.destroy() } } + + private func cleanCanvas(uid: UInt) { + guard let channelName = configs["channelName"] as? String, + let userModel = remoteVideos.first(where: { $0.uid == uid }) else { return } + LogUtils.log(message: "cleanCanvas: \(uid)", level: .info) + + // to unlink your view from sdk, so that your view reference will be released + // note the video will stay at its last frame, to completely remove it + // you will need to remove the EAGL sublayer from your binded view + userModel.reset() + + let videoCanvas = AgoraRtcVideoCanvas() + videoCanvas.uid = uid + let connect = AgoraRtcConnection() + connect.localUid = localUid + connect.channelId = channelName + agoraKit.setupRemoteVideoEx(videoCanvas, connection: connect) + } } /// agora rtc engine delegate events @@ -265,7 +290,7 @@ extension CustomVideoSourcePushMultiMain: AgoraRtcEngineDelegate { /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraWarningCode.html /// @param warningCode warning code of the problem func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurWarning warningCode: AgoraWarningCode) { - LogUtils.log(message: "warning: \(warningCode.description)", level: .warning) + LogUtils.log(message: "didOccurWarning: 
\(warningCode.description)", level: .warning) } /// callback when error occured for agora sdk, you are recommended to display the error descriptions on demand @@ -275,8 +300,8 @@ extension CustomVideoSourcePushMultiMain: AgoraRtcEngineDelegate { /// cn: https://doc.shengwang.cn/api-ref/rtc/ios/error-code /// @param errorCode error code of the problem func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurError errorCode: AgoraErrorCode) { - LogUtils.log(message: "error: \(errorCode)", level: .error) - self.showAlert(title: "Error", message: "Error \(errorCode.description) occur") + LogUtils.log(message: "didOccurError: \(errorCode)", level: .error) + self.showAlert(title: "Error", message: "didOccurError \(errorCode.description) occur") } func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinChannel channel: String, withUid uid: UInt, elapsed: Int) { @@ -288,24 +313,23 @@ extension CustomVideoSourcePushMultiMain: AgoraRtcEngineDelegate { /// @param uid uid of remote joined user /// @param elapsed time elapse since current sdk instance join the channel in ms func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinedOfUid uid: UInt, elapsed: Int) { + guard let channelName = configs["channelName"] as? String else {return} + if let _ = remoteVideos.first(where: { $0.uid == uid }) { return } + guard let userModel = remoteVideos.first(where: { $0.isJoin == false && $0.trackId == 0 }) else { return } LogUtils.log(message: "remote user join: \(uid) \(elapsed)ms", level: .info) + userModel.uid = uid + userModel.isJoin = true - // Only one remote video view is available for this - // tutorial. Here we check if there exists a surface - // view tagged as this uid. 
- if uid == 999 { return } - for model in remoteVideos where model.uid == uid { - return - } + // the view to be binded let videoCanvas = AgoraRtcVideoCanvas() videoCanvas.uid = uid - // the view to be binded - guard let userModel = remoteVideos.first(where: { $0.isJoin == false }) else { return } videoCanvas.view = userModel.canvasView?.videoView - videoCanvas.renderMode = .hidden - userModel.uid = uid - userModel.isJoin = true - agoraKit.setupRemoteVideo(videoCanvas) + videoCanvas.renderMode = .fit + + let connect = AgoraRtcConnection() + connect.localUid = localUid + connect.channelId = channelName + agoraKit.setupRemoteVideoEx(videoCanvas, connection: connect) } /// callback when a remote user is leaving the channel, note audience in live broadcast mode will NOT trigger this event @@ -313,15 +337,10 @@ extension CustomVideoSourcePushMultiMain: AgoraRtcEngineDelegate { /// @param reason reason why this user left, note this event may be triggered when the remote user /// become an audience in live broadcasting profile func rtcEngine(_ engine: AgoraRtcEngineKit, didOfflineOfUid uid: UInt, reason: AgoraUserOfflineReason) { + guard let _ = remoteVideos.first(where: { $0.uid == uid }) else { return } LogUtils.log(message: "remote user left: \(uid) reason \(reason)", level: .info) - // to unlink your view from sdk, so that your view reference will be released - // note the video will stay at its last frame, to completely remove it - // you will need to remove the EAGL sublayer from your binded view - let userModel = remoteVideos.first(where: { $0.uid == uid }) - userModel?.isJoin = false - userModel?.uid = UInt(Int.random(in: 10000...99999)) - userModel?.canvasView?.videoView.reset() + cleanCanvas(uid: uid) } } @@ -329,17 +348,7 @@ extension CustomVideoSourcePushMultiMain: AgoraRtcEngineDelegate { extension CustomVideoSourcePushMultiMain: AgoraYUVImageSourcePushDelegate { func onVideoFrame(_ buffer: CVPixelBuffer, size: CGSize, trackId: UInt, rotation: Int32) { let 
videoFrame = AgoraVideoFrame() - /** Video format: - * - 1: I420 - * - 2: BGRA - * - 3: NV21 - * - 4: RGBA - * - 5: IMC2 - * - 7: ARGB - * - 8: NV12 - * - 12: iOS texture (CVPixelBufferRef) - */ - videoFrame.format = 12 + videoFrame.format = AgoraVideoFormat.cvPixelNV12.rawValue videoFrame.textureBuf = buffer videoFrame.rotation = Int32(rotation) @@ -348,12 +357,8 @@ extension CustomVideoSourcePushMultiMain: AgoraYUVImageSourcePushDelegate { outputVideoFrame.height = Int32(size.height) outputVideoFrame.pixelBuffer = buffer outputVideoFrame.rotation = rotation - if customCamera?.trackId ?? 0 == trackId { - localVideo.videoView.renderVideoPixelBuffer(outputVideoFrame) - } else { - let userModel = remoteVideos.first(where: { $0.trackId == trackId }) - userModel?.canvasView?.videoView.renderVideoPixelBuffer(outputVideoFrame) - } + let userModel = remoteVideos.first(where: { $0.trackId == trackId }) + userModel?.canvasView?.videoView.renderVideoPixelBuffer(outputVideoFrame) // once we have the video frame, we can push to agora sdk agoraKit?.pushExternalVideoFrame(videoFrame, videoTrackId: trackId) } diff --git a/iOS/APIExample/APIExample/Examples/Advanced/FaceCapture/FaceCapture.swift b/iOS/APIExample/APIExample/Examples/Advanced/FaceCapture/FaceCapture.swift index f61a227c6..df5b970bd 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/FaceCapture/FaceCapture.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/FaceCapture/FaceCapture.swift @@ -231,6 +231,7 @@ extension FaceCaptureMain: AgoraFaceInfoDelegate { extension FaceCaptureMain: AgoraVideoFrameDelegate { func onCapture(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool { let info = videoFrame.metaInfo["KEY_FACE_CAPTURE"] as? String + localVideo.statsInfo?.updateMetaInfo(data: info) LogUtils.log(message: info ?? 
"", level: .info) return true } diff --git a/iOS/APIExample/APIExample/Examples/Advanced/KtvCopyrightMusic/KtvCopyrightMusic.swift b/iOS/APIExample/APIExample/Examples/Advanced/KtvCopyrightMusic/KtvCopyrightMusic.swift index 8ddd6eb75..712c12267 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/KtvCopyrightMusic/KtvCopyrightMusic.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/KtvCopyrightMusic/KtvCopyrightMusic.swift @@ -10,7 +10,7 @@ import UIKit class KtvCopyrightMusic: UIViewController { - private let urlString = "https://doc.shengwang.cn/doc/online-ktv/ios/implementation/ktv-scenario/integrate-ktvapi" + private let urlString = "https://doc.shengwang.cn/doc/online-ktv/ios/ktv-scenario/get-started/integrate-ktvapi" override func viewDidLoad() { super.viewDidLoad() diff --git a/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard index c0255764f..d433b379b 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/Base.lproj/LiveStreaming.storyboard @@ -1,9 +1,9 @@ - + - + @@ -65,10 +65,10 @@ + @@ -88,11 +96,14 @@ + + + @@ -111,6 +122,7 @@ + @@ -129,6 +141,9 @@ + + + @@ -306,16 +321,14 @@ - + + + + - + + @@ -422,6 +459,24 @@ + + + + + + + + + + + + + + + + + + @@ -430,38 +485,47 @@ - + + + + + + + - + + + + @@ -476,7 +540,11 @@ + + + + diff --git a/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift b/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift index dca80ec67..afe8e60db 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/LiveStreaming.swift @@ -14,6 +14,7 @@ class LiveStreamingEntry: UIViewController { @IBOutlet weak var preloadButton: 
UIButton! @IBOutlet weak var channelTextField: UITextField! @IBOutlet weak var cameraButton: UIButton? + @IBOutlet weak var videoScenarioButton: UIButton? let identifier = "LiveStreaming" var role: AgoraClientRole = .broadcaster private var isFirstFrame: Bool = false @@ -130,6 +131,25 @@ class LiveStreamingEntry: UIViewController { } } + @IBAction func onTapVideoScenarioButton(_ sender: UIButton) { + let pickerView = PickerView() + pickerView.dataArray = [ + AgoraApplicationScenarioType.applicationGeneralScenario.description(), + AgoraApplicationScenarioType.applicationMeetingScenario.description(), + AgoraApplicationScenarioType.application1V1Scenario.description(), + AgoraApplicationScenarioType.applicationLiveShowScenario.description() + ] + AlertManager.show(view: pickerView, alertPostion: .bottom) + pickerView.pickerViewSelectedValueClosure = { [weak self, weak pickerView] key in + guard let self = self else { return } + let idx = pickerView?.dataArray?.firstIndex(where: { $0 == key}) ?? 0 + let type = AgoraApplicationScenarioType(rawValue: idx) ?? .applicationGeneralScenario + let ret = self.agoraKit.setVideoScenario(type) + print("setVideoScenario[\(type.rawValue)] ret = \(ret)") + self.videoScenarioButton?.setTitle(key, for: .normal) + } + } + func doJoin() { guard let channelName = channelTextField.text else { return } let storyBoard: UIStoryboard = UIStoryboard(name: identifier, bundle: nil) @@ -163,11 +183,13 @@ class LiveStreamingMain: BaseViewController { var backgroundVideo = Bundle.loadVideoView(type: .remote, audioOnly: false) @IBOutlet weak var foregroundVideoContainer: UIView! @IBOutlet weak var backgroundVideoContainer: UIView! + @IBOutlet weak var touchBackgroundView: UIView? @IBOutlet weak var clientRoleToggleView: UIView! @IBOutlet weak var ultraLowLatencyToggleView: UIView! @IBOutlet weak var clientRoleToggle: UISwitch! @IBOutlet weak var ultraLowLatencyToggle: UISwitch! @IBOutlet weak var takeSnapshot: UIButton! 
+ @IBOutlet weak var takeLocalSnapshot: UIButton! @IBOutlet weak var watarMarkContainer: UIView! @IBOutlet weak var dualStreamContainer: UIView! @IBOutlet weak var dualStreamTipsLabel: UILabel! @@ -177,6 +199,8 @@ class LiveStreamingMain: BaseViewController { @IBOutlet weak var centerStageContainerView: UIView! @IBOutlet weak var CameraFocalButton: UIButton! @IBOutlet weak var cameraStabilizationButton: UIButton? + @IBOutlet weak var localRenderTextField: UITextField? + @IBOutlet weak var remoteRenderTextField: UITextField? var remoteUid: UInt? { didSet { foregroundVideoContainer.isHidden = !(role == .broadcaster && remoteUid != nil) @@ -193,6 +217,9 @@ class LiveStreamingMain: BaseViewController { videoImageContainer.isHidden = role == .audience centerStageContainerView.isHidden = role == .audience CameraFocalButton.isHidden = role == .audience + localRenderTextField?.isHidden = role == .audience + cameraStabilizationButton?.isHidden = role == .audience + takeLocalSnapshot.isHidden = role == .audience } } var isLocalVideoForeground = false { @@ -206,7 +233,9 @@ class LiveStreamingMain: BaseViewController { } } } - var isUltraLowLatencyOn: Bool = false + private var isUltraLowLatencyOn: Bool = false + + private var cameraStabilizationKey: String? = nil // indicate if current instance has joined channel var isJoined: Bool = false @@ -219,6 +248,11 @@ class LiveStreamingMain: BaseViewController { foregroundVideo.bindFrameToSuperviewBounds() backgroundVideo.bindFrameToSuperviewBounds() + touchBackgroundView?.addGestureRecognizer(UITapGestureRecognizer(target: self, action: #selector(onTapGesture(_ :)))) + + localRenderTextField?.addTarget(self, action: #selector(textFieldDidChange), for: .editingChanged) + remoteRenderTextField?.addTarget(self, action: #selector(textFieldDidChange), for: .editingChanged) + let modeKey = stabilizationModeParams.first?.keys.first ?? 
"" cameraStabilizationButton?.setTitle("\("CameraStabilizationMode".localized) \(modeKey)", for: .normal) @@ -329,6 +363,11 @@ class LiveStreamingMain: BaseViewController { return isLocalVideoForeground ? backgroundVideo.videoView : foregroundVideo.videoView } private var cameraDirection: AgoraCameraDirection = .front + + @IBAction func onTapGesture(_ ges: UIGestureRecognizer) { + self.view.endEditing(true) + } + @IBAction func onTapCameraFocalButton(_ sender: UIButton) { let infos = agoraKit.queryCameraFocalLengthCapability() let pickerView = PickerView() @@ -364,9 +403,19 @@ class LiveStreamingMain: BaseViewController { pickerView.dataArray = stabilizationModeParams.map({ $0.keys.first ?? "" }) AlertManager.show(view: pickerView, alertPostion: .bottom) pickerView.pickerViewSelectedValueClosure = { [weak self] key in - guard let map = stabilizationModeParams.filter({$0.keys.contains(key)}).first else {return} - sender.setTitle("\("CameraStabilizationMode".localized) \(key)", for: .normal) - self?.agoraKit.setCameraStabilizationMode(map[key] ?? .auto) + guard let self = self, let map = stabilizationModeParams.filter({$0.keys.contains(key)}).first else {return} + self.cameraStabilizationKey = key + sender.setTitle("\("CameraStabilizationMode".localized) \(self.cameraStabilizationKey ?? "")", for: .normal) + self.updateCameraStabilization() + } + } + + private func updateCameraStabilization() { + guard let key = cameraStabilizationKey, + let map = stabilizationModeParams.filter({$0.keys.contains(key)}).first else {return} + let ret = self.agoraKit.setCameraStabilizationMode(map[key] ?? 
.auto) + if ret != 0 { + LogUtils.log(message: "setCameraStabilizationMode[\(key)] fail: \(ret)", level: .error) } } @@ -374,8 +423,10 @@ class LiveStreamingMain: BaseViewController { let options = AgoraImageTrackOptions() let imgPath = Bundle.main.path(forResource: "agora-logo", ofType: "png") options.imageUrl = imgPath - let res = agoraKit.enableVideoImageSource(sender.isOn, options: options) - print(res) + let ret = agoraKit.enableVideoImageSource(sender.isOn, options: options) + if ret != 0 { + LogUtils.log(message: "enableVideoImageSource fail: \(ret)", level: .info) + } } @IBAction func onTapBFrameSwitch(_ sender: UISwitch) { let encoderConfig = AgoraVideoEncoderConfiguration() @@ -434,6 +485,29 @@ class LiveStreamingMain: BaseViewController { agoraKit.takeSnapshot(Int(remoteUid), filePath: path) showAlert(title: "Screenshot successful".localized, message: path) } + + @IBAction func onTakeLocalSnapshot(_ sender: Any) { + let pickerView = PickerView() + let values: [AgoraVideoModulePosition] = [ +// .postCapture, + .preRenderer, + .preEncoder, + .postCaptureOrigin + ] + pickerView.dataArray = values.map({ $0.description()}) + AlertManager.show(view: pickerView, alertPostion: .bottom) + pickerView.pickerViewSelectedValueClosure = { [weak self, weak pickerView] key in + guard let self = self else { return } + let idx = pickerView?.dataArray?.firstIndex(where: { $0 == key}) ?? 0 + let position = values[idx] + let config = AgoraSnapshotConfig() + config.position = position + config.filePath = NSTemporaryDirectory().appending("local_\(position.rawValue).png") + let ret = self.agoraKit.takeSnapshotWithConfig(0, config: config) + print("takeSnapshot ret: \(ret) path: \(config.filePath ?? "")") + self.showAlert(title: "Screenshot successful".localized, message: config.filePath ?? 
"") + } + } @IBAction func onTapForegroundVideo(_ sender: UIGestureRecognizer) { isLocalVideoForeground = !isLocalVideoForeground let localVideoCanvas = AgoraRtcVideoCanvas() @@ -494,6 +568,24 @@ class LiveStreamingMain: BaseViewController { } } } + + @objc func textFieldDidChange(_ textField: UITextField) { + if let text = textField.text, let number = Int(text) { + if number > 60 { + textField.text = "60" + } else if number == 0 { + textField.text = "" + } + } else { + textField.text = "" + } + + if textField == localRenderTextField { + agoraKit.setLocalRenderTargetFps(.camera, targetFps: Int32(textField.text ?? "") ?? 15) + } else { + agoraKit.setRemoteRenderTargetFps(Int32(textField.text ?? "") ?? 15) + } + } } /// agora rtc engine delegate events @@ -611,4 +703,9 @@ extension LiveStreamingMain: AgoraRtcEngineDelegate { tracingInfo: AgoraVideoRenderingTracingInfo) { backgroundVideo.statsInfo?.updateFirstFrameInfo(tracingInfo) } + + func rtcEngine(_ engine: AgoraRtcEngineKit, localVideoStateChangedOf state: AgoraVideoLocalState, reason: AgoraLocalVideoStreamReason, sourceType: AgoraVideoSourceType) { + guard state == .capturing else {return} + self.updateCameraStabilization() + } } diff --git a/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings b/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings index d8bf37071..07532f2d2 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings +++ b/iOS/APIExample/APIExample/Examples/Advanced/LiveStreaming/zh-Hans.lproj/LiveStreaming.strings @@ -9,14 +9,19 @@ /* Class = "UIButton"; normalTitle = "Join"; ObjectID = "kbN-ZR-nNn"; */ "kbN-ZR-nNn.normalTitle" = "鍔犲叆棰戦亾"; -"w4q-aT-JBc.normalTitle" = "鎴浘"; +"w4q-aT-JBc.normalTitle" = "鏈湴鎴浘"; +"lpn-6Z-VV3.normalTitle" = "杩滅鎴浘"; "ohV-am-Acd.text" = "棣栧抚鍑哄浘"; "Q0N-nV-bez.normalTitle" = "榛樿鑳屾櫙鑹"; "S19-UR-C2c.normalTitle" = "棰勫姞杞"; 
-"S19-UR-C2c.selectTitle" = "鍙栨秷棰勫姞杞"; "8kn-Rl-VMd.text" = "鍨墖鎺ㄦ祦"; "dZm-Rf-imt.normalTitle" = "鐩告満瀵圭劍"; "ug1-fz-GYz.normalTitle" = "鐩告満瀵圭劍"; + +"ZB2-jf-zOV.normalTitle" = "瑙嗛涓氬姟鍦烘櫙"; + +"tOf-AP-HSe.placeholder" = "鏈湴娓叉煋甯х巼(1-60)锛岄粯璁15"; +"UFF-wU-Wze.placeholder" = "杩滅娓叉煋甯х巼(1-60)锛岄粯璁15"; diff --git a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/Base.lproj/PictureInPicture.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/LocalCompositeGraph/Base.lproj/LocalCompositeGraph.storyboard similarity index 74% rename from iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/Base.lproj/PictureInPicture.storyboard rename to iOS/APIExample/APIExample/Examples/Advanced/LocalCompositeGraph/Base.lproj/LocalCompositeGraph.storyboard index 84b72944c..1da29a8f6 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/Base.lproj/PictureInPicture.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/LocalCompositeGraph/Base.lproj/LocalCompositeGraph.storyboard @@ -1,18 +1,18 @@ - + - + - + - + @@ -59,30 +59,35 @@ - + - - + + - - + + - + + + + - + + + + + + + + + + - + diff --git a/iOS/APIExample/APIExample/Examples/Advanced/LocalCompositeGraph/LocalCompositeGraph.swift b/iOS/APIExample/APIExample/Examples/Advanced/LocalCompositeGraph/LocalCompositeGraph.swift new file mode 100644 index 000000000..8f11f643d --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/LocalCompositeGraph/LocalCompositeGraph.swift @@ -0,0 +1,290 @@ +// +// LocalCompositeGraph.swift +// APIExample +// +// Created by 寮犱咕娉 on 2020/4/17. +// Copyright 漏 2020 Agora Corp. All rights reserved. +// +import UIKit +import AGEVideoLayout +import AgoraRtcKit + +class LocalCompositeGraphEntry: UIViewController { + @IBOutlet weak var joinButton: UIButton! + @IBOutlet weak var channelTextField: UITextField! 
+ let identifier = "LocalCompositeGraph" + + override func viewDidLoad() { + super.viewDidLoad() + } + + @IBAction func doJoinPressed(sender: UIButton) { + guard let channelName = channelTextField.text else { return } + // resign channel text field + channelTextField.resignFirstResponder() + + let storyBoard: UIStoryboard = UIStoryboard(name: identifier, bundle: nil) + // create new view controller every time to ensure we get a clean vc + guard let newViewController = storyBoard.instantiateViewController(withIdentifier: identifier) as? BaseViewController else {return} + newViewController.title = channelName + newViewController.configs = ["channelName": channelName] + navigationController?.pushViewController(newViewController, animated: true) + } +} + +class LocalCompositeGraphMain: BaseViewController { + var localVideo = Bundle.loadVideoView(type: .local, audioOnly: false) + + @IBOutlet weak var container: AGEVideoContainer! + var agoraKit: AgoraRtcEngineKit! + + // indicate if current instance has joined channel + var isJoined: Bool = false + + private lazy var screenParams: AgoraScreenCaptureParameters2 = { + let params = AgoraScreenCaptureParameters2() + params.captureVideo = true + params.captureAudio = true + let audioParams = AgoraScreenAudioParameters() + audioParams.captureSignalVolume = 50 + params.audioParams = audioParams + let videoParams = AgoraScreenVideoParameters() + videoParams.dimensions = screenShareVideoDimension() + videoParams.frameRate = .fps15 + videoParams.bitrate = AgoraVideoBitrateStandard + params.videoParams = videoParams + return params + }() + + private lazy var option: AgoraRtcChannelMediaOptions = { + let option = AgoraRtcChannelMediaOptions() + option.clientRoleType = GlobalSettings.shared.getUserRole() + option.publishCameraTrack = true + option.publishMicrophoneTrack = true + return option + }() + + private var systemBroadcastPicker: RPSystemBroadcastPickerView? 
+ + override func viewDidLoad() { + super.viewDidLoad() + // layout render view + localVideo.setPlaceholder(text: "Local Host".localized) + container.layoutStream(views: [localVideo]) + + // set up agora instance when view loaded + let config = AgoraRtcEngineConfig() + config.appId = KeyCenter.AppId + config.areaCode = GlobalSettings.shared.area + config.channelProfile = .liveBroadcasting + agoraKit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: self) + + agoraKit.setLogFile(LogUtils.sdkLogPath()) + + // get channel name from configs + guard let channelName = configs["channelName"] as? String else {return} + + // make myself a broadcaster + agoraKit.setClientRole(GlobalSettings.shared.getUserRole()) + // enable video module and set up video encoding configs + agoraKit.enableVideo() + agoraKit.enableAudio() + + // set up local video to render your local camera preview + let videoCanvas = AgoraRtcVideoCanvas() + videoCanvas.uid = 0 + // the view to be binded + videoCanvas.view = localVideo.videoView + videoCanvas.mirrorMode = .disabled + videoCanvas.renderMode = .fit + videoCanvas.sourceType = .transCoded + agoraKit.setupLocalVideo(videoCanvas) + // you have to call startPreview to see local video + agoraKit.startPreview() + + // Set audio route to speaker + agoraKit.setDefaultAudioRouteToSpeakerphone(true) + + //start screen capture + self.startScreenCapture() + + // start camera + let captureConfig = AgoraCameraCapturerConfiguration() + captureConfig.dimensions = CGSize(width: 100, height: 100) + agoraKit.startCameraCapture(.camera, config: captureConfig) + + // start joining channel + // 1. Users can only see each other after they join the + // same channel successfully using the same app id. + // 2. If app certificate is turned on at dashboard, token is needed + // when joining channel. 
The channel name and uid used to calculate + // the token has to match the ones used for channel join + let option = AgoraRtcChannelMediaOptions() + option.publishCameraTrack = true + option.publishMicrophoneTrack = true + option.publishTranscodedVideoTrack = true + option.clientRoleType = .broadcaster + NetworkManager.shared.generateToken(channelName: channelName, success: { token in + let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option) + if result != 0 { + // Usually happens with invalid parameters + // Error code description can be found at: + // en: https://api-ref.agora.io/en/video-sdk/ios/4.x/documentation/agorartckit/agoraerrorcode + // cn: https://doc.shengwang.cn/api-ref/rtc/ios/error-code + self.showAlert(title: "Error", message: "joinChannel call failed: \(result), please check your params") + } + self.startVideoTranscoder() + }) + } + + override func viewDidDisappear(_ animated: Bool) { + super.viewDidDisappear(animated) + agoraKit.disableAudio() + agoraKit.disableVideo() + if isJoined { + stopScreenCapture() + agoraKit.stopPreview() + agoraKit.leaveChannel { (stats) -> Void in + LogUtils.log(message: "left channel, duration: \(stats.duration)", level: .info) + } + } + } + + private func screenShareVideoDimension() -> CGSize { + let screenSize = UIScreen.main.bounds + var boundingSize = CGSize(width: 540, height: 960) + let mW: CGFloat = boundingSize.width / screenSize.width + let mH: CGFloat = boundingSize.height / screenSize.height + if mH < mW { + boundingSize.width = boundingSize.height / screenSize.height * screenSize.width + } else if mW < mH { + boundingSize.height = boundingSize.width / screenSize.width * screenSize.height + } + return boundingSize + } + + private func prepareSystemBroadcaster() { + if #available(iOS 12.0, *) { + let frame = CGRect(x: 0, y: 0, width: 60, height: 60) + systemBroadcastPicker = RPSystemBroadcastPickerView(frame: frame) + 
systemBroadcastPicker?.showsMicrophoneButton = false + systemBroadcastPicker?.autoresizingMask = [.flexibleTopMargin, .flexibleRightMargin] + let bundleId = Bundle.main.bundleIdentifier ?? "" + systemBroadcastPicker?.preferredExtension = "\(bundleId).Agora-ScreenShare-Extension" + + } else { + self.showAlert(message: "Minimum support iOS version is 12.0") + } + } + + private func stopScreenCapture() { + agoraKit.stopScreenCapture() + option.publishScreenCaptureVideo = false + option.publishScreenCaptureAudio = false + agoraKit.updateChannel(with: option) + } + + private func startScreenCapture() { + guard !UIScreen.main.isCaptured else { return } + agoraKit.startScreenCapture(screenParams) + prepareSystemBroadcaster() + guard let picker = systemBroadcastPicker else { return } + for view in picker.subviews where view is UIButton { + (view as? UIButton)?.sendActions(for: .allEvents) + break + } + + option.publishScreenCaptureVideo = true + option.publishScreenCaptureAudio = true + agoraKit.updateChannel(with: option) + } + + private func startVideoTranscoder() { + + // camera capture + let cameraStream = AgoraTranscodingVideoStream() + cameraStream.rect = CGRect(origin: .zero, size: CGSize(width: 100, height: 100)) + cameraStream.sourceType = .camera + + // screen capture + let screenStream = AgoraTranscodingVideoStream() + screenStream.sourceType = .screen + screenStream.rect = CGRect(origin: .zero, size: screenShareVideoDimension()) + + let config = AgoraLocalTranscoderConfiguration() + config.videoOutputConfiguration.dimensions = screenShareVideoDimension() + // set transcoder config + config.videoInputStreams = [screenStream, cameraStream] + agoraKit.startLocalVideoTranscoder(config) + } +} + +/// agora rtc engine delegate events +extension LocalCompositeGraphMain: AgoraRtcEngineDelegate { + /// callback when warning occured for agora sdk, warning can usually be ignored, still it's nice to check out + /// what is happening + /// Warning code description can be 
found at: + /// en: https://api-ref.agora.io/en/voice-sdk/ios/3.x/Constants/AgoraWarningCode.html + /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraWarningCode.html + /// @param warningCode warning code of the problem + func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurWarning warningCode: AgoraWarningCode) { + LogUtils.log(message: "warning: \(warningCode.description)", level: .warning) + } + + /// callback when error occured for agora sdk, you are recommended to display the error descriptions on demand + /// to let user know something wrong is happening + /// Error code description can be found at: + /// en: https://api-ref.agora.io/en/video-sdk/ios/4.x/documentation/agorartckit/agoraerrorcode + /// cn: https://doc.shengwang.cn/api-ref/rtc/ios/error-code + /// @param errorCode error code of the problem + func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurError errorCode: AgoraErrorCode) { + LogUtils.log(message: "error: \(errorCode)", level: .error) + self.showAlert(title: "Error", message: "Error \(errorCode.description) occur") + } + + func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinChannel channel: String, withUid uid: UInt, elapsed: Int) { + self.isJoined = true + LogUtils.log(message: "Join \(channel) with uid \(uid) elapsed \(elapsed)ms", level: .info) + } + + /// callback when a remote user is joinning the channel, note audience in live broadcast mode will NOT trigger this event + /// @param uid uid of remote joined user + /// @param elapsed time elapse since current sdk instance join the channel in ms + func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinedOfUid uid: UInt, elapsed: Int) { + LogUtils.log(message: "remote user join: \(uid) \(elapsed)ms", level: .info) + } + + /// callback when a remote user is leaving the channel, note audience in live broadcast mode will NOT trigger this event + /// @param uid uid of remote joined user + /// @param reason reason why this user left, note this event may be triggered when the 
remote user + /// become an audience in live broadcasting profile + func rtcEngine(_ engine: AgoraRtcEngineKit, didOfflineOfUid uid: UInt, reason: AgoraUserOfflineReason) { + LogUtils.log(message: "remote user left: \(uid) reason \(reason)", level: .info) + + // to unlink your view from sdk, so that your view reference will be released + // note the video will stay at its last frame, to completely remove it + // you will need to remove the EAGL sublayer from your binded view + let videoCanvas = AgoraRtcVideoCanvas() + videoCanvas.uid = uid + // the view to be binded + videoCanvas.view = nil + videoCanvas.renderMode = .hidden + agoraKit.setupRemoteVideo(videoCanvas) + } + + /// Reports the statistics of the current call. The SDK triggers this callback once every two seconds after the user joins the channel. + /// @param stats stats struct + func rtcEngine(_ engine: AgoraRtcEngineKit, reportRtcStats stats: AgoraChannelStats) { + localVideo.statsInfo?.updateChannelStats(stats) + } + + /// Reports the statistics of the uploading local audio streams once every two seconds. 
+ /// @param stats stats struct + func rtcEngine(_ engine: AgoraRtcEngineKit, localAudioStats stats: AgoraRtcLocalAudioStats) { + localVideo.statsInfo?.updateLocalAudioStats(stats) + } + + func rtcEngine(_ engine: AgoraRtcEngineKit, didLocalVideoTranscoderErrorWithStream stream: AgoraTranscodingVideoStream, errorCode: AgoraVideoTranscoderError) { + print("didLocalVideoTranscoderError: \(errorCode.rawValue), source type: \(stream.sourceType.rawValue)") + } +} diff --git a/iOS/APIExample/APIExample/Examples/Advanced/LocalCompositeGraph/zh-Hans.lproj/LocalCompositeGraph.strings b/iOS/APIExample/APIExample/Examples/Advanced/LocalCompositeGraph/zh-Hans.lproj/LocalCompositeGraph.strings new file mode 100644 index 000000000..25a97ee8c --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/LocalCompositeGraph/zh-Hans.lproj/LocalCompositeGraph.strings @@ -0,0 +1,21 @@ + +/* Class = "UITextField"; placeholder = "Enter channel name"; ObjectID = "GWc-L5-fZV"; */ +"GWc-L5-fZV.placeholder" = "杈撳叆棰戦亾鍚"; + +/* Class = "UINavigationItem"; title = "Join Channel"; ObjectID = "Iy0-Dq-h5x"; */ +"Iy0-Dq-h5x.title" = "鍔犲叆棰戦亾"; + +/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "VpM-9W-auG"; */ +"VpM-9W-auG.normalTitle" = "Button"; + +/* Class = "UIButton"; normalTitle = "Join"; ObjectID = "kbN-ZR-nNn"; */ +"kbN-ZR-nNn.normalTitle" = "鍔犲叆棰戦亾"; + +/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "kf0-3f-UI5"; */ +"kf0-3f-UI5.normalTitle" = "Button"; + +/* Class = "UIViewController"; title = "Join Channel Video"; ObjectID = "p70-sh-D1h"; */ +"p70-sh-D1h.title" = "瑙嗛瀹炴椂閫氳瘽"; + +/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "wHl-zh-dFe"; */ +"wHl-zh-dFe.normalTitle" = "Button"; diff --git a/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift b/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift index 539a2cf57..cddf2a31b 100644 --- 
a/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/MediaChannelRelay/MediaChannelRelay.swift @@ -45,6 +45,8 @@ class MediaChannelRelayMain: BaseViewController { @IBOutlet weak var resumeButton: UIButton! @IBOutlet weak var relayChannelField: UITextField! var agoraKit: AgoraRtcEngineKit! + // configure source info, channel name defaults to current, and uid defaults to local + let mediaRelayconfig = AgoraChannelMediaRelayConfiguration() // indicate if current instance has joined channel var isJoined: Bool = false @@ -128,6 +130,7 @@ class MediaChannelRelayMain: BaseViewController { // cn: https://doc.shengwang.cn/api-ref/rtc/ios/error-code self.showAlert(title: "Error", message: "joinChannel call failed: \(result), please check your params") } + self.mediaRelayconfig.sourceInfo = AgoraChannelMediaRelayInfo(token: token) }) } @@ -140,15 +143,12 @@ class MediaChannelRelayMain: BaseViewController { self.showAlert(message: "Destination channel name is empty") return } + NetworkManager.shared.generateToken(channelName: destinationChannelName) { token in - // configure source info, channel name defaults to current, and uid defaults to local - let config = AgoraChannelMediaRelayConfiguration() - config.sourceInfo = AgoraChannelMediaRelayInfo(token: token) - // configure target channel info let destinationInfo = AgoraChannelMediaRelayInfo(token: token) - config.setDestinationInfo(destinationInfo, forChannelName: destinationChannelName) - self.agoraKit.startOrUpdateChannelMediaRelay(config) + self.mediaRelayconfig.setDestinationInfo(destinationInfo, forChannelName: destinationChannelName) + self.agoraKit.startOrUpdateChannelMediaRelay(self.mediaRelayconfig) } } diff --git a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/ChannelViewController.swift b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/ChannelViewController.swift new file mode 100644 index 
000000000..49e931b0c --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/ChannelViewController.swift @@ -0,0 +1,59 @@ +// +// ViewController.swift +// PIPDemo +// +// Created by qinhui on 2024/8/7. +// + +import UIKit + +class ChannelViewController: UIViewController { + lazy var textField: UITextField = { + let t = UITextField() + t.placeholder = "杈撳叆鎴块棿鍙" + t.borderStyle = .line + t.backgroundColor = .orange + return t + }() + + var pipCls: T.Type? + + lazy var button: UIButton = { + let b = UIButton(type: .custom) + b.setTitle("鍔犲叆鎴块棿", for: .normal) + b.setTitleColor(.blue, for: .normal) + b.addTarget(self, action: #selector(joinAction), for: .touchUpInside) + return b + }() + + override func viewDidLoad() { + super.viewDidLoad() + view.backgroundColor = .white + + view.addSubview(textField) + view.addSubview(button) + + button.snp.makeConstraints { make in + make.center.equalTo(view) + } + + textField.snp.makeConstraints { make in + make.bottom.equalTo(button.snp.top).offset(-50) + make.centerX.equalTo(button) + make.width.equalTo(150) + make.height.equalTo(30) + } + } + + @objc func joinAction() { + guard let channelId = textField.text, let cls = pipCls else { return } + + let vc = cls.init() + vc.channelId = channelId + self.navigationController?.pushViewController(vc, animated: true) + } + + override func touchesBegan(_ touches: Set, with event: UIEvent?) { + view.endEditing(true) + } +} diff --git a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/CustomViewPIPViewController/CustomViewPIPService.swift b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/CustomViewPIPViewController/CustomViewPIPService.swift new file mode 100644 index 000000000..4e99795c7 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/CustomViewPIPViewController/CustomViewPIPService.swift @@ -0,0 +1,169 @@ +// +// RtcManager.swift +// PIPDemo +// +// Created by qinhui on 2024/8/7. 
+// + +import Foundation +import AgoraRtcKit + +class CustomViewPIPService: NSObject { + var rtcEngineDelegate: AgoraRtcEngineDelegate? + var videoFrameDelegte: AgoraVideoFrameDelegate? + + weak var localView: UIView? + weak var remoteView: UIView? + var channelId: String + + private lazy var rtcConfig: AgoraRtcEngineConfig = { + let config = AgoraRtcEngineConfig() + config.appId = KeyCenter.AppId + config.areaCode = .global + config.channelProfile = .liveBroadcasting + return config + }() + + private lazy var rtcEngine: AgoraRtcEngineKit = { + let engine = AgoraRtcEngineKit.sharedEngine(with: rtcConfig, delegate: self) + engine.setClientRole(.broadcaster) + engine.enableAudio() + engine.enableVideo() + engine.setVideoEncoderConfiguration(AgoraVideoEncoderConfiguration(size: CGSize(width: 960, height: 540), + frameRate: .fps15, + bitrate: AgoraVideoBitrateStandard, + orientationMode: .adaptative, + mirrorMode: .auto)) + engine.setVideoFrameDelegate(self) + return engine + }() + + init(localView: UIView, remoteView: UIView, channelId: String) { + self.localView = localView + self.remoteView = remoteView + self.channelId = channelId + + super.init() + + setupRtcEngin() + } + + private func setupRtcEngin() { + let videoCanvas = AgoraRtcVideoCanvas() + videoCanvas.uid = 0 + videoCanvas.view = localView + videoCanvas.renderMode = .hidden + + rtcEngine.setupLocalVideo(videoCanvas) + rtcEngine.startPreview() + rtcEngine.setDefaultAudioRouteToSpeakerphone(true) + rtcEngine.setVideoFrameDelegate(self) + + let option = AgoraRtcChannelMediaOptions() + option.publishCameraTrack = true + option.publishMicrophoneTrack = true + option.clientRoleType = .broadcaster + + NetworkManager.shared.generateToken(channelName: channelId, success: { [weak self] token in + guard let self = self else { return } + + let result = self.rtcEngine.joinChannel(byToken: token, channelId: self.channelId, uid: 0, mediaOptions: option) + if result != 0 { + ToastView.showWait(text: "joinChannel call 
failed: \(result), please check your params", view: nil) + } + }) + } + + func disable() { + rtcEngine.disableAudio() + rtcEngine.disableVideo() + } + + func leave() { + rtcEngine.stopPreview() + rtcEngine.leaveChannel(nil) + } + +} + +extension CustomViewPIPService: AgoraRtcEngineDelegate { + func rtcEngine(_ engine: AgoraRtcEngineKit, didOccur errorType: AgoraEncryptionErrorType) { + rtcEngineDelegate?.rtcEngine?(engine, didOccur: errorType) + } + + func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinChannel channel: String, withUid uid: UInt, elapsed: Int) { + rtcEngineDelegate?.rtcEngine?(engine, didJoinChannel: channel, withUid: uid, elapsed: elapsed) + } + + /// callback when a remote user is joinning the channel, note audience in live broadcast mode will NOT trigger this event + /// @param uid uid of remote joined user + /// @param elapsed time elapse since current sdk instance join the channel in ms + func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinedOfUid uid: UInt, elapsed: Int) { + // Only one remote video view is available for this + // tutorial. Here we check if there exists a surface + // view tagged as this uid. 
+ let videoCanvas = AgoraRtcVideoCanvas() + videoCanvas.uid = uid + // the view to be binded + videoCanvas.view = remoteView + videoCanvas.renderMode = .hidden + rtcEngine.setupRemoteVideo(videoCanvas) + + rtcEngineDelegate?.rtcEngine?(engine, didJoinedOfUid: uid, elapsed: elapsed) + } + + /// callback when a remote user is leaving the channel, note audience in live broadcast mode will NOT trigger this event + /// @param uid uid of remote joined user + /// @param reason reason why this user left, note this event may be triggered when the remote user + /// become an audience in live broadcasting profile + func rtcEngine(_ engine: AgoraRtcEngineKit, didOfflineOfUid uid: UInt, reason: AgoraUserOfflineReason) { + let videoCanvas = AgoraRtcVideoCanvas() + videoCanvas.uid = uid + // the view to be binded + videoCanvas.view = nil + videoCanvas.renderMode = .hidden + rtcEngine.setupRemoteVideo(videoCanvas) + + rtcEngineDelegate?.rtcEngine?(engine, didOfflineOfUid: uid, reason: reason) + } + + func rtcEngine(_ engine: AgoraRtcEngineKit, connectionChangedTo state: AgoraConnectionState, reason: AgoraConnectionChangedReason) { + rtcEngineDelegate?.rtcEngine?(engine, connectionChangedTo: state, reason: reason) + } + + /// Reports the statistics of the current call. The SDK triggers this callback once every two seconds after the user joins the channel. + /// @param stats stats struct + func rtcEngine(_ engine: AgoraRtcEngineKit, reportRtcStats stats: AgoraChannelStats) { + rtcEngineDelegate?.rtcEngine?(engine, reportRtcStats: stats) + } + + /// Reports the statistics of the uploading local audio streams once every two seconds. + /// @param stats stats struct + func rtcEngine(_ engine: AgoraRtcEngineKit, localAudioStats stats: AgoraRtcLocalAudioStats) { + rtcEngineDelegate?.rtcEngine?(engine, localAudioStats: stats) + } + + /// Reports the statistics of the video stream from each remote user/host. 
+ /// @param stats stats struct + func rtcEngine(_ engine: AgoraRtcEngineKit, remoteVideoStats stats: AgoraRtcRemoteVideoStats) { + rtcEngineDelegate?.rtcEngine?(engine, remoteVideoStats: stats) + } + + /// Reports the statistics of the audio stream from each remote user/host. + /// @param stats stats struct for current call statistics + func rtcEngine(_ engine: AgoraRtcEngineKit, remoteAudioStats stats: AgoraRtcRemoteAudioStats) { + rtcEngineDelegate?.rtcEngine?(engine, remoteAudioStats: stats) + } +} + +extension CustomViewPIPService: AgoraVideoFrameDelegate { + func onCapture(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool { + print("") + return true + } + + func onRenderVideoFrame(_ videoFrame: AgoraOutputVideoFrame, uid: UInt, channelId: String) -> Bool { + print("") + return true + } +} diff --git a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/CustomViewPIPViewController/CustomViewPIPViewController.swift b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/CustomViewPIPViewController/CustomViewPIPViewController.swift new file mode 100644 index 000000000..ebe0ac770 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/CustomViewPIPViewController/CustomViewPIPViewController.swift @@ -0,0 +1,234 @@ +// +// VideoViewController.swift +// PIPDemo +// +// Created by qinhui on 2024/8/7. +// + +import UIKit +import SnapKit +import AgoraRtcKit +import AVKit + +@available(iOS 15.0, *) +class CustomViewPIPViewController: PIPBaseViewController { + private let containerH = 250.0 + private var isJoined: Bool = false + private var pipController: AVPictureInPictureController? + private var videoCallbackController: AVPictureInPictureVideoCallViewController? 
+ private var backgroundTask: UIBackgroundTaskIdentifier = .invalid + + private var pipSizes = [ + CGSize(width: 150, height: 300), + CGSize(width: 300, height: 150) + ] + + private lazy var pipButton: UIButton = { + let button = UIButton(type: .custom) + button.setTitle("鐢讳腑鐢", for: .normal) + button.addTarget(self, action: #selector(pipAction), for: .touchUpInside) + button.backgroundColor = .purple + return button + }() + + private lazy var sizeButton: UIButton = { + let button = UIButton(type: .custom) + button.setTitle("鍒囨崲灏哄", for: .normal) + button.addTarget(self, action: #selector(sizeAction), for: .touchUpInside) + button.backgroundColor = .red + + return button + }() + + private lazy var localVideoView: UIView = { + let view = UIView() + view.backgroundColor = .green + return view + }() + + private lazy var remoteVideoView: UIView = { + let view = UIView() + view.backgroundColor = .orange + return view + }() + + private lazy var videoContainerView: UIView = { + let view = UIView() + view.backgroundColor = .purple + return view + }() + + private var rtcService: CustomViewPIPService! 
+ + override func viewDidLoad() { + super.viewDidLoad() + view.backgroundColor = .white + initRtc() + configViews() + if AVPictureInPictureController.isPictureInPictureSupported() { + configPIPViewController() + } + } + + override func viewWillDisappear(_ animated: Bool) { + super.viewWillDisappear(animated) + guard let pipController = pipController else { return } + pipController.stopPictureInPicture() + } + + override func viewDidDisappear(_ animated: Bool) { + super.viewDidDisappear(animated) + rtcService.disable() + + if isJoined { + rtcService.leave() + } + } +} + +@available(iOS 15.0, *) +extension CustomViewPIPViewController { + @objc func pipAction() { + guard let pipController = pipController else { return } + + if pipController.isPictureInPictureActive { + pipController.stopPictureInPicture() + } else { + pipController.startPictureInPicture() + } + } + + @objc func sizeAction() { + guard let videoCallbackController = videoCallbackController else { return } + + let i = Int.random(in: 0.. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/zh-Hans.lproj/PictureInPicture.strings b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PictureInPicture.strings similarity index 100% rename from iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/zh-Hans.lproj/PictureInPicture.strings rename to iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PictureInPicture.strings diff --git a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PictureInPicture.swift b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PictureInPicture.swift index 01c87c5ea..f686853bb 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PictureInPicture.swift +++ b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PictureInPicture.swift @@ -1,334 +1,59 @@ // -// PictureInPicture.swift -// APIExample +// HomeViewController.swift +// PIPDemo // -// Created by 鑳℃鼎杈 on 2022/4/6. -// Copyright 漏 2022 Agora Corp. All rights reserved. +// Created by qinhui on 2024/8/8. // import UIKit -import AGEVideoLayout -import AgoraRtcKit -import MediaPlayer -class PictureInPictureEntry: UIViewController { - @IBOutlet weak var joinButton: AGButton! - @IBOutlet weak var channelTextField: AGTextField! - let identifier = "PictureInPicture" - - override func viewDidLoad() { - super.viewDidLoad() - } - - @IBAction func doJoinPressed(sender: AGButton) { - guard let channelName = channelTextField.text else {return} - // resign channel text field - channelTextField.resignFirstResponder() - - let storyBoard: UIStoryboard = UIStoryboard(name: identifier, bundle: nil) - // create new view controller every time to ensure we get a clean vc - guard let newViewController = storyBoard.instantiateViewController(withIdentifier: identifier) as? 
BaseViewController else { - return - } - newViewController.title = channelName - newViewController.configs = ["channelName": channelName] - navigationController?.pushViewController(newViewController, animated: true) +class Model { + var title: String + var cls: T.Type + init(title: String, cls: T.Type) { + self.title = title + self.cls = cls } } -@available(iOS 15.0, *) -class PictureInPictureMain: BaseViewController { - var localVideo = Bundle.loadVideoView(type: .local, audioOnly: false) - var remoteVideo = Bundle.loadView(fromNib: "VideoViewSampleBufferDisplayView", withType: SampleBufferDisplayView.self) - var agoraKit: AgoraRtcEngineKit! - private lazy var callViewController: AVPictureInPictureVideoCallViewController = { - let callViewController = AVPictureInPictureVideoCallViewController() - callViewController.preferredContentSize = view.bounds.size - callViewController.view.backgroundColor = .clear - callViewController.modalPresentationStyle = .overFullScreen - return callViewController - }() - var pipController: AVPictureInPictureController? - var remoteUid: UInt? 
- // indicate if current instance has joined channel - var isJoined: Bool = false - private lazy var containerView: UIView = { - let view = UIView() - view.backgroundColor = .red - return view +class PictureInPicture: UITableViewController { + lazy var dataArray: [Model] = { + if #available(iOS 15.0, *) { + return [ + Model(title: "SDK 娓叉煋", cls: CustomViewPIPViewController.self), + Model(title: "澶氫汉瑙嗛鑷覆鏌", cls: PixelBufferPIPViewController.self) + ] + } else { + // Fallback on earlier versions + return [] + } }() - // swiftlint: disable function_body_length override func viewDidLoad() { super.viewDidLoad() - // layout render view - localVideo.setPlaceholder(text: "Local Host".localized) - remoteVideo.setPlaceholder(text: "Remote Host".localized) - view.addSubview(containerView) - containerView.frame = CGRect(x: 0, y: 0, width: SCREENSIZE.width, height: 280) - containerView.addSubview(localVideo) - containerView.addSubview(remoteVideo) - localVideo.translatesAutoresizingMaskIntoConstraints = false - remoteVideo.translatesAutoresizingMaskIntoConstraints = false - localVideo.leadingAnchor.constraint(equalTo: containerView.leadingAnchor).isActive = true - localVideo.topAnchor.constraint(equalTo: containerView.topAnchor).isActive = true - localVideo.bottomAnchor.constraint(equalTo: containerView.bottomAnchor).isActive = true - localVideo.widthAnchor.constraint(equalTo: containerView.widthAnchor, multiplier: 0.5).isActive = true - remoteVideo.trailingAnchor.constraint(equalTo: containerView.trailingAnchor).isActive = true - remoteVideo.topAnchor.constraint(equalTo: containerView.topAnchor).isActive = true - remoteVideo.bottomAnchor.constraint(equalTo: containerView.bottomAnchor).isActive = true - remoteVideo.widthAnchor.constraint(equalTo: containerView.widthAnchor, multiplier: 0.5).isActive = true - - pipController = AVPictureInPictureController(contentSource: .init(activeVideoCallSourceView: containerView, - contentViewController: callViewController)) - 
pipController?.canStartPictureInPictureAutomaticallyFromInline = true - pipController?.delegate = self - //iOS 15 workaround - pipController?.setValue(1, forKey: "controlsStyle") - - // set up agora instance when view loadedlet config = AgoraRtcEngineConfig() - let config = AgoraRtcEngineConfig() - config.appId = KeyCenter.AppId - config.channelProfile = .liveBroadcasting - config.areaCode = GlobalSettings.shared.area - // setup log file path - let logConfig = AgoraLogConfig() - logConfig.level = .info - config.logConfig = logConfig - agoraKit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: self) - // Configuring Privatization Parameters - Util.configPrivatization(agoraKit: agoraKit) - // get channel name from configs - guard let channelName = configs["channelName"] as? String, - let resolution = GlobalSettings.shared.getSetting(key: "resolution")?.selectedOption().value as? CGSize, - let fps = GlobalSettings.shared.getSetting(key: "fps")?.selectedOption().value as? AgoraVideoFrameRate, - let orientation = GlobalSettings.shared.getSetting(key: "orientation")? - .selectedOption().value as? 
AgoraVideoOutputOrientationMode else { - return - } - // To enable MPNowPlayingInfoCenter, you need to add the following two private parameters - agoraKit.setParameters("{\"adm_mix_with_others\":false}") - agoraKit.setParameters("{\"che.audio.nonmixable.option\":true}") - - // make myself a broadcaster - agoraKit.setChannelProfile(.liveBroadcasting) - agoraKit.setClientRole(GlobalSettings.shared.getUserRole()) - - // enable video module and set up video encoding configs - agoraKit.enableVideo() - agoraKit.setVideoEncoderConfiguration(AgoraVideoEncoderConfiguration(size: resolution, - frameRate: fps, - bitrate: AgoraVideoBitrateStandard, - orientationMode: orientation, - mirrorMode: AgoraVideoMirrorMode.auto)) - // set up local video to render your local camera preview - let videoCanvas = AgoraRtcVideoCanvas() - videoCanvas.uid = 0 - // the view to be binded - videoCanvas.view = localVideo.videoView - videoCanvas.renderMode = .hidden - agoraKit.setupLocalVideo(videoCanvas) - - // Set audio route to speaker - agoraKit.setDefaultAudioRouteToSpeakerphone(true) - - // Setup raw video data frame observer - agoraKit.setVideoFrameDelegate(self) - - // start joining channel - // 1. Users can only see each other after they join the - // same channel successfully using the same app id. - // 2. If app certificate is turned on at dashboard, token is needed - // when joining channel. 
The channel name and uid used to calculate - // the token has to match the ones used for channel join - let option = AgoraRtcChannelMediaOptions() - NetworkManager.shared.generateToken(channelName: channelName, success: { token in - let result = self.agoraKit.joinChannel(byToken: token, channelId: channelName, uid: 0, mediaOptions: option, joinSuccess: nil) - if result != 0 { - // Usually happens with invalid parameters - // Error code description can be found at: - // en: https://api-ref.agora.io/en/video-sdk/ios/4.x/documentation/agorartckit/agoraerrorcode - // cn: https://doc.shengwang.cn/api-ref/rtc/ios/error-code - self.showAlert(title: "Error", message: "joinChannel call failed: \(result), please check your params") - } - }) - - NotificationCenter.default.addObserver(self, - selector: #selector(didEnterBackgroundNotification), - name: UIApplication.willResignActiveNotification, - object: nil) + title = "Picture In Picture" + tableView.register(UITableViewCell.self, forCellReuseIdentifier: "cell") } - // swiftlint: enable function_body_length - override func viewDidAppear(_ animated: Bool) { - super.viewDidAppear(animated) - setupPlayintInfoCenter() - } - - private func setupPlayintInfoCenter() { - UIApplication.shared.beginReceivingRemoteControlEvents() - var nowPlayingInfo = [String: Any]() - let path = Bundle.main.path(forResource: "agora-logo", ofType: "png") ?? 
"" - guard let image = UIImage(contentsOfFile: path) else { return } - let artWork = MPMediaItemArtwork(boundsSize: image.size) { _ in - return image - } - nowPlayingInfo[MPMediaItemPropertyArtwork] = artWork - nowPlayingInfo[MPMediaItemPropertyTitle] = "Song Title" - nowPlayingInfo[MPMediaItemPropertyArtist] = "Artist Name" - nowPlayingInfo[MPMediaItemPropertyAlbumTitle] = "Album Name" - nowPlayingInfo[MPNowPlayingInfoPropertyIsLiveStream] = true - MPNowPlayingInfoCenter.default().nowPlayingInfo = nowPlayingInfo - MPNowPlayingInfoCenter.default().playbackState = .playing + override func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat { + return 50 } - override var canBecomeFirstResponder: Bool { - true + override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { + dataArray.count } - deinit { - NotificationCenter.default.removeObserver(self) + override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) { + let model = dataArray[indexPath.row] + let vc = ChannelViewController() + vc.pipCls = model.cls + self.navigationController?.pushViewController(vc, animated: true) } - @objc - private func didEnterBackgroundNotification() { - onPIP(_btn: UIButton()) - } - - @IBAction func onPIP(_btn: UIButton) { - if let currentPipController = pipController { - currentPipController.startPictureInPicture() - } else { - showAlert(message: "PIP Support iOS 15+".localized) - } - } - - override func willMove(toParent parent: UIViewController?) 
{ - if parent == nil { - // leave channel when exiting the view - if isJoined { - if let pipController = pipController, pipController.isPictureInPictureActive { - pipController.stopPictureInPicture() - } - agoraKit.leaveChannel { (stats) -> Void in - LogUtils.log(message: "left channel, duration: \(stats.duration)", level: .info) - } - } - } - } -} - -/// agora rtc engine delegate events -@available(iOS 15.0, *) -extension PictureInPictureMain: AgoraRtcEngineDelegate { - /// callback when warning occured for agora sdk, warning can usually be ignored, still it's nice to check out - /// what is happening - /// Warning code description can be found at: - /// en: https://api-ref.agora.io/en/voice-sdk/ios/3.x/Constants/AgoraWarningCode.html - /// cn: https://docs.agora.io/cn/Voice/API%20Reference/oc/Constants/AgoraWarningCode.html - /// @param warningCode warning code of the problem - func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurWarning warningCode: AgoraWarningCode) { - LogUtils.log(message: "warning: \(warningCode.description)", level: .warning) - } - - /// callback when error occured for agora sdk, you are recommended to display the error descriptions on demand - /// to let user know something wrong is happening - /// Error code description can be found at: - /// en: https://api-ref.agora.io/en/video-sdk/ios/4.x/documentation/agorartckit/agoraerrorcode - /// cn: https://doc.shengwang.cn/api-ref/rtc/ios/error-code - /// @param errorCode error code of the problem - func rtcEngine(_ engine: AgoraRtcEngineKit, didOccurError errorCode: AgoraErrorCode) { - LogUtils.log(message: "error: \(errorCode)", level: .error) - self.showAlert(title: "Error", message: "Error \(errorCode.description) occur") - } - - /// callback when the local user joins a specified channel. 
- /// @param channel - /// @param uid uid of local user - /// @param elapsed time elapse since current sdk instance join the channel in ms - func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinChannel channel: String, withUid uid: UInt, elapsed: Int) { - isJoined = true - LogUtils.log(message: "Join \(channel) with uid \(uid) elapsed \(elapsed)ms", level: .info) - } - - /// callback when a remote user is joinning the channel, note audience in live broadcast mode will NOT trigger this event - /// @param uid uid of remote joined user - /// @param elapsed time elapse since current sdk instance join the channel in ms - func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinedOfUid uid: UInt, elapsed: Int) { - LogUtils.log(message: "remote user join: \(uid) \(elapsed)ms", level: .info) - - remoteVideo.videoView.reset() - } - - /// callback when a remote user is leaving the channel, note audience in live broadcast mode will NOT trigger this event - /// @param uid uid of remote joined user - /// @param reason reason why this user left, note this event may be triggered when the remote user - /// become an audience in live broadcasting profile - func rtcEngine(_ engine: AgoraRtcEngineKit, didOfflineOfUid uid: UInt, reason: AgoraUserOfflineReason) { - LogUtils.log(message: "remote user left: \(uid) reason \(reason)", level: .info) - - // to unlink your view from sdk, so that your view reference will be released - // note the video will stay at its last frame, to completely remove it - // you will need to remove the EAGL sublayer from your binded view -// remoteVideo.videoView.reset() - } - func rtcEngine(_ engine: AgoraRtcEngineKit, - remoteVideoStateChangedOfUid uid: UInt, - state: AgoraVideoRemoteState, reason: AgoraVideoRemoteReason, - elapsed: Int) { - if reason == .remoteMuted { - let pixelBuffer = MediaUtils.cvPixelBufferRef(from: UIImage(named: "agora-logo") ?? 
UIImage()).takeRetainedValue() - let videoFrame = AgoraOutputVideoFrame() - videoFrame.pixelBuffer = pixelBuffer - videoFrame.width = Int32(remoteVideo.videoView.frame.width) - videoFrame.height = Int32(remoteVideo.videoView.frame.height) - remoteVideo.videoView.renderVideoPixelBuffer(videoFrame) - } - } -} - -// MARK: - AgoraVideoDataFrameProtocol -@available(iOS 15.0, *) -extension PictureInPictureMain: AgoraVideoFrameDelegate { - func onCapture(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool { - true - } - - func onRenderVideoFrame(_ videoFrame: AgoraOutputVideoFrame, uid: UInt, channelId: String) -> Bool { - remoteVideo.videoView.renderVideoPixelBuffer(videoFrame) - return true - } - - func getVideoFormatPreference() -> AgoraVideoFormat { - .cvPixelBGRA - } - func getRotationApplied() -> Bool { - true - } -} - -@available(iOS 15.0, *) -extension PictureInPictureMain: AVPictureInPictureControllerDelegate { - func pictureInPictureControllerWillStartPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) { - } - - func pictureInPictureControllerDidStartPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) { - containerView.removeFromSuperview() - let vc = pictureInPictureController.contentSource?.activeVideoCallContentViewController - containerView.frame.size = vc?.view.bounds.size ?? 
.zero - vc?.view.addSubview(containerView) - } - - func pictureInPictureController(_ pictureInPictureController: AVPictureInPictureController, - failedToStartPictureInPictureWithError error: Error) { - } - - func pictureInPictureControllerWillStopPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) { - containerView.removeFromSuperview() - containerView.frame.size = CGSize(width: SCREENSIZE.width, height: 280) - view.addSubview(containerView) - } - - func pictureInPictureControllerDidStopPictureInPicture(_ pictureInPictureController: AVPictureInPictureController) { + override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { + let cell = tableView.dequeueReusableCell(withIdentifier: "cell", for: indexPath) + let model = dataArray[indexPath.row] + cell.textLabel?.text = model.title + return cell } } diff --git a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PixelBufferPIPViewController/PixelBufferPIPService.swift b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PixelBufferPIPViewController/PixelBufferPIPService.swift new file mode 100644 index 000000000..d34047993 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PixelBufferPIPViewController/PixelBufferPIPService.swift @@ -0,0 +1,114 @@ +// +// PixelBufferPIPService.swift +// PIPDemo +// +// Created by qinhui on 2024/8/8. +// + +import Foundation +import AgoraRtcKit + +class PixelBufferPIPService: NSObject { + var videoFrameDelegte: AgoraVideoFrameDelegate? + var rtcEngineDelegate: AgoraRtcEngineDelegate? + weak var localView: PixelBufferRenderView? 
+ + var uid: UInt = 0 + var channelId: String + + private lazy var rtcConfig: AgoraRtcEngineConfig = { + let config = AgoraRtcEngineConfig() + config.appId = KeyCenter.AppId + config.areaCode = .global + config.channelProfile = .liveBroadcasting + return config + }() + + private lazy var rtcEngine: AgoraRtcEngineKit = { + let engine = AgoraRtcEngineKit.sharedEngine(with: rtcConfig, delegate: self) + engine.setClientRole(.broadcaster) + engine.enableAudio() + engine.enableVideo() + engine.setVideoEncoderConfiguration(AgoraVideoEncoderConfiguration(size: CGSize(width: 960, height: 540), + frameRate: .fps15, + bitrate: AgoraVideoBitrateStandard, + orientationMode: .fixedPortrait, + mirrorMode: .auto)) + engine.setVideoFrameDelegate(self) + + return engine + }() + + init(channelId: String, uid: UInt, localView: PixelBufferRenderView) { + self.channelId = channelId + self.uid = uid + self.localView = localView + super.init() + + setupRtcEngin() + } + + private func setupRtcEngin() { + let videoCanvas = AgoraRtcVideoCanvas() + videoCanvas.uid = 0 + videoCanvas.view = localView + videoCanvas.renderMode = .hidden + + rtcEngine.setupLocalVideo(videoCanvas) + rtcEngine.startPreview() + + rtcEngine.setDefaultAudioRouteToSpeakerphone(true) + rtcEngine.setVideoFrameDelegate(self) + + let option = AgoraRtcChannelMediaOptions() + option.publishCameraTrack = true + option.publishMicrophoneTrack = true + option.clientRoleType = .broadcaster + + NetworkManager.shared.generateToken(channelName: channelId, success: { [weak self] token in + guard let self = self else { return } + let result = self.rtcEngine.joinChannel(byToken: token, channelId: self.channelId, uid: self.uid, mediaOptions: option) + if result != 0 { + ToastView.showWait(text: "joinChannel call failed: \(result), please check your params", view: nil) + } else { + self.localView?.uid = self.uid + } + }) + } + + func disable() { + rtcEngine.disableAudio() + rtcEngine.disableVideo() + } + + func leave() { + 
rtcEngine.stopPreview() + rtcEngine.leaveChannel(nil) + } + +} + +extension PixelBufferPIPService: AgoraVideoFrameDelegate { + func onCapture(_ videoFrame: AgoraOutputVideoFrame, sourceType: AgoraVideoSourceType) -> Bool { + return ((self.videoFrameDelegte?.onCapture?(videoFrame, sourceType: sourceType)) != nil) + } + + func onRenderVideoFrame(_ videoFrame: AgoraOutputVideoFrame, uid: UInt, channelId: String) -> Bool { + return ((self.videoFrameDelegte?.onRenderVideoFrame?(videoFrame, uid: uid, channelId: channelId)) != nil) + } +} + +extension PixelBufferPIPService: AgoraRtcEngineDelegate { + func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinChannel channel: String, withUid uid: UInt, elapsed: Int) { + rtcEngineDelegate?.rtcEngine?(engine, didJoinChannel: channel, withUid: uid, elapsed: elapsed) + } + + func rtcEngine(_ engine: AgoraRtcEngineKit, didJoinedOfUid uid: UInt, elapsed: Int) { + rtcEngineDelegate?.rtcEngine?(engine, didJoinedOfUid: uid, elapsed: elapsed) + } + + func rtcEngine(_ engine: AgoraRtcEngineKit, didOfflineOfUid uid: UInt, reason: AgoraUserOfflineReason) { + rtcEngineDelegate?.rtcEngine?(engine, didOfflineOfUid: uid, reason: reason) + } + +} diff --git a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PixelBufferPIPViewController/PixelBufferPIPViewController.swift b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PixelBufferPIPViewController/PixelBufferPIPViewController.swift new file mode 100644 index 000000000..d501f8a46 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PixelBufferPIPViewController/PixelBufferPIPViewController.swift @@ -0,0 +1,264 @@ +// +// PixelBufferPIPViewController.swift +// PIPDemo +// +// Created by qinhui on 2024/8/8. 
+// +import UIKit +import AVKit +import AgoraRtcKit + +@available(iOS 15.0, *) +class PixelBufferPIPViewController: PIPBaseViewController { + private let mockUid: UInt = UInt.random(in: 0...100000) + private var pipController: AVPictureInPictureController? + private var videoCallbackController: AVPictureInPictureVideoCallViewController? + var isJoined = false + private var pipSizes = [ + CGSize(width: 150, height: 300), + CGSize(width: 300, height: 150) + ] + + private lazy var pipButton: UIButton = { + let button = UIButton(type: .custom) + button.setTitle("鐢讳腑鐢", for: .normal) + button.setTitleColor(.black, for: .normal) + button.addTarget(self, action: #selector(pipAction), for: .touchUpInside) + + return button + }() + + private lazy var sizeButton: UIButton = { + let button = UIButton(type: .custom) + button.setTitle("鍒囨崲灏哄", for: .normal) + button.setTitleColor(.black, for: .normal) + button.addTarget(self, action: #selector(sizeAction), for: .touchUpInside) + + return button + }() + + private lazy var topLeftView: PixelBufferRenderView = { + let view = PixelBufferRenderView() + view.backgroundColor = .blue + return view + }() + + private lazy var topRightView: PixelBufferRenderView = { + let view = PixelBufferRenderView() + view.backgroundColor = .red + return view + }() + + private lazy var bottomLeftView: PixelBufferRenderView = { + let view = PixelBufferRenderView() + view.backgroundColor = .green + return view + }() + + private lazy var bottomRightView: PixelBufferRenderView = { + let view = PixelBufferRenderView() + view.backgroundColor = .purple + return view + }() + + private lazy var videoContainerView: UIView = { + let view = UIView() + view.backgroundColor = .purple + return view + }() + + private lazy var displayViews: NSHashTable = { + let table = NSHashTable(options: .weakMemory, capacity: 4) + table.add(self.topLeftView) + table.add(self.topRightView) + table.add(bottomLeftView) + table.add(bottomRightView) + return table + }() + + private var 
rtcService: PixelBufferPIPService! + + override func viewDidLoad() { + super.viewDidLoad() + view.backgroundColor = .white + initRtc() + configViews() + if AVPictureInPictureController.isPictureInPictureSupported() { + configPIPViewController() + } + } + + override func viewWillDisappear(_ animated: Bool) { + super.viewWillDisappear(animated) + guard let pipController = pipController else { return } + pipController.stopPictureInPicture() + } + + override func viewDidDisappear(_ animated: Bool) { + super.viewDidDisappear(animated) + rtcService.disable() + + if isJoined { + rtcService.leave() + } + } +} + +@available(iOS 15.0, *) +extension PixelBufferPIPViewController { + private func configPIPViewController() { + let videoCallViewController = AVPictureInPictureVideoCallViewController() + videoCallViewController.preferredContentSize = view.bounds.size + videoCallViewController.view.backgroundColor = .clear + videoCallViewController.modalPresentationStyle = .overFullScreen + + self.videoCallbackController = videoCallViewController + pipController = AVPictureInPictureController(contentSource: .init(activeVideoCallSourceView: videoContainerView, + contentViewController: videoCallViewController)) + pipController?.canStartPictureInPictureAutomaticallyFromInline = true + pipController?.delegate = self + pipController?.setValue(1, forKey: "controlsStyle") + } + + private func configViews() { + self.view.addSubview(videoContainerView) + videoContainerView.addSubview(topLeftView) + videoContainerView.addSubview(topRightView) + videoContainerView.addSubview(bottomLeftView) + videoContainerView.addSubview(bottomRightView) + + self.view.addSubview(pipButton) + self.view.addSubview(sizeButton) + + videoContainerView.snp.makeConstraints { make in + make.left.top.right.bottom.equalTo(0) + } + + topLeftView.snp.makeConstraints { make in + make.top.left.equalToSuperview() + make.width.equalToSuperview().dividedBy(2) + make.height.equalToSuperview().dividedBy(2) + } + + 
topRightView.snp.makeConstraints { make in + make.top.equalToSuperview() + make.right.equalToSuperview() + make.width.equalToSuperview().dividedBy(2) + make.height.equalToSuperview().dividedBy(2) + } + + bottomLeftView.snp.makeConstraints { make in + make.bottom.equalToSuperview() + make.left.equalToSuperview() + make.width.equalToSuperview().dividedBy(2) + make.height.equalToSuperview().dividedBy(2) + } + + bottomRightView.snp.makeConstraints { make in + make.bottom.equalToSuperview() + make.right.equalToSuperview() + make.width.equalToSuperview().dividedBy(2) + make.height.equalToSuperview().dividedBy(2) + } + + pipButton.snp.makeConstraints { make in + make.center.equalTo(view) + } + + sizeButton.snp.makeConstraints { make in + make.top.equalTo(self.pipButton.snp.bottom).offset(10) + make.centerX.equalTo(self.pipButton.snp.centerX) + } + } + + private func initRtc() { + guard let channelId = channelId else { return } + rtcService = PixelBufferPIPService(channelId: channelId, uid: mockUid, localView: topLeftView) + rtcService.videoFrameDelegte = self + rtcService.rtcEngineDelegate = self + } + + @objc func pipAction() { + guard let pipController = pipController else { return } + + if pipController.isPictureInPictureActive { + pipController.stopPictureInPicture() + } else { + pipController.startPictureInPicture() + } + } + + @objc func sizeAction() { + guard let videoCallbackController = videoCallbackController else { return } + + let i = Int.random(in: 0.. 
Bool { + if let view = displayViews.allObjects.first(where: { $0.uid == mockUid }), let pixelBuffer = videoFrame.pixelBuffer { + view.renderVideoPixelBuffer(pixelBuffer: pixelBuffer, width: videoFrame.width, height: videoFrame.height) + } + + return true + } + + func onRenderVideoFrame(_ videoFrame: AgoraOutputVideoFrame, uid: UInt, channelId: String) -> Bool { + if let view = displayViews.allObjects.first(where: { $0.uid == uid }) { + view.renderFromVideoFrameData(videoData: videoFrame, uid: Int(uid)) + } + + return true + } +} diff --git a/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PixelBufferPIPViewController/PixelBufferRenderView.swift b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PixelBufferPIPViewController/PixelBufferRenderView.swift new file mode 100644 index 000000000..8b8bc93ce --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/PictureInPicture/PixelBufferPIPViewController/PixelBufferRenderView.swift @@ -0,0 +1,179 @@ +// +// PixelBufferRenderView.swift +// PIPDemo +// +// Created by qinhui on 2024/8/8. 
+// + +import UIKit +import AVFoundation +import AgoraRtcKit + +enum VideoPosition { + case topLeft + case topRight + case bottomLeft + case bottomRight +} + +class PixelBufferRenderView: UIView { + var uid: UInt = 0 + private var videoWidth: Int32 = 0 + private var videoHeight: Int32 = 0 + + lazy var displayLayer: AVSampleBufferDisplayLayer = { + let layer = AVSampleBufferDisplayLayer() + return layer + }() + + override init(frame: CGRect) { + super.init(frame: frame) + configLayers() + } + + required init?(coder: NSCoder) { + fatalError("init(coder:) has not been implemented") + } + + private func configLayers() { + self.layer.addSublayer(displayLayer) + displayLayer.frame = self.bounds + } + + func createLayer() -> AVSampleBufferDisplayLayer { + let layer = AVSampleBufferDisplayLayer() + return layer + } + + func clean() { + uid = 0 + self.displayLayer.removeFromSuperlayer() + self.displayLayer = createLayer() + self.layer.addSublayer(displayLayer) + } + + func renderFromVideoFrameData(videoData: AgoraOutputVideoFrame, uid: Int) { + let width = videoData.width + let height = videoData.height + let yStride = videoData.yStride + let uStride = videoData.uStride + let vStride = videoData.vStride + + let yBuffer = videoData.yBuffer + let uBuffer = videoData.uBuffer + let vBuffer = videoData.vBuffer + + autoreleasepool { + var pixelBuffer: CVPixelBuffer? 
+ let pixelAttributes: [String: Any] = [kCVPixelBufferIOSurfacePropertiesKey as String: [:]] + + let result = CVPixelBufferCreate(kCFAllocatorDefault, + Int(width), + Int(height), + kCVPixelFormatType_420YpCbCr8Planar, + pixelAttributes as CFDictionary, + &pixelBuffer) + + guard result == kCVReturnSuccess, let pixelBuffer = pixelBuffer else { + print("Unable to create CVPixelBuffer: \(result)") + return + } + + CVPixelBufferLockBaseAddress(pixelBuffer, .init(rawValue: 0)) + let yPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0) + let pixelBufferYBytes = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0) + + for i in 0.. 0, videoHeight > 0, !self.frame.size.equalTo(CGSize.zero) else { + return + } + + let viewWidth = self.frame.size.width + let viewHeight = self.frame.size.height + + let videoRatio = CGFloat(videoWidth) / CGFloat(videoHeight) + let viewRatio = viewWidth / viewHeight + + var videoSize = CGSize.zero + if videoRatio >= viewRatio { + videoSize.height = viewHeight + videoSize.width = videoSize.height * videoRatio + } else { + videoSize.width = viewWidth + videoSize.height = videoSize.width / videoRatio + } + + let xOffset = max(0, (viewWidth - videoSize.width) / 2) + let yOffset = max(0, (viewHeight - videoSize.height) / 2) + let renderRect = CGRect(x: xOffset, y: yOffset, width: videoSize.width, height: videoSize.height) + + if !renderRect.equalTo(displayLayer.frame) { + displayLayer.frame = renderRect + } + } + +} diff --git a/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/Base.lproj/RtePlayer.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/Base.lproj/RtePlayer.storyboard new file mode 100644 index 000000000..e41fe78b8 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/Base.lproj/RtePlayer.storyboard @@ -0,0 +1,148 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/PlayerObserver.swift b/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/PlayerObserver.swift new file mode 100644 index 000000000..fe36b0ecc --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/PlayerObserver.swift @@ -0,0 +1,58 @@ +// +// PlayerObserver.swift +// APIExample +// +// Created by wushengtao on 2024/10/13. +// Copyright 漏 2024 Agora Corp. All rights reserved. +// + +import AgoraRtcKit + +@objc protocol PlayerObserverDelegate: AnyObject { + + @objc optional func onStateChanged(oldState: AgoraRtePlayerState, newState: AgoraRtePlayerState, error: AgoraRteError?) + + @objc optional func onPositionChanged(currentTime: UInt64, utcTime: UInt64) + + @objc optional func onResolutionChanged(width: Int, height: Int) + + @objc optional func onEvent(event: AgoraRtePlayerEvent) + + @objc optional func onMetadata(type: AgoraRtePlayerMetadataType, data: Data) + + @objc optional func onPlayerInfoUpdated(info: AgoraRtePlayerInfo) + + @objc optional func onAudioVolumeIndication(volume: Int32) +} + +class PlayerObserver: AgoraRtePlayerObserver { + weak var delegate: PlayerObserverDelegate? + + func onStateChanged(oldState: AgoraRtePlayerState, newState: AgoraRtePlayerState, error: AgoraRteError?) 
{ + self.delegate?.onStateChanged?(oldState: oldState, newState: newState, error: error) + } + + func onPositionChanged(currentTime: UInt64, utcTime: UInt64) { + self.delegate?.onPositionChanged?(currentTime: currentTime, utcTime: utcTime) + } + + func onResolutionChanged(width: Int, height: Int) { + self.delegate?.onResolutionChanged?(width: width, height: height) + } + + func onEvent(event: AgoraRtePlayerEvent) { + self.delegate?.onEvent?(event: event) + } + + func onMetadata(type: AgoraRtePlayerMetadataType, data: Data) { + self.delegate?.onMetadata?(type: type, data: data) + } + + func onPlayerInfoUpdated(info: AgoraRtePlayerInfo) { + self.delegate?.onPlayerInfoUpdated?(info: info) + } + + func onAudioVolumeIndication(volume: Int32) { + self.delegate?.onAudioVolumeIndication?(volume: volume) + } +} diff --git a/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/RtePlayer.swift b/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/RtePlayer.swift new file mode 100644 index 000000000..532c8675e --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/RtePlayer.swift @@ -0,0 +1,204 @@ +// +// RtePlayer.swift +// APIExample +// +// Created by 寮犱咕娉 on 2020/4/17. +// Copyright 漏 2020 Agora Corp. All rights reserved. +// +import UIKit +import AgoraRtcKit + +class RtePlayerEntry: UIViewController { + @IBOutlet weak var joinButton: UIButton! + @IBOutlet weak var channelTextField: UITextField! 
+ let identifier = "RtePlayer" + private var backgroundColor: UInt32 = 0x000000 + + private lazy var agoraKit: AgoraRtcEngineKit = { + let config = AgoraRtcEngineConfig() + config.appId = KeyCenter.AppId + config.channelProfile = .liveBroadcasting + let kit = AgoraRtcEngineKit.sharedEngine(with: config, delegate: nil) + Util.configPrivatization(agoraKit: kit) + kit.setLogFile(LogUtils.sdkLogPath()) + return kit + }() + + override func viewDidLoad() { + super.viewDidLoad() + channelTextField.text = "rte://\(KeyCenter.AppId)/{\("Live Channel Name".localized)}" + } + + override func willMove(toParent parent: UIViewController?) { + if parent == nil { + AgoraRtcEngineKit.destroy() + } + } + + @IBAction func doJoinPressed(sender: UIButton) { + // resign channel text field + channelTextField.resignFirstResponder() + + guard let channelName = channelTextField.text else { return } + let storyBoard: UIStoryboard = UIStoryboard(name: identifier, bundle: nil) + // create new view controller every time to ensure we get a clean vc + guard let newViewController = storyBoard.instantiateViewController(withIdentifier: identifier) as? BaseViewController else { return + } + newViewController.title = channelName + newViewController.configs = ["channelName": channelName, + "backgroundColor": backgroundColor, + "engine": agoraKit] + navigationController?.pushViewController(newViewController, animated: true) + } + + override func touchesEnded(_ touches: Set, with event: UIEvent?) { + super.touchesEnded(touches, with: event) + self.view.endEditing(true) + } +} + +class RtePlayerMain: BaseViewController { + @IBOutlet weak var playerView: UIView? + private var playerUrl: String? 
+ private lazy var rte: AgoraRte = { + let initialConfig = AgoraRteInitialConfig() + let rte = AgoraRte(initialConfig: initialConfig) + + let config = AgoraRteConfig() + config.setAppId(KeyCenter.AppId, error: nil) + rte.setConfigs(config, error: nil) + + return rte + }() + + private lazy var playerObserver: PlayerObserver = { + let observer = PlayerObserver() + observer.delegate = self + + return observer + }() + + private lazy var player: AgoraRtePlayer = { + // Create player + let playerInitialConfig = AgoraRtePlayerInitialConfig() + self.player = AgoraRtePlayer(rte: rte, initialConfig: playerInitialConfig) + + let playerConfig = AgoraRtePlayerConfig() + playerConfig.setAutoPlay(true, error: nil) + player.setConfigs(playerConfig, error: nil) + + // Create playerObserver + player.register(playerObserver, error: nil) + + return player + }() + + private lazy var canvas: AgoraRteCanvas = { + // Create canvas + let canvasInitialConfig = AgoraRteCanvasInitialConfig() + let canvas = AgoraRteCanvas(rte: self.rte, initialConfig: canvasInitialConfig) + + let canvasConfig = AgoraRteCanvasConfig() + canvasConfig.setVideoRenderMode(.fit, error: nil) + canvas.setConfigs(canvasConfig, error: nil) + + return canvas + }() + + override func viewDidLoad() { + super.viewDidLoad() + + guard let url = configs["channelName"] as? String else {return} + // layout render view + let channelName = url.components(separatedBy: "/").last ?? "" + NetworkManager.shared.generateToken(channelName: channelName, success: {[weak self] token in + guard let self = self else {return} + print("token: \(token ?? "")") + self.playerUrl = "\(url)?token=\(self.urlEncoded(content: token ?? "") ?? "")" + self.initRte {[weak self] success in + print("initRte: ret: \(success)") + guard success else {return} + self?.onRtePlayerStart() + } + }) + } + + private func urlEncoded(content: String?) -> String? 
{ + // Create a mutable allowed character set + var allowedCharacters = CharacterSet.urlQueryAllowed + + // Remove the characters that need to be encoded + allowedCharacters.remove(charactersIn: "+/") + + // Encode the string + if let encodedString = content?.addingPercentEncoding(withAllowedCharacters: allowedCharacters) { + print("Encoded token: \(encodedString)") + return encodedString + } else { + print("Encoding failed.") + return nil + } + } + + private func initRte(_ completion: ((Bool) -> Void)?) { + let initErr: AgoraRteError? = nil + rte.initMediaEngine({[weak self] err in + guard let self = self else {return} + print("initMediaEngine callback: \(err?.code().rawValue ?? 0)") + let success = err?.code() == .ok ? true : false + DispatchQueue.main.async { + guard success, let playerView = self.playerView else { + completion?(false) + return + } + + self.canvas.add(playerView, config: nil, error: nil) + self.player.setCanvas(self.canvas, error: nil) + completion?(true) + } + }, error: initErr) + print("initMediaEngine err: \(initErr?.code().rawValue ?? 0)") + } + + @IBAction func onRtePlayerStart() { + guard let url = self.playerUrl else {return} + print("open: \(url)") + player.open(withUrl: url, startTime: 0) { err in + print("open completion: \(err?.code().rawValue ?? 0)") + } + } + + @IBAction func onRtePlayerStop() { + player.stop(nil) + } +} + +extension RtePlayerMain: PlayerObserverDelegate { + func onStateChanged(oldState: AgoraRtePlayerState, newState: AgoraRtePlayerState, error: AgoraRteError?) { + print("onStateChanged: oldState: \(oldState.rawValue) newState:\(newState.rawValue) error: \(error?.code().rawValue ?? 
0)") + } + + func onPositionChanged(currentTime: UInt64, utcTime: UInt64) { + + } + + func onResolutionChanged(width: Int, height: Int) { + print("onResolutionChanged: {\(width), \(height)}") + } + + func onEvent(event: AgoraRtePlayerEvent) { + print("onEvent: \(event.rawValue)") + } + + func onMetadata(type: AgoraRtePlayerMetadataType, data: Data) { + + } + + func onPlayerInfoUpdated(info: AgoraRtePlayerInfo) { + + } + + func onAudioVolumeIndication(volume: Int32) { + + } +} diff --git a/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/zh-Hans.lproj/RtePlayer.strings b/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/zh-Hans.lproj/RtePlayer.strings new file mode 100644 index 000000000..2a0b88a1e --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/RtePlayer/zh-Hans.lproj/RtePlayer.strings @@ -0,0 +1,11 @@ +/* Class = "UITextField"; placeholder = "Enter channel name"; ObjectID = "GWc-L5-fZV"; */ +"GWc-L5-fZV.placeholder" = "杈撳叆棰戦亾鍚"; + +/* Class = "UILabel"; text = "Ultra Low Latency"; ObjectID = "Lzz-2R-G7f"; */ +"Lzz-2R-G7f.text" = "鏋侀熺洿鎾"; + +"17J-Ix-Qot.normalTitle" = "鎾斁"; +"tqt-S5-7Lt.normalTitle" = "鍋滄"; + + +"BMd-dm-te6.text" = "璇风敤鍙︿竴鍙拌澶囦綔涓轰富鎾韩浠藉紑鍚竴涓洿鎾閬擄紝骞跺湪涓婅堪杈撳叆妗嗗唴濉叆瀵瑰簲棰戦亾鍚嶃"; diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/BeautyAPI.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/BeautyAPI.m index 844870bc9..ece43cc0d 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/BeautyAPI.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/BeautyAPI.m @@ -6,8 +6,9 @@ // #import "BeautyAPI.h" +#import "APIReporter.h" -static NSString *const beautyAPIVnersio = @"1.0.3"; +static NSString *const beautyAPIVersion = @"1.0.7"; @implementation BeautyStats @end @@ -24,6 +25,8 @@ @interface BeautyAPI () @property (nonatomic, assign) CFTimeInterval preTime; @property (nonatomic, strong) NSMutableArray *statsArray; @property (nonatomic, 
assign) AgoraVideoRenderMode renderMode; +@property (nonatomic, strong) APIReporter *reporter; +@property (nonatomic, assign) BOOL isFirstFrame; @end @@ -34,6 +37,13 @@ @interface BeautyAPI () @implementation BeautyAPI +- (instancetype)init { + if (self == [super init]) { + _isFrontCamera = YES; + } + return self; +} + - (NSMutableArray *)statsArray { if (_statsArray == nil) { _statsArray = [NSMutableArray new]; @@ -41,6 +51,15 @@ - (NSMutableArray *)statsArray { return _statsArray; } +- (APIReporter *)reporter { + if (_reporter == nil) { + _reporter = [[APIReporter alloc] initWithType:(APITypeBeauty) + version:beautyAPIVersion + engine:self.config.rtcEngine]; + } + return _reporter; +} + - (int)initialize:(BeautyConfig *)config { if (config.cameraConfig == nil) { CameraConfig *cameraConfig = [[CameraConfig alloc] init]; @@ -50,7 +69,6 @@ - (int)initialize:(BeautyConfig *)config { } [LogUtil log:[NSString stringWithFormat:@"RTC Version == %@", [AgoraRtcEngineKit getSdkVersion]]]; [LogUtil log:[NSString stringWithFormat:@"BeautyAPI Version == %@", [self getVersion]]]; - _isFrontCamera = YES; self.config = config; if (self.config.statsDuration <= 0) { self.config.statsDuration = 1; @@ -64,6 +82,7 @@ - (int)initialize:(BeautyConfig *)config { return -1; } [LogUtil log:[NSString stringWithFormat:@"beautyRender == %@", config.beautyRender.description]]; + [self.reporter startDurationEventWithName:@"initialize-release"]; self.beautyRender = config.beautyRender; if (config.captureMode == CaptureModeAgora) { #if __has_include() @@ -79,6 +98,7 @@ - (int)initialize:(BeautyConfig *)config { } }; [self rtcReportWithEvent:@"initialize" label:dict]; + [self setupMirror]; #else [LogUtil log:@"rtc 鏈鍏" level:(LogLevelError)]; return -1; @@ -86,15 +106,17 @@ - (int)initialize:(BeautyConfig *)config { } else { [LogUtil log:@"captureMode == Custom"]; } + [self setupMirror]; return 0; } - (int)switchCamera { _isFrontCamera = !_isFrontCamera; - [self setupMirror]; NSDictionary *dict = 
@{ @"cameraPosition": @(_isFrontCamera) }; [self rtcReportWithEvent:@"cameraPosition" label:dict]; - return [self.config.rtcEngine switchCamera]; + int res = [self.config.rtcEngine switchCamera]; + [self setupMirror]; + return res; } - (AgoraVideoMirrorMode)setupMirror { @@ -190,6 +212,7 @@ - (int)destroy { [self.config.beautyRender destroy]; self.config = nil; [LogUtil log:@"destroy"]; + [self.reporter endDurationEventWithName:@"initialize-release" ext:@{}]; return 0; } @@ -198,39 +221,34 @@ - (void)rtcReportWithEvent: (NSString *)event label: (NSDictionary *)label { [LogUtil log:@"rtc 涓嶈兘涓虹┖" level:(LogLevelError)]; return; } - NSString *jsonString = [self convertToJson:label]; - [self.config.rtcEngine sendCustomReportMessage:@"scenarioAPI" - category:[NSString stringWithFormat:@"beauty_iOS_%@",[self getVersion]] - event:event - label:jsonString - value:0]; -} - -- (NSString *)convertToJson: (NSDictionary *)object { - NSError *error = nil; - NSData *jsonData = [NSJSONSerialization dataWithJSONObject:object - options:0 - error:&error]; - if (error) { - // 杞崲澶辫触 - NSLog(@"Error: %@", error.localizedDescription); - return nil; - } - NSString *jsonString = [[NSString alloc] initWithData:jsonData - encoding:NSUTF8StringEncoding]; - return jsonString; + [self.reporter reportFuncEventWithName:event value:label ext:@{}]; } - (NSString *)getVersion { - return beautyAPIVnersio; + return beautyAPIVersion; } #pragma mark - VideoFrameDelegate #if __has_include() +- (BOOL)onCaptureVideoFrame:(AgoraOutputVideoFrame *)videoFrame { + return [self onCaptureVideoFrame:videoFrame sourceType:(AgoraVideoSourceTypeCamera)]; +} - (BOOL)onCaptureVideoFrame:(AgoraOutputVideoFrame *)videoFrame sourceType:(AgoraVideoSourceType)sourceType { if (!self.isEnable) { return YES; } CFTimeInterval startTime = CACurrentMediaTime(); + if (!self.isFirstFrame) { + [self.reporter startDurationEventWithName:@"first_beauty_frame"]; + } CVPixelBufferRef pixelBuffer = [self.config.beautyRender 
onCapture:videoFrame.pixelBuffer]; + if (!self.isFirstFrame) { + [self.reporter endDurationEventWithName:@"first_beauty_frame" ext:@{ + @"width": @(CVPixelBufferGetWidth(pixelBuffer)), + @"height": @(CVPixelBufferGetHeight(pixelBuffer)), + @"camera_facing": _isFrontCamera ? @"front" : @"back", + @"buffer_type": @"pixelbuffer" + }]; + self.isFirstFrame = YES; + } CFTimeInterval endTime = CACurrentMediaTime(); if (self.config.statsEnable) { [self.statsArray addObject:@(endTime - startTime)]; @@ -239,7 +257,7 @@ - (BOOL)onCaptureVideoFrame:(AgoraOutputVideoFrame *)videoFrame sourceType:(Agor if (self.config.eventCallback && self.preTime > 0 && self.config.statsEnable) { CFTimeInterval time = startTime - self.preTime; if (time > self.config.statsDuration && self.statsArray.count > 0) { - NSArray *sortArray = [self.statsArray sortedArrayUsingComparator:^NSComparisonResult(NSNumber * _Nonnull obj1, NSNumber * _Nonnull obj2) { + NSArray *sortArray = [self.statsArray sortedArrayUsingComparator:^NSComparisonResult(NSNumber * _Nonnull obj1, NSNumber * _Nonnull obj2) { return obj1.doubleValue > obj2.doubleValue; }]; double totalValue = 0; diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/BytesRender/BytesBeautyRender.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/BytesRender/BytesBeautyRender.h index bbe434af1..d9b637a5e 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/BytesRender/BytesBeautyRender.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/BytesRender/BytesBeautyRender.h @@ -5,13 +5,14 @@ // Created by zhaoyongqiang on 2023/6/30. 
// -#define BytesMoudle "bef_effect_ai_api.h" +#define BytesMoudle #import -#if __has_include("BEImageUtils.h") && __has_include("BEFrameProcessor.h") +#if __has_include("BEImageUtils.h") && __has_include("BEEffectManager.h") #import "BEImageUtils.h" -#import "BEFrameProcessor.h" +#import "BEEffectManager.h" +#import "BEEffectResourceHelper.h" #endif #import "BeautyAPI.h" @@ -19,11 +20,11 @@ NS_ASSUME_NONNULL_BEGIN @interface BytesBeautyRender : NSObject -#if __has_include("BEImageUtils.h") && __has_include("BEFrameProcessor.h") -@property (nonatomic, strong) BEFrameProcessor *frameProcessor; +#if __has_include("BEImageUtils.h") && __has_include("BEEffectManager.h") +@property (nonatomic, strong) BEEffectManager *effectManager; @property (nonatomic, strong) BEImageUtils *imageUtils; #endif - +- (BOOL)checkLicense; @end NS_ASSUME_NONNULL_END diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/BytesRender/BytesBeautyRender.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/BytesRender/BytesBeautyRender.m index 645b31a3c..cce26dd58 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/BytesRender/BytesBeautyRender.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/BytesRender/BytesBeautyRender.m @@ -7,26 +7,30 @@ #import "BytesBeautyRender.h" + @interface BytesBeautyRender () @property (nonatomic, strong) NSMutableArray *bytesNodes; +@property (nonatomic, weak) BEPixelBufferGLTexture *outTexture; @end @implementation BytesBeautyRender -#if __has_include("BEImageUtils.h") && __has_include("BEFrameProcessor.h") -- (BEFrameProcessor *)frameProcessor { - if (_frameProcessor == nil) { - EAGLContext *context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; - [EAGLContext setCurrentContext:context]; - _frameProcessor = [[BEFrameProcessor alloc]initWithContext:context resourceDelegate:nil]; - _frameProcessor.processorResult = 
BECVPixelBuffer; - [_frameProcessor setEffectOn:YES]; - [_frameProcessor updateComposerNodes:self.bytesNodes]; +#if __has_include("BEImageUtils.h") && __has_include("BEEffectManager.h") +- (BEEffectManager *)effectManager { + if (_effectManager == nil) { + _effectManager = [[BEEffectManager alloc] initWithResourceProvider:[BEEffectResourceHelper new] licenseProvider:[BELicenseHelper shareInstance]]; +#if __has_include(BytesMoudle) + int ret = [_effectManager initTask]; + if (ret == BEF_RESULT_SUC){ + [_effectManager updateComposerNodes:self.bytesNodes]; + } +#endif } - return _frameProcessor; + return _effectManager; } + - (BEImageUtils *)imageUtils { if (_imageUtils == nil) { _imageUtils = [[BEImageUtils alloc] init]; @@ -42,84 +46,150 @@ - (NSMutableArray *)bytesNodes { return _bytesNodes; } -- (void)destroy { +- (BOOL)checkLicense { +#if __has_include(BytesMoudle) + return [[BELicenseHelper shareInstance] checkLicense]; +#else + return NO; +#endif +} + +- (void)destroy { #if __has_include(BytesMoudle) - _frameProcessor = nil; + [_effectManager cleanPipeline]; + [_effectManager destroyTask]; + _effectManager = nil; _imageUtils = nil; + [self.outTexture destroy]; + self.outTexture = nil; #endif } -- (nonnull CVPixelBufferRef)onCapture:(nonnull CVPixelBufferRef)pixelBuffer { +- (nonnull CVPixelBufferRef)onCapture:(nonnull CVPixelBufferRef)pixelBuffer { #if __has_include(BytesMoudle) - pixelBuffer = [self.imageUtils transforCVPixelBufferToCVPixelBuffer:pixelBuffer outputFormat:BE_BGRA]; - CVPixelBufferRef px = [self.frameProcessor process: pixelBuffer - timeStamp: [NSDate date].timeIntervalSince1970].pixelBuffer; - return px; + double timeStamp = [[NSDate date] timeIntervalSince1970]; + BEPixelBufferInfo *pixelBufferInfo = [self.imageUtils getCVPixelBufferInfo:pixelBuffer]; + if (pixelBufferInfo.format != BE_BGRA) { + pixelBuffer = [self.imageUtils transforCVPixelBufferToCVPixelBuffer:pixelBuffer + outputFormat:pixelBufferInfo.format]; + } + + if ([self 
getDeviceOrientation] != BEF_AI_CLOCKWISE_ROTATE_0) { + pixelBuffer = [self.imageUtils rotateCVPixelBuffer:pixelBuffer rotation:BEF_AI_CLOCKWISE_ROTATE_0]; + } +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + if ([EAGLContext currentContext] != self.effectManager.glContext) { + [EAGLContext setCurrentContext: self.effectManager.glContext]; + } +#pragma clang diagnostic pop + id texture = [self.imageUtils transforCVPixelBufferToTexture:pixelBuffer]; + BEPixelBufferGLTexture *outTexture = nil; + + outTexture = [self.imageUtils getOutputPixelBufferGLTextureWithWidth:texture.width + height:texture.height + format:pixelBufferInfo.format + withPipeline:self.effectManager.usePipeline]; + self.outTexture = outTexture; + int ret = [self.effectManager processTexture:texture.texture + outputTexture:outTexture.texture + width:pixelBufferInfo.width + height:pixelBufferInfo.height + rotate:[self getDeviceOrientation] + timeStamp:timeStamp]; + if (ret != BEF_RESULT_SUC) { + outTexture = texture; + } + return [(BEPixelBufferGLTexture *)outTexture pixelBuffer]; +#else + return pixelBuffer; #endif - return nil; } + +#if __has_include(BytesMoudle) +- (bef_ai_rotate_type)getDeviceOrientation { + UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; + switch (orientation) { + case UIDeviceOrientationPortrait: + return BEF_AI_CLOCKWISE_ROTATE_0; + + case UIDeviceOrientationPortraitUpsideDown: + return BEF_AI_CLOCKWISE_ROTATE_180; + + case UIDeviceOrientationLandscapeLeft: + return BEF_AI_CLOCKWISE_ROTATE_270; + + case UIDeviceOrientationLandscapeRight: + return BEF_AI_CLOCKWISE_ROTATE_90; + + default: + return BEF_AI_CLOCKWISE_ROTATE_0; + } +} +#endif + #if __has_include() - (AgoraVideoFormat)getVideoFormatPreference { return AgoraVideoFormatCVPixelBGRA; } #endif -- (void)reset { +- (void)reset { #if __has_include(BytesMoudle) - [self.frameProcessor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" 
intensity:0]; - [self.frameProcessor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Overall" intensity:0]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Zoom_Cheekbone" intensity:0]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Eye" intensity:0]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Nose" intensity:0]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Chin" intensity:0]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Zoom_Jawbone" intensity:0]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Forehead" intensity:0]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_ZoomMouth" intensity:0]; - [self.frameProcessor updateComposerNodeIntensity:@"/beauty_4Items" key:@"BEF_BEAUTY_WHITEN_TEETH" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Overall" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Zoom_Cheekbone" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Eye" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Nose" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Chin" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Zoom_Jawbone" 
intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Forehead" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_ZoomMouth" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/beauty_4Items" key:@"BEF_BEAUTY_WHITEN_TEETH" intensity:0]; #endif } -- (void)setBeautyPreset { +- (void)setBeautyPreset { #if __has_include(BytesMoudle) - [self.frameProcessor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0.2]; - [self.frameProcessor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0.3]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Overall" intensity:0.15]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Zoom_Cheekbone" intensity:0.3]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Eye" intensity:0.15]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Nose" intensity:0.15]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Chin" intensity:0.46]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Zoom_Jawbone" intensity:0.46]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Forehead" intensity:0.4]; - [self.frameProcessor updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_ZoomMouth" intensity:0.16]; - [self.frameProcessor updateComposerNodeIntensity:@"/beauty_4Items" key:@"BEF_BEAUTY_WHITEN_TEETH" intensity:0.2]; + [self.effectManager updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0.2]; + [self.effectManager updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0.3]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Overall" 
intensity:0.15]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Zoom_Cheekbone" intensity:0.3]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Eye" intensity:0.15]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Nose" intensity:0.15]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Chin" intensity:0.46]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Zoom_Jawbone" intensity:0.46]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_Forehead" intensity:0.4]; + [self.effectManager updateComposerNodeIntensity:@"/reshape_lite" key:@"Internal_Deform_ZoomMouth" intensity:0.16]; + [self.effectManager updateComposerNodeIntensity:@"/beauty_4Items" key:@"BEF_BEAUTY_WHITEN_TEETH" intensity:0.2]; #endif } -- (void)setMakeup:(BOOL)isSelected { +- (void)setMakeup:(BOOL)isSelected { #if __has_include(BytesMoudle) if (isSelected) { if (![self.bytesNodes containsObject:@"/style_makeup/qise"]) { [self.bytesNodes addObject:@"/style_makeup/qise"]; - [self.frameProcessor updateComposerNodes:self.bytesNodes]; + [self.effectManager updateComposerNodes:self.bytesNodes]; } - [self.frameProcessor updateComposerNodeIntensity:@"/style_makeup/qise" key:@"Makeup_ALL" intensity:0.6]; + [self.effectManager updateComposerNodeIntensity:@"/style_makeup/qise" key:@"Makeup_ALL" intensity:0.6]; } else { if ([self.bytesNodes containsObject:@"/style_makeup/qise"]) { [self.bytesNodes removeObject:@"/style_makeup/qise"]; - [self.frameProcessor updateComposerNodes:self.bytesNodes]; + [self.effectManager updateComposerNodes:self.bytesNodes]; } - [self.frameProcessor updateComposerNodeIntensity:@"/style_makeup/qise" key:@"Makeup_ALL" intensity:0]; + [self.effectManager updateComposerNodeIntensity:@"/style_makeup/qise" key:@"Makeup_ALL" intensity:0]; } #endif } -- 
(void)setSticker:(BOOL)isSelected { +- (void)setSticker:(BOOL)isSelected { #if __has_include(BytesMoudle) if (isSelected) { - [self.frameProcessor setStickerPath:@"matting_bg"]; + [self.effectManager setStickerPath:@"stickers_zhaocaimao"]; } else { - [self.frameProcessor setStickerPath:@""]; + [self.effectManager setStickerPath:@""]; } #endif } diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/FURender/FUBeautyRender.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/FURender/FUBeautyRender.h index daa31195b..a37bd6db7 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/FURender/FUBeautyRender.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/FURender/FUBeautyRender.h @@ -25,11 +25,13 @@ NS_ASSUME_NONNULL_BEGIN @property (nonatomic, strong) FUManager *fuManager; #endif -- (void)setBeautyWithPath: (NSString *)path key: (NSString *)key value: (float)value; ++ (BOOL)checkLicense; -- (void)setStyleWithPath: (NSString *)path key: (NSString *)key value: (float)value; +- (void)setBeautyWithPath:(NSString *)path key:(NSString *)key value:(float)value; -- (void)setStickerWithPath: (NSString *)path; +- (void)setStyleWithPath:(NSString *)path key:(NSString *)key value:(float)value isCombined:(BOOL)isCombined; + +- (void)setStickerWithPath:(NSString *)path; - (void)reset; @@ -39,6 +41,8 @@ NS_ASSUME_NONNULL_BEGIN - (void)destroy; + + @end NS_ASSUME_NONNULL_END diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/FURender/FUBeautyRender.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/FURender/FUBeautyRender.m index 662529ef6..e312eb363 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/FURender/FUBeautyRender.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/FURender/FUBeautyRender.m @@ -8,6 +8,10 @@ #import 
"FUBeautyRender.h" #import "BundleUtil.h" +#if __has_include(FURenderMoudle) +#import "authpack.h" +#endif + @interface FUBeautyRender () #if __has_include(FURenderMoudle) @@ -15,6 +19,7 @@ @interface FUBeautyRender () @property (nonatomic, strong) FUSticker *currentSticker; @property (nonatomic, strong) FUAnimoji *currentAnimoji; #endif +@property (nonatomic, copy) NSString *makeupKey; @end @@ -29,12 +34,27 @@ - (instancetype)init { return self; } ++ (BOOL)checkLicense { + BOOL success = NO; +#if __has_include(FURenderMoudle) + FUSetupConfig *setupConfig = [[FUSetupConfig alloc] init]; + setupConfig.authPack = FUAuthPackMake(g_auth_package, sizeof(g_auth_package)); + + // 鍒濆鍖 FURenderKit + success = [FURenderKit setupWithSetupConfig:setupConfig]; +#endif + return success; +} + - (void)destroy { #if __has_include(FURenderMoudle) - [FURenderKit shareRenderKit].beauty = nil; - [FURenderKit shareRenderKit].makeup = nil; - [[FURenderKit shareRenderKit].stickerContainer removeAllSticks]; - [FURenderKit destroy]; + dispatch_queue_t referQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0); + dispatch_async(referQueue, ^{ + [FURenderKit shareRenderKit].beauty = nil; + [FURenderKit shareRenderKit].makeup = nil; + [[FURenderKit shareRenderKit].stickerContainer removeAllSticks]; + [FURenderKit destroy]; + }); _fuManager = nil; #endif } @@ -55,8 +75,7 @@ - (void)setBeautyWithPath:(NSString *)path key:(NSString *)key value:(float)valu #if __has_include(FURenderMoudle) FUBeauty *beauty = [FURenderKit shareRenderKit].beauty; if (beauty == nil) { - NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; - NSString *faceAIPath = [bundle pathForResource:[NSString stringWithFormat:@"graphics/%@", path] ofType:@"bundle"]; + NSString *faceAIPath = [[NSBundle mainBundle] pathForResource:[NSString stringWithFormat:@"%@", path] ofType:@"bundle"]; beauty = [[FUBeauty alloc] initWithPath:faceAIPath name:@"FUBeauty"]; beauty.heavyBlur = 0; } @@ 
-129,21 +148,46 @@ - (void)setBeautyWithPath:(NSString *)path key:(NSString *)key value:(float)valu } else if ([key isEqualToString:@"sharpen"]) { beauty.sharpen = value; } + beauty.enable = YES; [FURenderKit shareRenderKit].beauty = beauty; #endif } -- (void)setStyleWithPath:(NSString *)path key:(NSString *)key value:(float)value { +- (void)setStyleWithPath:(NSString *)path key:(NSString *)key value:(float)value isCombined:(BOOL)isCombined { #if __has_include(FURenderMoudle) - NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; - NSString *makeupPath = [bundle pathForResource:path ofType:@"bundle"]; + [self setupBeauty]; FUMakeup *makeup = [FURenderKit shareRenderKit].makeup; - if (makeup == nil) { - makeup = [[FUMakeup alloc] initWithPath:makeupPath name:@"face_makeup"]; - makeup.isMakeupOn = YES; - [FURenderKit shareRenderKit].makeup = makeup; - [FURenderKit shareRenderKit].makeup.enable = YES; + if (isCombined) { + if (makeup == nil || self.makeupKey != key) { + NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; + NSString *stylePath = [bundle pathForResource:key ofType:@"bundle"]; + makeup = [[FUMakeup alloc] initWithPath:stylePath name:@"makeup"]; + makeup.isMakeupOn = YES; + dispatch_queue_t referQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0); + dispatch_async(referQueue, ^{ + [FURenderKit shareRenderKit].makeup = makeup; + [FURenderKit shareRenderKit].makeup.intensity = value; + [FURenderKit shareRenderKit].makeup.enable = YES; + }); + } + [FURenderKit shareRenderKit].makeup.intensity = value; + self.makeupKey = key; + } else { + NSString *makeupPath = [[NSBundle mainBundle] pathForResource:path ofType:@"bundle"]; + if (makeup == nil || self.makeupKey != path) { + makeup = [[FUMakeup alloc] initWithPath:makeupPath name:@"face_makeup"]; + makeup.isMakeupOn = YES; + [FURenderKit shareRenderKit].makeup = makeup; + [FURenderKit shareRenderKit].makeup.enable = YES; + } + 
NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; + NSString *stylePath = [bundle pathForResource:key ofType:@"bundle"]; + FUItem *makupItem = [[FUItem alloc] initWithPath:stylePath name:key]; + [makeup updateMakeupPackage:makupItem needCleanSubItem:NO]; + makeup.intensity = value; + self.makeupKey = path; } + NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; NSString *stylePath = [bundle pathForResource:key ofType:@"bundle"]; FUItem *makupItem = [[FUItem alloc] initWithPath:stylePath name:key]; [makeup updateMakeupPackage:makupItem needCleanSubItem:NO]; @@ -153,6 +197,7 @@ - (void)setStyleWithPath:(NSString *)path key:(NSString *)key value:(float)value - (void)setAnimojiWithPath:(NSString *)path { #if __has_include(FURenderMoudle) + [self setupBeauty]; if (self.currentSticker) { [[FURenderKit shareRenderKit].stickerContainer removeSticker:self.currentSticker completion:nil]; self.currentSticker = nil; @@ -173,10 +218,15 @@ - (void)setAnimojiWithPath:(NSString *)path { } - (void)setStickerWithPath:(NSString *)path { +#if __has_include(FURenderMoudle) + [self setupBeauty]; NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; NSString *stickerPath = [bundle pathForResource:[NSString stringWithFormat:@"sticker/%@", path] ofType:@"bundle"]; -#if __has_include(FURenderMoudle) - FUSticker *sticker = [[FUSticker alloc] initWithPath:stickerPath name:@"sticker"]; + if (stickerPath == nil && self.currentSticker == nil) { + return; + } + FUSticker *sticker = [[FUSticker alloc] initWithPath:stickerPath + name:path]; if (self.currentAnimoji) { [[FURenderKit shareRenderKit].stickerContainer removeSticker:self.currentAnimoji completion:nil]; self.currentAnimoji = nil; @@ -192,15 +242,15 @@ - (void)setStickerWithPath:(NSString *)path { - (void)reset { #if __has_include(FURenderMoudle) - [FURenderKit shareRenderKit].beauty = nil; + [FURenderKit shareRenderKit].beauty.enable = 
NO; #endif } - (void)resetStyle { #if __has_include(FURenderMoudle) [FURenderKit shareRenderKit].makeup.enable = NO; - [FURenderKit shareRenderKit].makeup = nil; #endif + self.makeupKey = nil; } - (void)resetSticker { @@ -213,19 +263,31 @@ - (void)resetSticker { - (void)setBeautyPreset { #if __has_include(FURenderMoudle) - NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; - NSString *faceAIPath = [bundle pathForResource:@"graphics/face_beautification" ofType:@"bundle"]; - FUBeauty *beauty = [[FUBeauty alloc] initWithPath:faceAIPath name:@"FUBeauty"]; - [FURenderKit shareRenderKit].beauty = beauty; + dispatch_queue_t referQueue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0); + dispatch_async(referQueue, ^{ + NSString *faceAIPath = [[NSBundle mainBundle] pathForResource:@"face_beautification" ofType:@"bundle"]; + FUBeauty *beauty = [[FUBeauty alloc] initWithPath:faceAIPath name:@"FUBeauty"]; + [FURenderKit shareRenderKit].beauty = beauty; + }); +#endif +} + +- (void)setupBeauty { +#if __has_include(FURenderMoudle) + if ([FURenderKit shareRenderKit].beauty == nil) { + NSString *faceAIPath = [[NSBundle mainBundle] pathForResource:@"face_beautification" ofType:@"bundle"]; + FUBeauty *beauty = [[FUBeauty alloc] initWithPath:faceAIPath name:@"FUBeauty"]; + [FURenderKit shareRenderKit].beauty = beauty; + } #endif } - (void)setMakeup:(BOOL)isSelected { #if __has_include(FURenderMoudle) if (isSelected) { - NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; - NSString *makeupPath = [bundle pathForResource:@"graphics/face_makeup" ofType:@"bundle"]; + NSString *makeupPath = [[NSBundle mainBundle] pathForResource:@"face_makeup" ofType:@"bundle"]; FUMakeup *makeup = [[FUMakeup alloc] initWithPath:makeupPath name:@"face_makeup"]; + NSBundle *bundle = [BundleUtil bundleWithBundleName:@"FURenderKit" podName:@"fuLib"]; NSString *path = [bundle pathForResource:@"makeup/ziyun" ofType:@"bundle"]; FUItem 
*makupItem = [[FUItem alloc] initWithPath:path name:@"ziyun"]; makeup.isMakeupOn = YES; @@ -235,7 +297,6 @@ - (void)setMakeup:(BOOL)isSelected { makeup.intensity = 0.7; } else { [FURenderKit shareRenderKit].makeup.enable = NO; - [FURenderKit shareRenderKit].makeup = nil; } #endif } diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/SenseRender/SenseBeautyRender.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/SenseRender/SenseBeautyRender.m index 323e32f87..52fba49ef 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/SenseRender/SenseBeautyRender.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Render/SenseRender/SenseBeautyRender.m @@ -61,10 +61,11 @@ - (void)checkSensetimeLicense { self.isSuccessLicense = [EffectsProcess authorizeWithLicensePath:licensePath]; __weak SenseBeautyRender *weakSelf = self; self.timer = [NSTimer timerWithTimeInterval:1 repeats:YES block:^(NSTimer * _Nonnull timer) { - weakSelf.isSuccessLicense = weakSelf.videoProcessing.effectsProcess.isAuthrized; if (weakSelf.isSuccessLicense) { [weakSelf.timer invalidate]; weakSelf.timer = nil; + } else { + weakSelf.isSuccessLicense = weakSelf.videoProcessing.effectsProcess.isAuthrized; } if (weakSelf.licenseEventCallback) { weakSelf.licenseEventCallback(weakSelf.isSuccessLicense); @@ -86,7 +87,7 @@ - (nonnull CVPixelBufferRef)onCapture:(nonnull CVPixelBufferRef)pixelBuffer { if (self.isSuccessLicense) { return [self.videoProcessing videoProcessHandler:pixelBuffer]; } - return nil; + return pixelBuffer; #endif return pixelBuffer; } @@ -134,14 +135,12 @@ - (void)setBeautyDefault { - (void)setMakeup:(BOOL)isSelected { #if __has_include(Sensetime) if (isSelected) { - NSString *path = [[NSBundle mainBundle] pathForResource:@"qise.zip" ofType:nil]; __weak SenseBeautyRender *weakself = self; - [self.videoProcessing.effectsProcess addStickerWithPath:path callBack:^(st_result_t 
state, int sticker, uint64_t action) { - [weakself.videoProcessing.effectsProcess setPackageId:sticker groupType:EFFECT_BEAUTY_GROUP_MAKEUP strength:0.5]; + [self.videoProcessing addStylePath:@"hunxue.zip" groupId:0 strength:0.5 callBack:^(int sticker) { weakself.stickerId = sticker; }]; } else { - [self.videoProcessing.effectsProcess removeSticker:self.stickerId]; + [self.videoProcessing removeStickerId:self.stickerId]; self.stickerId = 0; } #endif @@ -150,7 +149,7 @@ - (void)setMakeup:(BOOL)isSelected { - (void)setSticker:(BOOL)isSelected { #if __has_include(Sensetime) if (isSelected) { - NSString *path = [[NSBundle mainBundle] pathForResource:@"lianxingface.zip" ofType:nil]; + NSString *path = [[NSBundle mainBundle] pathForResource:@"ShangBanLe.zip" ofType:nil]; [self.videoProcessing.effectsProcess setStickerWithPath:path callBack:^(st_result_t state, int stickerId, uint64_t action) { }]; diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Report/APIReporter.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Report/APIReporter.h new file mode 100644 index 000000000..fe85201a1 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Report/APIReporter.h @@ -0,0 +1,49 @@ +// +// APIReporter.h +// BeautyAPi +// +// Created by zhaoyongqiang on 2024/4/17. 
+// + +#import +#import + +typedef NS_ENUM(NSInteger, APIType) { + APITypeKTV = 1, //K姝 + APITypeCall = 2, //鍛煎彨 + APITypeBeauty = 3, //缇庨 + APITypeVideoLoader = 4, //绉掑紑/绉掑垏 + APITypePK = 5, //鍥㈡垬 + APITypeVitualSpace = 6, + APITypeScreenSpace = 7, //灞忓箷鍏变韩 + APITypeAudioScenario = 8 //闊抽scenario +}; + +typedef NS_ENUM(NSInteger, APIEventType) { + APIEventTypeAPI = 0, //api浜嬩欢 + APIEventTypeCost, //鑰楁椂浜嬩欢 + APIEventTypeCustom //鑷畾涔変簨浠 +}; + +typedef NS_ENUM(NSInteger, APICostEvent) { + APICostEventChannelUsage = 0, //棰戦亾浣跨敤鑰楁椂 + APICostEventFirstFrameActual, //棣栧抚瀹為檯鑰楁椂 + APICostEventFirstFramePerceived //棣栧抚鎰熷畼鑰楁椂 +}; + +NS_ASSUME_NONNULL_BEGIN + +@interface APIReporter : NSObject + +- (instancetype)initWithType:(APIType)type version:(NSString *)version engine:(AgoraRtcEngineKit *)engine; +- (void)reportFuncEventWithName:(NSString *)name value:(NSDictionary *)value ext:(NSDictionary *)ext; +- (void)startDurationEventWithName:(NSString *)name; +- (void)endDurationEventWithName:(NSString *)name ext:(NSDictionary *)ext; +- (void)reportCostEventWithName:(APICostEvent)name cost:(NSInteger)cost ext:(NSDictionary *)ext; +- (void)reportCustomEventWithName:(NSString *)name ext:(NSDictionary *)ext; +- (void)writeLogWithContent:(NSString *)content level:(AgoraLogLevel)level; +- (void)cleanCache; + +@end + +NS_ASSUME_NONNULL_END diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Report/APIReporter.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Report/APIReporter.m new file mode 100644 index 000000000..cb4ca73f1 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/BeautyAPI/Report/APIReporter.m @@ -0,0 +1,143 @@ +// +// APIReporter.m +// BeautyAPi +// +// Created by zhaoyongqiang on 2024/4/17. 
+// + +#import "APIReporter.h" + +@interface APIReporter () + +@property (nonatomic, strong) AgoraRtcEngineKit *engine; +@property (nonatomic, copy) NSString *category; +@property (nonatomic, strong) NSMutableDictionary *durationEventStartMap; +@property (nonatomic, copy) NSString *messsageId; + +@end + +@implementation APIReporter + +- (NSString *)messsageId { + return @"agora:scenarioAPI"; +} + +- (instancetype)initWithType:(APIType)type version:(NSString *)version engine:(AgoraRtcEngineKit *)engine { + self = [super init]; + if (self) { + _category = [NSString stringWithFormat:@"%ld_iOS%@", (long)type, version]; + _engine = engine; + [self configParameters]; + } + return self; +} + +- (void)reportFuncEventWithName:(NSString *)name value:(NSDictionary *)value ext:(NSDictionary *)ext { + NSString *content = [NSString stringWithFormat:@"[APIReporter]reportFuncEvent: %@ value: %@ ext: %@", name, value, ext]; + [self debugApiPrint:content]; + + NSDictionary *eventMap = @{ @"type": @(APIEventTypeAPI), @"desc": name }; + NSDictionary *labelMap = @{ @"apiValue": value, @"ts": @([self getCurrentTs]), @"ext": ext}; + + NSString *event = [self convertToJSONString:eventMap]; + NSString *label = [self convertToJSONString:labelMap]; + + [self.engine sendCustomReportMessage:self.messsageId category:self.category event:event label:label value:0]; +} + +- (void)startDurationEventWithName:(NSString *)name { + self.durationEventStartMap[name] = @([self getCurrentTs]); +} + +- (void)endDurationEventWithName:(NSString *)name ext:(NSDictionary *)ext { + NSNumber *beginTs = self.durationEventStartMap[name]; + if (!beginTs) { + return; + } + [self.durationEventStartMap removeObjectForKey:name]; + + NSInteger ts = [self getCurrentTs]; + NSInteger cost = ts - beginTs.integerValue; + + [self reportCostEventWithTs:ts name:name cost:cost ext:ext]; +} + +- (void)reportCostEventWithName:(APICostEvent)name cost:(NSInteger)cost ext:(NSDictionary *)ext { [self.durationEventStartMap 
removeObjectForKey: [self getCostEventWithName:name]]; + + [self reportCostEventWithTs:[self getCurrentTs] name:[self getCostEventWithName:name] cost:cost ext:ext]; +} + +- (void)reportCustomEventWithName:(NSString *)name ext:(NSDictionary *)ext { + NSString *content = [NSString stringWithFormat:@"[APIReporter]reportCustomEvent: %@ ext: %@", name, ext]; + [self debugApiPrint:content]; + + NSDictionary *eventMap = @{ @"type": @(APIEventTypeCustom), @"desc": name }; + NSDictionary *labelMap = @{ @"ts": @([self getCurrentTs]), @"ext": ext }; + + NSString *event = [self convertToJSONString:eventMap]; + NSString *label = [self convertToJSONString:labelMap]; + + [self.engine sendCustomReportMessage:self.messsageId category:self.category event:event label:label value:0]; +} + +- (NSString *)getCostEventWithName:(APICostEvent)name { + switch (name) { + case APICostEventChannelUsage: return @"channelUsage"; + case APICostEventFirstFrameActual: return @"firstFrameActual"; + case APICostEventFirstFramePerceived: return @"firstFramePerceived"; + } +} + +- (void)writeLogWithContent:(NSString *)content level:(AgoraLogLevel)level { + [self.engine writeLog:level content:content]; +} + +- (void)cleanCache { + [self.durationEventStartMap removeAllObjects]; +} + +#pragma mark - Private Methods +- (void)reportCostEventWithTs:(NSInteger)ts name:(NSString *)name cost:(NSInteger)cost ext:(NSDictionary *)ext { + NSString *content = [NSString stringWithFormat:@"[APIReporter]reportCostEvent: %@ cost: %ld ms ext: %@", name, (long)cost, ext]; + [self debugApiPrint:content]; + [self writeLogWithContent:content level:AgoraLogLevelInfo]; + + NSDictionary *eventMap = @{ @"type": @(APIEventTypeCost), @"desc": name }; + NSDictionary *labelMap = @{ @"ts": @(ts), @"ext": ext }; + + NSString *event = [self convertToJSONString:eventMap]; + NSString *label = [self convertToJSONString:labelMap]; + + [self.engine sendCustomReportMessage:self.messsageId category:self.category event:event label:label 
value:cost]; +} + +- (void)configParameters { + [self.engine setParameters:@"{\"rtc.direct_send_custom_event\": true}"]; + [self.engine setParameters:@"{\"rtc.log_external_input\": true}"]; +} + +- (NSInteger)getCurrentTs { + return (NSInteger)([[NSDate date] timeIntervalSince1970] * 1000.0); +} + +- (NSString *)convertToJSONString:(NSDictionary *)dictionary { + NSError *error; + NSData *jsonData = [NSJSONSerialization dataWithJSONObject:dictionary options:0 error:&error]; + if (!jsonData) { + [self writeLogWithContent:[NSString stringWithFormat:@"[APIReporter]convert to json fail: %@ dictionary: %@", error, dictionary] level:AgoraLogLevelWarn]; + return nil; + } + NSString *jsonString = [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding]; + return jsonString; +} + +- (void)debugApiPrint:(NSString *)message { +#if DEBUG + NSDateFormatter *formatter = [[NSDateFormatter alloc] init]; + formatter.dateFormat = @"yyyy-MM-dd HH:mm:ss.SSS"; + NSString *timeString = [formatter stringFromDate:[NSDate date]]; + NSLog(@"%@ %@", timeString, message); +#endif +} + +@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard index f4fcda5fc..078ebc6c2 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Base.lproj/BytedEffect.storyboard @@ -1,9 +1,9 @@ - + - + @@ -105,14 +105,12 @@ diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m index 3e749aed9..1046954fd 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m +++ 
b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/BytedEffectVC.m @@ -48,7 +48,7 @@ - (void)viewDidLoad { } - (void) initSDK { -#if __has_include("bef_effect_ai_api.h") +#if __has_include(BytesMoudle) [self.tipsLabel setHidden:YES]; [self.container setHidden:NO]; #else diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEEffectManager.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEEffectManager.h index ac48cee43..df5fc06af 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEEffectManager.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEEffectManager.h @@ -1,9 +1,6 @@ -// // BEEffectManager.h -// BytedEffectSDK -// -// Created by qun on 2021/5/17. -// +// EffectsARSDK + #ifndef BEEffectManager_h #define BEEffectManager_h @@ -12,9 +9,9 @@ #import #import #import "BELicenseHelper.h" -#if __has_include("bef_effect_ai_api.h") -#import "bef_effect_ai_api.h" -#import "bef_effect_ai_message_define.h" +#if __has_include() +#import +#import #endif #import "BEImageUtils.h" @@ -28,8 +25,8 @@ typedef NS_ENUM(NSInteger, BEEffectPart) { BEEffectPart_2 = 2, BEEffectPart_3 = 3, BEEffectPart_4 = 4, - BEEffectPart_5 = 5, //鍏ㄥ眬鏌撳彂 - BEEffectPart_6 = 6, //娓呴櫎鏌撳彂鏁堟灉 + BEEffectPart_5 = 5, // {zh} 鍏ㄥ眬鏌撳彂 {en} Global hair color + BEEffectPart_6 = 6, // {zh} 娓呴櫎鏌撳彂鏁堟灉 {en} Clear hair color effect }; @protocol BEEffectManagerDelegate @@ -68,6 +65,10 @@ typedef NS_ENUM(NSInteger, BEEffectPart) { @property (nonatomic, strong) id provider; @property (nonatomic, strong) id licenseProvider; @property (nonatomic, weak) id delegate; +@property (nonatomic, strong) NSString *resourcePath; +@property (atomic, weak) dispatch_queue_t renderQueue; +@property (nonatomic, strong) EAGLContext *glContext; +@property (nonatomic, assign, readonly) BOOL isSuccessLicense; // {zh} / @brief 鏋勯犲嚱鏁 {en} /@brief constructor // {zh} / 
@details 闇瑕佷紶鍏ヤ竴涓 BEEffectResourceProvider 瀹炵幇锛岀敤浜庢彁渚涘悇绉嶇礌鏉愮殑璺緞锛屽拰涓涓狟ELicenseProvider鐨勫疄鐜帮紝鐢ㄤ簬鑾峰彇license {en} /@details need to pass in a BEEffectResourceProvider implementation to provide the path of various materials, and a BELicenseProvider implementation to get license @@ -75,7 +76,7 @@ typedef NS_ENUM(NSInteger, BEEffectPart) { // {zh} / @param provider 鐗规晥璧勬簮鏂囦欢鑾峰彇绫 {en} /@param provider effect resource file acquisition class - (instancetype)initWithResourceProvider:(id)resourceProvider licenseProvider:(id)licenseProvider; -#if __has_include("bef_effect_ai_api.h") +#if __has_include() // {zh} / @brief 鍒濆鍖 SDK {en} /@brief initialization SDK - (bef_effect_result_t)initTask; @@ -94,15 +95,15 @@ typedef NS_ENUM(NSInteger, BEEffectPart) { // {zh} / @brief 璁剧疆licenseProvider {en} /@Briefly set licenseProvider // {zh} / @param licenseProvider 浼犲叆涓涓猯icenseProvider鐨勫疄鐜扮敤浜巐icense鐨勮幏鍙 {en} /@param licenseProvider is a BELicenseProvider implementation to provide the path of license, - +#endif // {zh} / @brief 璁剧疆婊ら暅璺緞 {en} /@Briefly set filter path // {zh} / @details 鐩稿 FilterResource.bundle/Filter 璺緞锛屼负 null 鏃跺叧闂护闀 {en} /@details Relative to FilterResource .bundle/Filter path, close filter when null // {zh} / @param path 鐩稿璺緞 {en} /@param path relative path - (void)setFilterPath:(NSString *) path; -/// @brief 璁剧疆婊ら暅缁濆璺緞 -/// @param path 婊ら暅绱犳潗鐨勬枃浠惰矾寰勶紝缁濆璺緞 +// {zh} / @brief 璁剧疆婊ら暅缁濆璺緞 {en} /@Brief Set the absolute path of the filter +// {zh} / @param path 婊ら暅绱犳潗鐨勬枃浠惰矾寰勶紝缁濆璺緞 {en} /@Param path The file path of the filter material, absolute path - (void)setFilterAbsolutePath:(NSString *)path; // {zh} / @brief 璁剧疆婊ら暅寮哄害 {en} /@Briefly set filter strength @@ -159,6 +160,7 @@ typedef NS_ENUM(NSInteger, BEEffectPart) { // {zh} / @param majorRadius 瑙︽懜鑼冨洿 {en} @param majorRadius touch range // {zh} / @param pointerId 瑙︽懜鐐 id {en} /@param pointerId touch point id // {zh} / @param pointerCount 瑙︽懜鐐规暟閲 {en} @param pointerCount number of touch points +#if 
__has_include() - (BOOL)processTouchEvent:(bef_ai_touch_event_code)eventCode x:(float)x y:(float)y force:(float)force majorRadius:(float)majorRadius pointerId:(int)pointerId pointerCount:(int)pointerCount; // {zh} / @brief 澶勭悊鎵嬪娍浜嬩欢 {en} Handle gesture events briefly @@ -188,6 +190,7 @@ typedef NS_ENUM(NSInteger, BEEffectPart) { // {zh} / @brief 鑾峰彇鐗规晥 SDK 涓殑浜鸿劯 mask 缁撴灉 {en} /@Brief Get the face mask results in the special effects SDK - (bef_ai_face_mask_info *)getFaceMaskInfo; #endif + // {zh} / @brief 鏄惁寮鍚苟琛屾覆鏌 {en} /@Brief whether to turn on parallel rendering // {zh} / @details 鐗规晥 SDK 鍐呴儴宸ヤ綔鍒嗕负涓ら儴鍒嗭紝绠楁硶妫娴嬪拰鐗规晥娓叉煋锛屽綋寮鍚苟琛屾覆鏌撲箣鍚庯紝 {en} /@Details The internal work of the special effects SDK is divided into two parts, algorithm detection and special effects rendering. When parallel rendering is turned on, // {zh} / 绠楁硶妫娴嬪拰鐗规晥娓叉煋灏嗗湪涓嶅悓绾跨▼鎵ц锛屼互鍏呭垎鍒╃敤澶氬绾跨▼杩涜鍔犻燂紝 {en} /Algorithm detection and effects rendering will be performed on different threads to make full use of multi-threads for acceleration, @@ -223,7 +226,9 @@ typedef NS_ENUM(NSInteger, BEEffectPart) { // {zh} / @details 浼犲叆涓涓浐瀹氬悕瀛楃殑绾圭悊缁欏埌 SDK锛屼紶鍏 BEBuffer锛孲DK 浼氬皢鍏惰В鏋愭垚绾圭悊 {en} /@details pass a texture with a fixed name to the SDK, pass BEBuffer, and the SDK will parse it into a texture // {zh} / @param key 绾圭悊鍚嶇О {en} /@param key texture name // {zh} / @param buffer BEBuffer, 浠呮敮鎸 RGBA 鏍煎紡 {en} /@param buffer BEBuffer, only supports RGBA format +#if __has_include() - (BOOL)setRenderCacheTexture:(NSString *)key buffer:(BEBuffer *)buffer; +#endif - (void)loadResource:(int)timeout; @@ -240,6 +245,10 @@ typedef NS_ENUM(NSInteger, BEEffectPart) { - (UIImage*)getCapturedImageWithKey:(const char*) key; +// {zh} / @brief 寮鍚垨鍏抽棴寮哄埗浜鸿劯妫娴 {en} /@brief Enable or disable forced face detection +// {zh} /detection YES 寮鍚汉鑴告娴 NO鍏抽棴浜鸿劯妫娴 {en} /detection YES on face detection NO off face detection +- (void)forcedFaceDetection:(BOOL)detection; + @end #endif /* BEEffectManager_h */ diff --git 
a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEEffectManager.mm b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEEffectManager.mm index f0fd890a8..d2ad46298 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEEffectManager.mm +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEEffectManager.mm @@ -1,18 +1,18 @@ -// // BEEffectManager.m // Core -// -// Created by qun on 2021/5/17. -// + #import "BEEffectManager.h" -#if __has_include("bef_effect_ai_api.h") -#import "bef_effect_ai_api.h" -#import "bef_effect_ai_message_define.h" -#import "bef_effect_ai_error_code_format.h" -#import "bef_effect_ai_version.h" +#if __has_include() +#import +#import +#import +#import #endif +#import "BETimeRecoder.h" #import "Core.h" +#import "BEImageUtils.h" +#import "BEGLUtils.h" #ifdef EFFECT_LOG_ENABLED typedef enum { @@ -26,11 +26,12 @@ BEF_LOG_LEVEL_FATAL = 7, BEF_LOG_LEVEL_SILENT = 8, }bef_log_level; -#if __has_include("bef_effect_ai_api.h") +#if __has_include() BEF_SDK_API void bef_effect_set_log_level(bef_effect_handle_t handle, bef_log_level logLevel); BEF_SDK_API typedef int(*logFileFuncPointer)(int logLevel, const char* msg); BEF_SDK_API bef_effect_result_t bef_effect_set_log_to_local_func(logFileFuncPointer pfunc); #endif + int effectLogCallback(int logLevel, const char* msg) { printf("[EffectSDK] %s\n", msg); return 0; @@ -41,9 +42,8 @@ int effectLogCallback(int logLevel, const char* msg) { #define BE_LOAD_RESOURCE_TIMEOUT true -#if __has_include("bef_effect_ai_api.h") +#if __has_include() @interface BEEffectManager () { -#if __has_include("bef_effect_ai_api.h") bef_effect_handle_t _handle; BOOL _effectOn; @@ -54,12 +54,16 @@ @interface BEEffectManager () { bef_ai_face_mask_info *_faceMaskInfo; bef_ai_mouth_mask_info *_mouthMaskInfo; bef_ai_teeth_mask_info *_teethMaskInfo; -#endif +// EAGLContext *_glContext; + #if 
BE_LOAD_RESOURCE_TIMEOUT NSMutableSet *_existResourcePathes; BOOL _needLoadResource; -#endif + BOOL _isInitSuccess; } +#else +} +#endif @end #endif @@ -71,8 +75,8 @@ @implementation BEEffectManager - (instancetype)initWithResourceProvider:(id)resourceProvider licenseProvider:(id)licenseProvider { self = [super init]; +#if __has_include() if (self) { -#if __has_include("bef_effect_ai_api.h") _faceInfo = nil; _handInfo = nil; _skeletonInfo = nil; @@ -83,17 +87,26 @@ - (instancetype)initWithResourceProvider:(id)resourceP #if BE_LOAD_RESOURCE_TIMEOUT _existResourcePathes = [NSMutableSet set]; _needLoadResource = NO; + _renderQueue = nil; #endif self.provider = resourceProvider; self.licenseProvider = licenseProvider; -#endif } +#endif return self; } - (int)initTask { -#if __has_include("bef_effect_ai_api.h") +#if __has_include() _effectOn = true; + _glContext = [EAGLContext currentContext]; // 杩愯鍦ㄤ富绾跨▼锛屼娇鐢ㄧ殑鏄痵elf.glView.context + if (_glContext == nil) { + NSLog(@"initTask is not run in thread with glContext!!!"); + _glContext = [BEGLUtils createContextWithDefaultAPI:kEAGLRenderingAPIOpenGLES3]; + } + if ([EAGLContext currentContext] != _glContext) { + [EAGLContext setCurrentContext: _glContext]; + } int ret = 0; ret = bef_effect_ai_create(&_handle); CHECK_RET_AND_RETURN(bef_effect_ai_create, ret) @@ -104,6 +117,7 @@ - (int)initTask { if (self.licenseProvider.licenseMode == OFFLINE_LICENSE) { ret = bef_effect_ai_check_license(_handle, self.licenseProvider.licensePath); CHECK_RET_AND_RETURN(bef_effect_ai_check_license, ret) + _isSuccessLicense = ret == 0; } else if (self.licenseProvider.licenseMode == ONLINE_LICENSE){ if (![self.licenseProvider checkLicenseResult: @"getLicensePath"]) @@ -121,8 +135,13 @@ - (int)initTask { CHECK_RET_AND_RETURN(bef_effect_ai_use_builtin_sensor, ret) ret = bef_effect_ai_init(_handle, 10, 10, self.provider.modelDirPath, ""); CHECK_RET_AND_RETURN(bef_effect_ai_init, ret) + + ret = bef_effect_ai_use_3buffer(_handle, false); + 
CHECK_RET_AND_RETURN(bef_effect_ai_use_3buffer, ret); + _msgDelegateManager = [[IRenderMsgDelegateManager alloc] init]; [self addMsgHandler:self]; + _isInitSuccess = ret == 0; return ret; #else return -1; @@ -130,40 +149,66 @@ - (int)initTask { } - (int)destroyTask { -#if __has_include("bef_effect_ai_api.h") +#if __has_include() + if ([EAGLContext currentContext] != _glContext) { + NSLog(@"effectsar init and destroy are not run in the same glContext"); + [EAGLContext setCurrentContext:_glContext]; + } [self removeMsgHandler:self]; bef_effect_ai_destroy(_handle); + [_msgDelegateManager destoryDelegate]; + _msgDelegateManager = nil; free(_faceInfo); free(_handInfo); free(_skeletonInfo); free(_faceMaskInfo); free(_mouthMaskInfo); free(_teethMaskInfo); -#endif + _isInitSuccess = NO; return 0; +#else + return -1; +#endif } -#if __has_include("bef_effect_ai_api.h") + #pragma mark - public +#if __has_include() - (bef_effect_result_t)processTexture:(GLuint)texture outputTexture:(GLuint)outputTexture width:(int)width height:(int)height rotate:(bef_ai_rotate_type)rotate timeStamp:(double)timeStamp { + if (!_isInitSuccess) { + return BEF_RESULT_FAIL; + } #if BE_LOAD_RESOURCE_TIMEOUT - if (_needLoadResource) { - _needLoadResource = NO; - [self loadResource:-1]; + if (_renderQueue) { + if (_needLoadResource) { + _needLoadResource = NO; + [self loadResource:-1]; + } } #endif + if ([EAGLContext currentContext] != _glContext) { + NSLog(@"effectsar init and process are not run in the same glContext"); + [EAGLContext setCurrentContext:_glContext]; + } + + RECORD_TIME(totalProcess) bef_effect_result_t ret = bef_effect_ai_set_width_height(_handle, width, height); - CHECK_RET_AND_RETURN(bef_effect_ai_set_width_height, ret); + CHECK_RET_AND_RETURN(bef_effect_ai_set_width_height, ret) ret = bef_effect_ai_set_orientation(_handle, rotate); - CHECK_RET_AND_RETURN(bef_effect_ai_set_orientation, ret); + CHECK_RET_AND_RETURN(bef_effect_ai_set_orientation, ret) + RECORD_TIME(algorithmProcess) 
ret = bef_effect_ai_algorithm_texture(_handle, texture, timeStamp); - CHECK_RET_AND_RETURN(bef_effect_ai_algorithm_texture, ret); + STOP_TIME(algorithmProcess) + CHECK_RET_AND_RETURN(bef_effect_ai_algorithm_texture, ret) + RECORD_TIME(effectProcess) ret = bef_effect_ai_process_texture(_handle, texture, outputTexture, timeStamp); - CHECK_RET_AND_RETURN(bef_effect_ai_process_texture, ret); + STOP_TIME(effectProcess) + CHECK_RET_AND_RETURN(bef_effect_ai_process_texture, ret) + STOP_TIME(totalProcess) return ret; } - (void) setFilterPath:(NSString *)path { - if ([self be_empty:path]) { + if (![self be_empty:path]) { path = [self.provider filterPath:path]; } @@ -186,9 +231,11 @@ -(void)setFilterIntensity:(float)intensity { CHECK_RET_AND_RETURN_RESULT(bef_effect_ai_set_intensity, status, ;) } +#endif - (void)setStickerPath:(NSString *)path { - if ([self be_empty:path]) { +#if __has_include() + if (![self be_empty:path]) { path = [self.provider stickerPath:path]; } @@ -196,24 +243,29 @@ - (void)setStickerPath:(NSString *)path { status = bef_effect_ai_set_effect(_handle, [path UTF8String]); CHECK_RET_AND_RETURN_RESULT(bef_effect_ai_set_effect, status, ;) +#endif } - (void)setStickerAbsolutePath:(NSString*)path { +#if __has_include() bef_effect_result_t status = BEF_RESULT_SUC; status = bef_effect_ai_set_effect(_handle, [path UTF8String]); CHECK_RET_AND_RETURN_RESULT(bef_effect_ai_set_effect, status, ;) +#endif } - (void)setAvatarPath:(NSString*) path { +#if __has_include() bef_effect_result_t status = BEF_RESULT_SUC; status = bef_effect_ai_set_effect(_handle, [path UTF8String]); CHECK_RET_AND_RETURN_RESULT(bef_effect_ai_set_effect, status, ;) - +#endif } +#if __has_include() - (void)releaseEffectManager { bef_effect_ai_destroy(_handle); } @@ -224,11 +276,12 @@ - (void)updateComposerNodes:(NSArray *)nodes { } - (void)updateComposerNodes:(NSArray *)nodes withTags:(NSArray *)tags { +#if __has_include() if (tags != nil && nodes.count != tags.count) { 
NSLog(@"bef_effect_ai_composer_set_nodes error: count of tags must equal to nodes"); return; } -#if __has_include("bef_effect_ai_api.h") + #if BE_LOAD_RESOURCE_TIMEOUT for (NSString *node in nodes) { if (![_existResourcePathes containsObject:node]) { @@ -238,7 +291,6 @@ - (void)updateComposerNodes:(NSArray *)nodes withTags:(NSArray *paths = [NSMutableArray arrayWithCapacity:nodes.count]; @@ -279,7 +331,7 @@ - (void)updateComposerNodes:(NSArray *)nodes withTags:(NSArray *)nodes withTags:(NSArray *)nodes withTags:(NSArray_needLoadResource) { + [self loadResource:-1]; + self->_needLoadResource = NO; + } + }); } #endif #endif @@ -321,7 +376,7 @@ - (void)appendComposerNodes:(NSArray *)nodes withTags:(NSArray) #if BE_LOAD_RESOURCE_TIMEOUT for (NSString *node in nodes) { if (![_existResourcePathes containsObject:node]) { @@ -332,10 +387,15 @@ - (void)appendComposerNodes:(NSArray *)nodes withTags:(NSArray *paths = [NSMutableArray arrayWithCapacity:nodes.count]; for (int i = 0; i < nodes.count; i++) { - [paths addObject:[self.provider composerNodePath:nodes[i]]]; + if ([self.resourcePath isEqualToString:@"sticker"]) { + [paths addObject:[self.provider stickerPath:nodes[i]]]; + } + else { + [paths addObject:[self.provider composerNodePath:nodes[i]]]; + } + } nodes = paths; @@ -371,7 +431,7 @@ - (void)appendComposerNodes:(NSArray *)nodes withTags:(NSArray) bef_effect_result_t result = BEF_RESULT_SUC; if (tags == nil) { result = bef_effect_ai_composer_append_nodes(_handle, (const char **)nodesPath, count); @@ -381,7 +441,6 @@ - (void)appendComposerNodes:(NSArray *)nodes withTags:(NSArray *)nodes withTags:(NSArray_needLoadResource) { + [self loadResource:-1]; + self->_needLoadResource = NO; + } + }); } #endif #endif } - (void)removeComposerNodes:(NSArray *)nodes { -#if __has_include("bef_effect_ai_api.h") +#if __has_include() #if BE_LOAD_RESOURCE_TIMEOUT for (NSString *node in nodes) { [_existResourcePathes removeObject:node]; } -#endif #endif NSMutableArray *paths = 
[NSMutableArray arrayWithCapacity:nodes.count]; for (int i = 0; i < nodes.count; i++) { - [paths addObject:[self.provider composerNodePath:nodes[i]]]; + if ([self.resourcePath isEqualToString:@"sticker"]) { + [paths addObject:[self.provider stickerPath:nodes[i]]]; + } + else { + [paths addObject:[self.provider composerNodePath:nodes[i]]]; + } } nodes = paths; @@ -439,30 +505,37 @@ - (void)removeComposerNodes:(NSArray *)nodes { count++; } -#if __has_include("bef_effect_ai_api.h") + bef_effect_result_t result = BEF_RESULT_SUC; result = bef_effect_ai_composer_remove_nodes(_handle, (const char **)nodesPath, count); if (result != BEF_RESULT_SUC) { NSLog(@"bef_effect_ai_composer_set_nodes error: %d", result); } -#endif + for (int i = 0; i < count; i++) { free(nodesPath[i]); } free(nodesPath); +#endif } - (void)updateComposerNodeIntensity:(NSString *)node key:(NSString *)key intensity:(float)intensity { -// node = [self.provider composerNodePath:node]; -#if __has_include("bef_effect_ai_api.h") + + if ([self.resourcePath isEqualToString:@"sticker"]) { + node = [self.provider stickerPath:node]; + } + else { + node = [self.provider composerNodePath:node]; + } +#if __has_include() bef_effect_result_t result = bef_effect_ai_composer_update_node(_handle, (const char *)[node UTF8String], (const char *)[key UTF8String], intensity); CHECK_RET_AND_RETURN_RESULT(bef_effect_ai_composer_update_node, result, ;) #endif } +#if __has_include() - (NSArray *)availableFeatures { //Dynamic lookup feature availability -#if __has_include("bef_effect_ai_api.h") int feature_len = 60; char features[feature_len][BEF_EFFECT_FEATURE_LEN]; int *pf = &feature_len; @@ -483,21 +556,14 @@ - (void)updateComposerNodeIntensity:(NSString *)node key:(NSString *)key intensi } return @[]; } -#else - return @[]; -#endif } - (NSString *)sdkVersion { -#if __has_include("bef_effect_ai_api.h") char version[20]; bef_effect_ai_get_version(version, 20); return [NSString stringWithUTF8String:version]; -#else - return 
@""; -#endif } -#if __has_include("bef_effect_ai_api.h") + - (void)setFrontCamera:(BOOL)frontCamera { _frontCamera = frontCamera; bef_effect_result_t ret = bef_effect_ai_set_camera_device_position(_handle, frontCamera ? bef_ai_camera_position_front : bef_ai_camera_position_back); @@ -681,13 +747,13 @@ - (UIImage*)getCapturedImageWithKey:(const char*) key buf.format = BE_RGBA; BEImageUtils* imageUtils = [BEImageUtils new]; UIImage* img = [imageUtils transforBufferToUIImage:buf]; - //鐢变簬img鐨勬暟鎹湴鍧涓巄uffer涓鏍凤紝闇瑕佹繁鎷疯礉缁撴灉鍥 + // {zh} 鐢变簬img鐨勬暟鎹湴鍧涓巄uffer涓鏍凤紝闇瑕佹繁鎷疯礉缁撴灉鍥 {en} Since the data address of img is the same as that of buffer, deep copy of the result graph is required UIGraphicsBeginImageContext(img.size); [img drawInRect:CGRectMake(0, 0, img.size.width, img.size.height)]; UIImage *copiedImage = UIGraphicsGetImageFromCurrentImageContext(); UIGraphicsEndImageContext(); - //閲婃斁璐寸焊鍐呴儴buffer + // {zh} 閲婃斁璐寸焊鍐呴儴buffer {en} Release sticker internal buffer bef_effect_ai_release_captured_image(_handle, pImage); return copiedImage; } @@ -715,7 +781,6 @@ - (bef_ai_render_api_type)renderAPI { } return bef_ai_render_api_gles30; } -#endif - (BOOL)sethairColorByPart:(BEEffectPart)partIndex r:(CGFloat)r g:(CGFloat)g b:(CGFloat)b a:(CGFloat)a { NSDictionary *param = [[NSDictionary alloc] initWithObjectsAndKeys: @@ -725,19 +790,19 @@ - (BOOL)sethairColorByPart:(BEEffectPart)partIndex r:(CGFloat)r g:(CGFloat)g b:( [NSString stringWithFormat:@"%.3f",a],@"a", nil]; NSData *jsonData = [NSJSONSerialization dataWithJSONObject:param options:NSJSONWritingPrettyPrinted error:nil]; NSString *jsonString = [[NSString alloc] initWithData:jsonData encoding:NSUTF8StringEncoding]; -#if __has_include("bef_effect_ai_api.h") return [self sendMsg:BEEffectHairColor arg1:0 arg2:partIndex arg3:[jsonString UTF8String]]; -#else - return NO; -#endif } - (BOOL)sendCaptureMessage { -#if __has_include("bef_effect_ai_api.h") return [self sendMsg:BEEffectTakingPictures arg1:1 arg2:0 arg3:0]; -#else - return NO; 
-#endif } +// {zh} / @brief 寮鍚垨鍏抽棴寮哄埗浜鸿劯妫娴 {en} /@brief Enable or disable forced face detection +// {zh} /detection YES 寮鍚汉鑴告娴 NO鍏抽棴浜鸿劯妫娴 {en} /detection YES on face detection NO off face detection +- (void)forcedFaceDetection:(BOOL)detection +{ + bef_effect_result_t ret = bef_effect_ai_set_algorithm_force_detect(_handle,detection); + CHECK_RET_AND_RETURN_RESULT(bef_effect_ai_set_algorithm_force_detect, ret, ;) +} +#endif @end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEFrameProcessor.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEFrameProcessor.h deleted file mode 100644 index a9a2d3cee..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEFrameProcessor.h +++ /dev/null @@ -1,165 +0,0 @@ -// Copyright (C) 2018 Beijing Bytedance Network Technology Co., Ltd. -#import -#import "BEResourceHelper.h" -#import "BEEffectManager.h" - -@class BEFrameProcessor; - -/// result type of process -/// can be composite -typedef NS_ENUM(NSInteger, BEProcessorResult) { - BETexture = 1 << 0, - BERawData = 1 << 1, - BECVPixelBuffer = 1 << 2, - BEImage = 1 << 3 -}; - -/// represent buffer result -@interface BEProcessResultBuffer : NSObject -/// raw data pointer -@property (nonatomic, assign) unsigned char *buffer; -/// with of buffer -@property (nonatomic, assign) int width; -/// height of buffer -@property (nonatomic, assign) int height; -/// bytes per row of buffer -@property (nonatomic, assign) int bytesPerRow; -/// format of buffer -@property (nonatomic, assign) BEFormatType format; - -@end - -/// output of (BEProcessResult *)process:(CVPixelBufferRef)pixelBuffer timeStamp:(double)timeStamp and (BEProcessResult *)process:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow timeStamp:(double)timeStamp -@interface BEProcessResult : NSObject -/// always set -@property (nonatomic, assign) GLuint texture; -/// avaliable 
when set BERawData/BECVPixelBuffer/BEImage to BEFrameProcessor's processorResult -@property (nonatomic, strong) BEProcessResultBuffer *buffer; -/// available when set BECVPixelBuffer to BEFrameProcessor's processorResult -@property (nonatomic, assign) CVPixelBufferRef pixelBuffer; -/// available when set BEImage to BEFrameProcessor's processResult -@property (nonatomic, assign) UIImage *image; -/// size of result -@property (nonatomic, assign) CGSize size; -@end - - -/// capture image delegate, will be invoked when set BEFrameProcessor's captureNextFrame YES -@protocol BECaptureDelegate - -- (void)onImageCapture:(UIImage *)image; - -@end - -@interface BEFrameProcessor : NSObject - -/// bind texture and CVPixelBuffer, accelerate pixel reading -@property (nonatomic, assign) BOOL pixelBufferAccelerate; - -/// dispath algorithm and effect render to different thread -@property (nonatomic, assign) BOOL usePipeline; - -/// process result type, buffer/CVPixelBuffer -@property (nonatomic, assign) NSInteger processorResult; - -/// process result format, if not set, will be the same to inputFormat -/// such as for processorResult BECVPixelBuffer and BERawData, BE_RGBA BE_BGRA BE_YUV420P BEYUV420V are available -/// for BEImage and BETexture, no available -@property (nonatomic, assign) BEFormatType outputFormat; - -/// get composer Mode, 0/1 -@property (nonatomic, readonly) int composerMode; - -/// capture next frame when set YES -@property (nonatomic, assign) BOOL captureNextFrame; - -/// capture frame delegate -@property (nonatomic, weak) id captureDelegate; - -/// init function -/// @param context gl context -/// @param delegate resource delegate, nullable -- (instancetype)initWithContext:(EAGLContext *)context resourceDelegate:(id)delegate; - -/// process CVPixelBuffer -/// @param pixelBuffer original pixelBuffer -/// @param timeStamp current time -- (BEProcessResult *)process:(CVPixelBufferRef)pixelBuffer timeStamp:(double)timeStamp; - -/// process buffer -/// @param 
buffer original buffer -/// @param width with of buffer -/// @param height height of buffer -/// @param bytesPerRow bytesPerRow of buffer -/// @param timeStamp current time -/// @param format pixel format, see BEFormatType -- (BEProcessResult *)process:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow timeStamp:(double)timeStamp format:(BEFormatType)format; - -/// process texture -/// @param texture original texture -/// @param width width of texture -/// @param height height of texture -/// @param timeStamp current time -- (BEProcessResult *)process:(GLuint)texture width:(int)width height:(int)height timeStamp:(double)timeStamp; - -/// set filter path -/// @param path relative path -- (void)setFilterPath:(NSString *)path; - -/// set filter intensity -/// @param intensity 0-1 -- (void)setFilterIntensity:(float)intensity; - -/// set sticker path -/// @param path relative path -- (void)setStickerPath:(NSString *)path; - -/// set composer mode -/// @param mode 0: exclusive between composer and sticker, 1: not exclusive between composer and sticker -- (void)setComposerMode:(int)mode; - -/// update composer nodes -/// @param nodes relative path of nodes -- (void)updateComposerNodes:(NSArray *)nodes; - -/// update composer node intensity -/// @param node relative path of node -/// @param key key of feature, such as smooth,white... 
-/// @param intensity 0-1 -- (void)updateComposerNodeIntensity:(NSString *)node key:(NSString *)key intensity:(CGFloat)intensity; - -/// set if effect is on -/// @param on YES: do render NO: not do render, just return origin texture/buffer/CVPixelBuffer -- (void)setEffectOn:(BOOL)on; - -/// get available features in sdk -- (NSArray *)availableFeatures; - -/// get sdk version -- (NSString *)sdkVersion; - -/// set camera position -/// @param isFront YES: texture/buffer/CVPxielBuffer is from front camera -- (BOOL)setCameraPosition:(BOOL)isFront; - -/// set image mode -/// @param imageMode YES for image process when reuse texture -- (BOOL)setImageMode:(BOOL)imageMode; - -/// process touch event -/// @param x x -/// @param y y -- (BOOL)processTouchEvent:(float)x y:(float)y; - -#if __has_include("bef_effect_ai_api.h") -/// get face detect result -- (bef_ai_face_info *)getFaceInfo; - -/// get hand detect result -- (bef_ai_hand_info *)getHandInfo; - -/// get skeleton detect result -- (bef_ai_skeleton_result *)getSkeletonInfo; -#endif - -@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEFrameProcessor.mm b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEFrameProcessor.mm deleted file mode 100644 index 24320bf41..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEFrameProcessor.mm +++ /dev/null @@ -1,499 +0,0 @@ -// Copyright (C) 2018 Beijing Bytedance Network Technology Co., Ltd. 
-#import "BEFrameProcessor.h" - -#import -#if __has_include("bef_effect_ai_api.h") -#import "RenderMsgDelegate.h" -#endif - -#import "BERender.h" -#import "BEEffectManager.h" -#import "BEResourceHelper.h" -#import "BEEffectResourceHelper.h" - - -@implementation BEProcessResultBuffer -@end - -@implementation BEProcessResult -@end - -#if __has_include("bef_effect_ai_api.h") -@interface BEFrameProcessor() { - - EAGLContext *_glContext; - - BOOL _effectOn; - BEEffectManager *_effectManager; - BERender *_render; - BEResourceHelper *_resourceHelper; - IRenderMsgDelegateManager *_manager; - BEFormatType _inputFormat; - - BOOL _shouldResetComposer; -#if __has_include("bef_effect_ai_api.h") - bef_ai_face_info *_faceInfo; - bef_ai_hand_info *_handInfo; - bef_ai_skeleton_result *_skeletonInfo; -#endif -} - -@end -#endif - -@implementation BEFrameProcessor - -/** - * license鏈夋晥鏃堕棿2019-03-01鍒2019-04-30 - * license鍙槸涓轰簡杩借釜浣跨敤鎯呭喌锛屽彲浠ラ殢鏃剁敵璇锋棤浠讳綍闄愬埗license - */ - -- (instancetype)initWithContext:(EAGLContext *)context resourceDelegate:(id)delegate { - self = [super init]; -#if __has_include("bef_effect_ai_api.h") - if (self) { - _glContext = context; - [EAGLContext setCurrentContext:context]; - - _effectOn = YES; - _shouldResetComposer = YES; - _pixelBufferAccelerate = YES; - _processorResult = BECVPixelBuffer; - _faceInfo = NULL; - _handInfo = NULL; - _skeletonInfo = NULL; - BEEffectResourceHelper *resourceHelper = [BEEffectResourceHelper new]; - _effectManager = [[BEEffectManager alloc] initWithResourceProvider:resourceHelper licenseProvider:[BELicenseHelper shareInstance]]; - int ret = [_effectManager initTask]; - NSLog(@"ret == %d", ret); - if (ret == BEF_RESULT_SUC) { - [self setEffectOn:true]; - } - _render = [[BERender alloc] init]; - _resourceHelper = [[BEResourceHelper alloc] init]; - _resourceHelper.delegate = delegate; - self.usePipeline = YES; - } -#endif - return self; -} - -- (void)dealloc { - NSLog(@"BEFrameProcessor dealloc %@", NSStringFromSelector(_cmd)); -#if 
__has_include("bef_effect_ai_api.h") - free(_faceInfo); - free(_handInfo); - free(_skeletonInfo); - [EAGLContext setCurrentContext:_glContext]; -#endif - [self be_releaseSDK]; -} - -/* - * 甯у鐞嗘祦绋 - */ -- (BEProcessResult *)process:(CVPixelBufferRef)pixelBuffer timeStamp:(double)timeStamp{ - CVPixelBufferLockBaseAddress(pixelBuffer, 0); -#if __has_include("bef_effect_ai_api.h") - BEPixelBufferInfo *info = [_render getCVPixelBufferInfo:pixelBuffer]; - if (info.format == BE_UNKNOW) { - NSLog(@"unknow pixelBuffer format, use format show in BEFormatType..."); - return nil; - } - _inputFormat = info.format; -#endif -#if __has_include("bef_effect_ai_api.h") - // 璁剧疆 OpenGL 鐜 , 闇瑕佷笌鍒濆鍖 SDK 鏃朵竴鑷 - if ([EAGLContext currentContext] != _glContext) { - [EAGLContext setCurrentContext:_glContext]; - } -#endif - - BEProcessResult *result; - if (_pixelBufferAccelerate) { -#if __has_include("bef_effect_ai_api.h") - GLuint inputTexture = [_render transforCVPixelBufferToTexture:pixelBuffer]; - [_render initOutputTextureAndCVPixelBufferWithWidth:info.width height:info.height format:info.format]; - result = [self process:inputTexture width:info.width height:info.height timeStamp:timeStamp fromPixelBuffer:YES]; -#endif - } else { -#if __has_include("bef_effect_ai_api.h") - int bytesPerRow = info.width * 4; - unsigned char *baseAddress = [_render transforCVPixelBufferToBuffer:pixelBuffer outputFormat:info.format]; - if (baseAddress == nil) { - CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); - return nil; - } - result = [self process:baseAddress width:info.width height:info.height bytesPerRow:bytesPerRow timeStamp:timeStamp format:info.format fromPixelBuffer:YES]; -#endif - } - - if ((_processorResult & BECVPixelBuffer)) { - if (_pixelBufferAccelerate) { -#if __has_include("bef_effect_ai_api.h") - result.pixelBuffer = [_render getOutputPixelBuffer]; -#endif - } else { - BEProcessResultBuffer *buffer = result.buffer; - if (buffer) { -#if __has_include("bef_effect_ai_api.h") - 
result.pixelBuffer = [_render transforBufferToCVPixelBuffer:buffer.buffer pixelBuffer:pixelBuffer width:buffer.width height:buffer.height bytesPerRow:buffer.bytesPerRow inputFormat:buffer.format outputFormat:[self be_outputFormat]]; -#endif - } - } - } - - CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); - return result; -} - -- (BEProcessResult *)process:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow timeStamp:(double)timeStamp format:(BEFormatType)format { - // 璁剧疆 OpenGL 鐜 , 闇瑕佷笌鍒濆鍖 SDK 鏃朵竴鑷 -#if __has_include("bef_effect_ai_api.h") - if ([EAGLContext currentContext] != _glContext) { - [EAGLContext setCurrentContext:_glContext]; - } - _inputFormat = format; -#endif - return [self process:buffer width:width height:height bytesPerRow:bytesPerRow timeStamp:timeStamp format:format fromPixelBuffer:NO]; -} - -- (BEProcessResult *)process:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow timeStamp:(double)timeStamp format:(BEFormatType)format fromPixelBuffer:(BOOL)fromPixelBuffer { -#if __has_include("bef_effect_ai_api.h") - // transfor buffer to texture - GLuint inputTexture = [_render transforBufferToTexture:buffer width:width height:height bytesPerRow:bytesPerRow inputFormat:format]; - - return [self process:inputTexture width:width height:height timeStamp:timeStamp fromPixelBuffer:fromPixelBuffer]; -#else - return nil; -#endif -} - -- (BEProcessResult *)process:(GLuint)texture width:(int)width height:(int)height timeStamp:(double)timeStamp { - // 璁剧疆 OpenGL 鐜 , 闇瑕佷笌鍒濆鍖 SDK 鏃朵竴鑷 -#if __has_include("bef_effect_ai_api.h") - if ([EAGLContext currentContext] != _glContext) { - [EAGLContext setCurrentContext:_glContext]; - } - - _inputFormat = BE_RGBA; - return [self process:texture width:width height:height timeStamp:timeStamp fromPixelBuffer:NO]; -#else - return nil; -#endif -} - -- (BEProcessResult *)process:(GLuint)texture width:(int)width height:(int)height 
timeStamp:(double)timeStamp fromPixelBuffer:(BOOL)fromPixelBuffer { - //璁剧疆鍚庣画缇庨浠ュ強鍏朵粬璇嗗埆鍔熻兘鐨勫熀鏈弬鏁 - // [_effectManager setWidth:width height:height orientation:[self getDeviceOrientation]]; -#if __has_include("bef_effect_ai_api.h") - GLuint textureResult; - if (_effectOn) { - GLuint outputTexutre = [_render getOutputTexture:width height:height]; - textureResult = [_effectManager processTexture:texture outputTexture:outputTexutre width:width height:height rotate:BEF_AI_CLOCKWISE_ROTATE_0 timeStamp:timeStamp]; - } else { - textureResult = texture; - } - - // transfor texture to buffer/CVPxielbuffer/UIImage with format be_outputFormat - BEProcessResult *result = [self be_transforTextureToResult:textureResult width:width height:height fromPixelBuffer:fromPixelBuffer]; - - // check and capture current frame, for taking photo - [self be_checkAndCaptureFrame:result]; - - return result; -#else - return nil; -#endif -} - -/* - * 璁剧疆婊ら暅寮哄害 - */ --(void)setFilterIntensity:(float)intensity{ -#if __has_include("bef_effect_ai_api.h") - [_effectManager setFilterIntensity:intensity]; -#endif -} - -/* - * 璁剧疆璐寸焊璧勬簮 - */ -- (void)setStickerPath:(NSString *)path { -#if __has_include("bef_effect_ai_api.h") - if (path != nil && ![path isEqualToString:@""]) { - _shouldResetComposer = true; - path = [_resourceHelper stickerPath:path]; - } - [_effectManager setStickerPath:path]; -#endif -} - -- (void)setComposerMode:(int)mode { - _composerMode = mode; - // [_effectManager setComposerMode:mode]; -} - -- (void)updateComposerNodes:(NSArray *)nodes { - [self be_checkAndSetComposer]; - -#if __has_include("bef_effect_ai_api.h") - NSMutableArray *paths = [NSMutableArray arrayWithCapacity:nodes.count]; - for (int i = 0; i < nodes.count; i++) { - NSString *path = [_resourceHelper composerNodePath:nodes[i]]; - if (path) { - [paths addObject:path]; - } - } - [_effectManager updateComposerNodes:paths]; -#endif -} - -- (void)updateComposerNodeIntensity:(NSString *)node key:(NSString *)key 
intensity:(CGFloat)intensity { -#if __has_include("bef_effect_ai_api.h") - [_effectManager updateComposerNodeIntensity:[_resourceHelper composerNodePath:node] key:key intensity:intensity]; -#endif -} - -/* - * 璁剧疆婊ら暅璧勬簮璺緞鍜岀郴鏁 - */ -- (void)setFilterPath:(NSString *)path { -#if __has_include("bef_effect_ai_api.h") - if (path != nil && ![path isEqualToString:@""]) { - path = [_resourceHelper filterPath:path]; - } - [_effectManager setFilterPath:path]; -#endif -} - -- (void)setEffectOn:(BOOL)on -{ -#if __has_include("bef_effect_ai_api.h") - _effectOn = on; -#endif -} - -- (NSArray *)availableFeatures { -#if __has_include("bef_effect_ai_api.h") - return [_effectManager availableFeatures]; -#else - return nil; -#endif -} - -- (NSString *)sdkVersion { -#if __has_include("bef_effect_ai_api.h") - return [_effectManager sdkVersion]; -#else - return nil; -#endif -} - -- (BOOL)setCameraPosition:(BOOL)isFront { -#if __has_include("bef_effect_ai_api.h") - [_effectManager setFrontCamera:isFront]; -#endif - return YES; -} - -- (BOOL)setImageMode:(BOOL)imageMode { - // return [_effectManager setImageMode:imageMode]; - return YES; -} - -- (BOOL)processTouchEvent:(float)x y:(float)y { - // return [_effectManager processTouchEvent:x y:y]; - return YES; -} - -#if __has_include("bef_effect_ai_api.h") -- (bef_ai_face_info *)getFaceInfo { - return [_effectManager getFaceInfo]; -} - -- (bef_ai_hand_info *)getHandInfo { - return [_effectManager getHandInfo]; -} - -- (bef_ai_skeleton_result *)getSkeletonInfo { - return [_effectManager getSkeletonInfo]; -} -#endif - -#pragma mark - RenderMsgDelegate -- (BOOL)msgProc:(unsigned int)unMsgID arg1:(int)nArg1 arg2:(int)nArg2 arg3:(const char *)cArg3 { -#ifdef DEBUG_LOG - NSLog(@"msg proc: %d, arg: %d in processor: %lu", unMsgID, nArg1, self.hash); -#endif - return NO; -} - -#pragma mark - setter -- (void)setUsePipeline:(BOOL)usePipeline { - _usePipeline = usePipeline; -#if __has_include("bef_effect_ai_api.h") - if (_effectManager != nil) { - 
[_effectManager setUsePipeline:usePipeline]; - } - if (_render != nil) { - _render.useCacheTexture = usePipeline; - } -#endif -} - -#pragma mark - private - -- (void)be_releaseSDK { - // 瑕佸湪opengl涓婁笅鏂囦腑璋冪敤 -#if __has_include("bef_effect_ai_api.h") - [_effectManager destroyTask]; -#endif -} - -- (void)be_checkAndSetComposer { - if ([self be_shouldResetComposer]) { - // [_effectManager initEffectCompose:[_resourceHelper composerPath]]; -#if __has_include("bef_effect_ai_api.h") - _shouldResetComposer = false; -#endif - } -} - -- (BOOL)be_shouldResetComposer { -#if __has_include("bef_effect_ai_api.h") - return _shouldResetComposer && _composerMode == 0; -#else - return NO; -#endif -} - -- (BEFormatType)be_outputFormat { - if (_outputFormat) { - return _outputFormat; - } -#if __has_include("bef_effect_ai_api.h") - return _inputFormat; -#else - return _outputFormat; -#endif -} - -- (BEProcessResult *)be_transforTextureToResult:(GLuint)texture width:(int)width height:(int)height fromPixelBuffer:(BOOL)fromPixelBuffer { - BEProcessResult *result = [BEProcessResult new]; - result.texture = texture; - result.size = CGSizeMake(width, height); - - BEProcessResultBuffer *buffer; - if (_processorResult & (BERawData | (BECVPixelBuffer & !_pixelBufferAccelerate) | BEImage)) { - buffer = [BEProcessResultBuffer new]; - buffer.format = [self be_outputFormat]; - buffer.width = width; - buffer.height = height; - int bytesPerRow = 0; -#if __has_include("bef_effect_ai_api.h") - buffer.buffer = [_render transforTextureToBuffer:texture width:width height:height outputFormat:[self be_outputFormat] bytesPerRowPointer:&bytesPerRow]; -#endif - buffer.bytesPerRow = bytesPerRow; - result.buffer = buffer; - } - if (!fromPixelBuffer && (_processorResult & BECVPixelBuffer)) { - if (buffer) { -#if __has_include("bef_effect_ai_api.h") - result.pixelBuffer = [_render transforBufferToCVPixelBuffer:buffer.buffer width:buffer.width height:buffer.height bytesPerRow:buffer.bytesPerRow 
inputFormat:buffer.format outputFormat:[self be_outputFormat]]; -#endif - } else { - NSLog(@"getCVPixelBuffer error: no buffer"); - } - } - if ((_processorResult & BEImage)) { - if (buffer) { -#if __has_include("bef_effect_ai_api.h") - result.image = [_render transforBufferToUIImage:buffer.buffer - width:buffer.width - height:buffer.height - bytesPerRow:buffer.bytesPerRow - inputFormat:buffer.format]; -#endif - } else { - NSLog(@"getImage error: no buffer"); - } - } - return result; -} - -- (void)be_checkAndCaptureFrame:(BEProcessResult *)result { - if (_captureNextFrame) { - - int width = result.size.width; - int height = result.size.height; - UIImage *image; - if (result.image) { - image = result.image; - } else if (result.buffer) { - - BEProcessResultBuffer *buffer = result.buffer; -#if __has_include("bef_effect_ai_api.h") - image = [_render transforBufferToUIImage:buffer.buffer - width:buffer.width - height:buffer.height - bytesPerRow:buffer.bytesPerRow - inputFormat:buffer.format]; -#endif - } else { - - int bytesPerRow; - BEFormatType format = BE_RGBA; -#if __has_include("bef_effect_ai_api.h") - unsigned char *buffer = [_render transforTextureToBuffer:result.texture - width:width - height:height - outputFormat:format - bytesPerRowPointer:&bytesPerRow]; - image = [_render transforBufferToUIImage:buffer - width:width - height:height - bytesPerRow:bytesPerRow - inputFormat:format]; -#endif - } - if (self.captureDelegate) { - if (image) { - [self.captureDelegate onImageCapture:image]; - } else { - NSLog(@"captureNextFrame error: no image"); - } - } - _captureNextFrame = NO; - } -} - -/* - * 鑾峰彇璁惧鏃嬭浆瑙掑害 - */ -- (int)getDeviceOrientation { -#if __has_include("bef_effect_ai_api.h") - UIDeviceOrientation orientation = [[UIDevice currentDevice] orientation]; - switch (orientation) { - case UIDeviceOrientationPortrait: - return BEF_AI_CLOCKWISE_ROTATE_0; - - case UIDeviceOrientationPortraitUpsideDown: - return BEF_AI_CLOCKWISE_ROTATE_180; - - case 
UIDeviceOrientationLandscapeLeft: - return BEF_AI_CLOCKWISE_ROTATE_270; - - case UIDeviceOrientationLandscapeRight: - return BEF_AI_CLOCKWISE_ROTATE_90; - - default: - return BEF_AI_CLOCKWISE_ROTATE_0; - } -#else - return -1; -#endif -} - -@end - diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLTexture.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLTexture.h index 16d309314..bd570a7a0 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLTexture.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLTexture.h @@ -1,116 +1,127 @@ -// // BEGLTexture.h -// BytedEffects -// -// Created by qun on 2021/1/19. -// Copyright 漏 2021 ailab. All rights reserved. -// +// EffectsARSDK + #ifndef BEGLTexture_h #define BEGLTexture_h #import #import +#import typedef NS_ENUM(NSInteger, BEGLTextureType) { - // {zh} / 閫氳繃 glGenTextures 鍒涘缓鐨勭汗鐞 {en} /Textures created by glGenTextures + // {zh} / 閫氳繃 glGenTextures 鍒涘缓鐨勭汗鐞 {en} /Textures created by glGenTextures BE_NORMAL_TEXTURE, - // {zh} / 涓 CVPixelBuffer 缁戝畾鐨勭汗鐞 {en} /Textures bound to CVPixelBuffer + // {zh} / 涓 CVPixelBuffer 缁戝畾鐨勭汗鐞 {en} /Textures bound to CVPixelBuffer BE_PIXEL_BUFFER_TEXTURE }; -// {zh} / OpenGL 绾圭悊鐨勫皝瑁咃紝瀹冨彲浠ユ槸鐩存帴閫氳繃 glGenTextures 鍒涘缓鐨勭汗鐞嗭紝 {en} /OpenGL texture encapsulation, it can be a texture created directly through glGenTextures, -// {zh} / 涔熷彲浠ユ槸閫氳繃 CVPixelBufferRef 鍒涘缓骞朵笌涔嬬粦瀹氱殑绾圭悊锛 {en} /It can also be a texture created and bound with CVPixelBufferRef, -// {zh} / 褰撲娇鐢 CVPixelBufferRef 鍒涘缓鏃讹紝浠呮敮鎸 kCVPixelFormatType_32BGRA 鏍煎紡鐨 CVPixelBufferRef {en} /When created with CVPixelBufferRef, only CVPixelBufferRef in kCVPixelFormatType_32BGRA format is supported +// {zh} / OpenGL 绾圭悊鐨勫皝瑁咃紝瀹冨彲浠ユ槸鐩存帴閫氳繃 glGenTextures 鍒涘缓鐨勭汗鐞嗭紝 {en} /OpenGL texture encapsulation, it can be a texture created directly through glGenTextures, +// {zh} / 涔熷彲浠ユ槸閫氳繃 
CVPixelBufferRef 鍒涘缓骞朵笌涔嬬粦瀹氱殑绾圭悊锛 {en} /It can also be a texture created and bound with CVPixelBufferRef, +// {zh} / 褰撲娇鐢 CVPixelBufferRef 鍒涘缓鏃讹紝浠呮敮鎸 kCVPixelFormatType_32BGRA 鏍煎紡鐨 CVPixelBufferRef {en} /When created with CVPixelBufferRef, only CVPixelBufferRef in kCVPixelFormatType_32BGRA format is supported @protocol BEGLTexture -// {zh} / 绾圭悊 ID {en} /Texture ID +// {zh} / 绾圭悊 ID {en} /Texture ID @property (nonatomic) GLuint texture; -// {zh} / 绾圭悊绫诲瀷 {en} /Texture type +// uv绾圭悊ID锛屽湪缁戝畾鐨刾ixelbuffer鏄痽uv鏍煎紡鏃惰绾圭悊鍙锋湁鏁 +@property (nonatomic) GLuint uvTexture; + +// {zh} / 绾圭悊绫诲瀷 {en} /Texture type @property (nonatomic) BEGLTextureType type; -// {zh} / 鏄惁鏈夋晥 {en} /Is it effective +// {zh} / 鏄惁鏈夋晥 {en} /Is it effective @property (nonatomic) BOOL available; -// {zh} / 瀹 {en} /Width +// {zh} / 瀹 {en} /Width @property (nonatomic, readonly) int width; -// {zh} / 楂 {en} /High +// {zh} / 楂 {en} /High @property (nonatomic, readonly) int height; -// {zh} / @brief 鍒濆鍖 {en} /@brief initialization -// {zh} / @param width 瀹 {en} /@param width -// {zh} / @param height 楂 {en} /@param height +// {zh} / @brief 鍒濆鍖 {en} /@brief initialization +// {zh} / @param width 瀹 {en} /@param width +// {zh} / @param height 楂 {en} /@param height - (instancetype)initWithWidth:(int)width height:(int)height; -// {zh} / @brief 鏇存柊瀹介珮 {en} /@Brief update width and height -// {zh} / @param width 瀹 {en} /@param width -// {zh} / @param height 楂 {en} /@param height +// {zh} / @brief 鏇存柊瀹介珮 {en} /@Brief update width and height +// {zh} / @param width 瀹 {en} /@param width +// {zh} / @param height 楂 {en} /@param height - (void)updateWidth:(int)width height:(int)height; -// {zh} / @brief 閿姣佺汗鐞 {en} /@Briefly destroy texture +// {zh} / @brief 閿姣佺汗鐞 {en} /@Briefly destroy texture - (void)destroy; @end -// {zh} / 鏅 gl 绾圭悊鐨勫皝瑁 {en} /Ordinary gl texture encapsulation +// {zh} / 鏅 gl 绾圭悊鐨勫皝瑁 {en} /Ordinary gl texture encapsulation @interface BENormalGLTexture : NSObject -// {zh} / @brief 鏍规嵁绾圭悊鍙枫佸銆侀珮鍒濆鍖 
{en} /@Brief initializes according to texture number, width, and height -// {zh} / @param texture 绾圭悊 ID {en} /@param texture texture ID -// {zh} / @param width 瀹 {en} /@param width -// {zh} / @param height 楂 {en} /@param height +// {zh} / @brief 鏍规嵁绾圭悊鍙枫佸銆侀珮鍒濆鍖 {en} /@Brief initializes according to texture number, width, and height +// {zh} / @param texture 绾圭悊 ID {en} /@param texture texture ID +// {zh} / @param width 瀹 {en} /@param width +// {zh} / @param height 楂 {en} /@param height - (instancetype)initWithTexture:(GLuint)texture width:(int)width height:(int)height; -// {zh} / @brief 鏍规嵁 buffer 鍒濆鍖 {en} /@Brief initialization based on buffer +// {zh} / @brief 鏍规嵁 buffer 鍒濆鍖 {en} /@Brief initialization based on buffer /// @param buffer buffer -// {zh} / @param width 瀹 {en} /@param width -// {zh} / @param height 楂 {en} /@param height -// {zh} / @param format buffer 鏍煎紡锛孏L_RGBA/GL_BGRA {en} /@param format buffer format, GL_RGBA/GL_BGRA +// {zh} / @param width 瀹 {en} /@param width +// {zh} / @param height 楂 {en} /@param height +// {zh} / @param format buffer 鏍煎紡锛孏L_RGBA/GL_BGRA {en} /@param format buffer format, GL_RGBA/GL_BGRA - (instancetype)initWithBuffer:(unsigned char *)buffer width:(int)width height:(int)height format:(GLenum)format; -// {zh} / @brief 鏍规嵁 buffer 鏇存柊绾圭悊鍐呭 {en} /@BriefUpdate texture content according to buffer +// {zh} / @brief 鏍规嵁 buffer 鏇存柊绾圭悊鍐呭 {en} /@BriefUpdate texture content according to buffer /// @param buffer buffer -// {zh} / @param width 瀹 {en} /@param width -// {zh} / @param height 楂 {en} /@param height -// {zh} / @param format buffer 鏍煎紡锛孏L_RGBA/GL_BGRA {en} /@param format buffer format, GL_RGBA/GL_BGRA +// {zh} / @param width 瀹 {en} /@param width +// {zh} / @param height 楂 {en} /@param height +// {zh} / @param format buffer 鏍煎紡锛孏L_RGBA/GL_BGRA {en} /@param format buffer format, GL_RGBA/GL_BGRA - (void)update:(unsigned char *)buffer width:(int)width height:(int)height format:(GLenum)format; -// {zh} / @brief 
鏍规嵁绾圭悊鍙枫佸銆侀珮鏇存柊绾圭悊 {en} Update texture according to texture number, width, and height -// {zh} / @param texture 绾圭悊 ID {en} /@param texture texture ID -// {zh} / @param width 瀹 {en} /@param width -// {zh} / @param height 楂 {en} /@param height +// {zh} / @brief 鏍规嵁绾圭悊鍙枫佸銆侀珮鏇存柊绾圭悊 {en} Update texture according to texture number, width, and height +// {zh} / @param texture 绾圭悊 ID {en} /@param texture texture ID +// {zh} / @param width 瀹 {en} /@param width +// {zh} / @param height 楂 {en} /@param height - (void)updateTexture:(GLuint)texture width:(int)width height:(int)height; @end -// {zh} / 鏍规嵁 CVPixelBuffer 鐢熸垚鐨 gl 绾圭悊灏佽 {en} /Gl texture package generated according to CVPixelBuffer -// {zh} / 鍐呴儴瀹屾垚浜 CVPixelBuffer 涓 gl 绾圭悊鐨勭粦瀹氾紝褰撳畬鎴愬绾圭悊鐨勫鐞嗕箣鍚庯紝 {en} /Internally completed the binding of CVPixelBuffer and gl texture. After the texture is processed, -// {zh} / 鐩存帴璋冪敤 pixelBuffer 灏卞彲浠ュ緱鍒板鐞嗕箣鍚庣殑 CVPixelBuffer {en} /Call pixelBuffer directly to get the processed CVPixelBuffer +// {zh} / 鏍规嵁 CVPixelBuffer 鐢熸垚鐨 gl 绾圭悊灏佽 {en} /Gl texture package generated according to CVPixelBuffer +// {zh} / 鍐呴儴瀹屾垚浜 CVPixelBuffer 涓 gl 绾圭悊鍙 mtl 绾圭悊鐨勭粦瀹氾紝褰撳畬鎴愬绾圭悊鐨勫鐞嗕箣鍚庯紝 {en} /Internally completed the binding of CVPixelBuffer to gl texture and mtl texture. 
After the texture is processed, +// {zh} / 鐩存帴璋冪敤 pixelBuffer 灏卞彲浠ュ緱鍒板鐞嗕箣鍚庣殑 CVPixelBuffer {en} /Call pixelBuffer directly to get the processed CVPixelBuffer @interface BEPixelBufferGLTexture : NSObject -// {zh} / @brief 鏍规嵁 CVOpenGLESTextureCacheRef 鍒濆鍖 {en} CVOpenGLESTextureCacheRef initialization +@property (nonatomic) id mtlTexture; + +// {zh} / @brief 鏍规嵁 CVOpenGLESTextureCacheRef 鍒濆鍖 {en} CVOpenGLESTextureCacheRef initialization /// @param textureCache cache - (instancetype)initWithTextureCache:(CVOpenGLESTextureCacheRef)textureCache; -// {zh} / @brief 鏍规嵁瀹姐侀珮銆丆VOpenGLESTextureCacheRef 鍒濆鍖 {en} CVOpenGLESTextureCacheRef initialization based on width, height -// {zh} / @param width 瀹 {en} /@param width -// {zh} / @param height 楂 {en} /@param height +// {zh} / @brief 鏍规嵁 CVMetalTextureCacheRef 鍒濆鍖 {en} CVMetalTextureCacheRef initialization +- (instancetype)initWithMTKTextureCache:(CVMetalTextureCacheRef)textureCache; + +// {zh} / @brief 鏍规嵁瀹姐侀珮銆丆VOpenGLESTextureCacheRef 鍒濆鍖 {en} CVOpenGLESTextureCacheRef initialization based on width, height +// {zh} / @param width 瀹 {en} /@param width +// {zh} / @param height 楂 {en} /@param height /// @param textureCache cache - (instancetype)initWithWidth:(int)width height:(int)height textureCache:(CVOpenGLESTextureCacheRef)textureCache; -// {zh} / @brief 鏍规嵁 CVPixelBuffer 鍒濆鍖 {en} /@Briefing initialization based on CVPixelBuffer +// {zh} / @brief 鏍规嵁瀹姐侀珮銆丆VMetalTextureCacheRef 鍒濆鍖 {en} CVMetalTextureCacheRef initialization based on width, height +- (instancetype)initWithWidth:(int)width height:(int)height mtlTextureCache:(CVMetalTextureCacheRef)textureCache; + +// {zh} / @brief 鏍规嵁 CVPixelBuffer 鍒濆鍖 {en} /@Briefing initialization based on CVPixelBuffer /// @param pixelBuffer CVPixelBuffer /// @param textureCache cache - (instancetype)initWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer textureCache:(CVOpenGLESTextureCacheRef)textureCache; -// {zh} / @brief 鏇存柊 CVPixelBuffer {en} /@brief update CVPixelBuffer +// {zh} 
/ @brief 鏍规嵁 CVPixelBuffer 鍒濆鍖 {en} /@Briefing initialization based on CVPixelBuffer +- (instancetype)initWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer mtlTextureCache:(CVMetalTextureCacheRef)textureCache; + +// {zh} / @brief 鏇存柊 CVPixelBuffer {en} /@brief update CVPixelBuffer /// @param pixelBuffer CVPixelBuffer - (void)update:(CVPixelBufferRef)pixelBuffer; -// {zh} / @brief 鑾峰彇涓庝箣缁戝畾鐨 CVPixelBuffer {en} /@BriefGet the CVPixelBuffer bound with it +// {zh} / @brief 鑾峰彇涓庝箣缁戝畾鐨 CVPixelBuffer {en} /@BriefGet the CVPixelBuffer bound with it - (CVPixelBufferRef)pixelBuffer; @end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLTexture.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLTexture.m index 4dc0d24b4..da3db0db0 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLTexture.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLTexture.m @@ -1,14 +1,17 @@ -// // BEGLTexture.m -// BytedEffects -// -// Created by qun on 2021/1/19. -// Copyright 漏 2021 ailab. All rights reserved. 
-// +// EffectsARSDK + #import "BEGLTexture.h" #import +#define GL_TEXTURE_SETTING(texture) glBindTexture(GL_TEXTURE_2D, texture); \ + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); \ + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); \ + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); \ + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); \ + glBindTexture(GL_TEXTURE_2D, 0); + @implementation BENormalGLTexture { } @@ -100,14 +103,22 @@ - (void)destroy { @end @implementation BEPixelBufferGLTexture { - CVOpenGLESTextureRef _cvTexture; CVPixelBufferRef _pixelBuffer; BOOL _needReleasePixelBuffer; + + CVOpenGLESTextureRef _cvTexture; + CVOpenGLESTextureRef _yuvTexture; CVOpenGLESTextureCacheRef _textureCache; + + CVMetalTextureRef _cvMTLTexture; + CVMetalTextureCacheRef _mtlTextureCache; + BOOL _needReleaseTextureCache; + BOOL _needReleaseMTLTextureCache; } @synthesize texture = _texture; +@synthesize uvTexture = _uvTexture; @synthesize type = _type; @synthesize available = _available; @synthesize width = _width; @@ -132,6 +143,16 @@ - (instancetype)initWithTextureCache:(CVOpenGLESTextureCacheRef)textureCache { return self; } +- (instancetype)initWithMTKTextureCache:(CVMetalTextureCacheRef)textureCache { + self = [super init]; + if (self) { + _type = BE_PIXEL_BUFFER_TEXTURE; + _mtlTextureCache = textureCache; + _needReleaseMTLTextureCache = NO; + } + return self; +} + - (instancetype)initWithWidth:(int)width height:(int)height { if (self = [super init]) { _type = BE_PIXEL_BUFFER_TEXTURE; @@ -150,6 +171,16 @@ - (instancetype)initWithWidth:(int)width height:(int)height textureCache:(CVOpen return self; } +- (instancetype)initWithWidth:(int)width height:(int)height mtlTextureCache:(CVMetalTextureCacheRef)textureCache { + if (self = [super init]) { + _mtlTextureCache = textureCache; + _needReleaseMTLTextureCache = NO; + _type = BE_PIXEL_BUFFER_TEXTURE; + [self update:[self createPxielBuffer:width 
height:height]]; + } + return self; +} + - (instancetype)initWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer textureCache:(CVOpenGLESTextureCacheRef)textureCache { if (self = [super init]) { _textureCache = textureCache; @@ -160,18 +191,43 @@ - (instancetype)initWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer textureCache return self; } +- (instancetype)initWithCVPixelBuffer:(CVPixelBufferRef)pixelBuffer mtlTextureCache:(CVMetalTextureCacheRef)textureCache { + if (self = [super init]) { + _mtlTextureCache = textureCache; + _needReleaseMTLTextureCache = NO; + _type = BE_PIXEL_BUFFER_TEXTURE; + [self update:pixelBuffer]; + } + return self; +} + - (CVPixelBufferRef)createPxielBuffer:(int)width height:(int)height { CVPixelBufferRef pixelBuffer; - const void *keys[] = { - kCVPixelBufferOpenGLCompatibilityKey, - kCVPixelBufferIOSurfacePropertiesKey - }; - const void *values[] = { - (__bridge const void *)([NSNumber numberWithBool:YES]), - (__bridge const void *)([NSDictionary dictionary]) - }; - - CFDictionaryRef optionsDicitionary = CFDictionaryCreate(kCFAllocatorDefault, keys, values, 2, NULL, NULL); + CFDictionaryRef optionsDicitionary = nil; + // judge whether the device support metal + if (MTLCreateSystemDefaultDevice()) { + const void *keys[] = { + kCVPixelBufferOpenGLCompatibilityKey, + kCVPixelBufferMetalCompatibilityKey, + kCVPixelBufferIOSurfacePropertiesKey + }; + const void *values[] = { + (__bridge const void *)([NSNumber numberWithBool:YES]), + (__bridge const void *)([NSNumber numberWithBool:YES]), + (__bridge const void *)([NSDictionary dictionary]) + }; + optionsDicitionary = CFDictionaryCreate(kCFAllocatorDefault, keys, values, 3, NULL, NULL); + } else { + const void *keys[] = { + kCVPixelBufferOpenGLCompatibilityKey, + kCVPixelBufferIOSurfacePropertiesKey + }; + const void *values[] = { + (__bridge const void *)([NSNumber numberWithBool:YES]), + (__bridge const void *)([NSDictionary dictionary]) + }; + optionsDicitionary = 
CFDictionaryCreate(kCFAllocatorDefault, keys, values, 3, NULL, NULL); + } CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA, optionsDicitionary, &pixelBuffer); CFRelease(optionsDicitionary); @@ -204,6 +260,8 @@ - (void)update:(CVPixelBufferRef)pixelBuffer { _available = NO; return; } + + // gl texture if (!_textureCache) { _needReleaseTextureCache = YES; EAGLContext *context = [EAGLContext currentContext]; @@ -220,6 +278,12 @@ - (void)update:(CVPixelBufferRef)pixelBuffer { _cvTexture = nil; } + if (_yuvTexture) { + CFRelease(_yuvTexture); + _yuvTexture = nil; + } + + OSType pbType = CVPixelBufferGetPixelFormatType(pixelBuffer); CVPixelBufferLockBaseAddress(pixelBuffer, 0); int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(pixelBuffer); int width = (int) CVPixelBufferGetWidth(pixelBuffer); @@ -230,23 +294,76 @@ - (void)update:(CVPixelBufferRef)pixelBuffer { width = width + (int) iLeft + (int) iRight; height = height + (int) iTop + (int) iBottom; bytesPerRow = bytesPerRow + (int) iLeft + (int) iRight; - CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, width, height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &_cvTexture); - if (ret != kCVReturnSuccess || !_cvTexture) { - NSLog(@"create CVOpenGLESTextureRef fail: %d", ret); - _available = NO; - return; + CVReturn ret = kCVReturnSuccess; + + if (pbType == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange || pbType == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) { + // yuv + size_t planeCount = CVPixelBufferGetPlaneCount(pixelBuffer); + assert(planeCount == 2); + + CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_LUMINANCE, width, height, GL_LUMINANCE, GL_UNSIGNED_BYTE, 0, &_cvTexture); + if (ret != kCVReturnSuccess || !_cvTexture) { + NSLog(@"create CVOpenGLESTextureRef fail: %d", ret); + _available = NO; + 
return; + } + + _width = width; + _height = height; + _pixelBuffer = pixelBuffer; + _texture = CVOpenGLESTextureGetName(_cvTexture); + GL_TEXTURE_SETTING(_texture); + + ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_LUMINANCE_ALPHA, width/2, height/2, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, 1, &_yuvTexture); + if (ret != kCVReturnSuccess || !_yuvTexture) { + NSLog(@"create CVOpenGLESTextureRef fail: %d", ret); + _available = NO; + return; + } + _uvTexture = CVOpenGLESTextureGetName(_yuvTexture); + GL_TEXTURE_SETTING(_uvTexture); + } else { + // bgra + ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, width, height, GL_BGRA, GL_UNSIGNED_BYTE, 0, &_cvTexture); + if (ret != kCVReturnSuccess || !_cvTexture) { + NSLog(@"create CVOpenGLESTextureRef fail: %d", ret); + _available = NO; + return; + } + + _width = width; + _height = height; + _pixelBuffer = pixelBuffer; + _texture = CVOpenGLESTextureGetName(_cvTexture); + GL_TEXTURE_SETTING(_texture); + } + + // metal texture + id device = MTLCreateSystemDefaultDevice(); + if (device) { + if(!_mtlTextureCache) { + _needReleaseMTLTextureCache = YES; + ret = CVMetalTextureCacheCreate(kCFAllocatorDefault, NULL, device, NULL, &_mtlTextureCache); + if (ret != kCVReturnSuccess) { + NSLog(@"create CVMetalTextureCacheRef fail: %d", ret); + _available = NO; + return; + } + } + + ret = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _mtlTextureCache, pixelBuffer, NULL, MTLPixelFormatBGRA8Unorm, width, height, 0, &_cvMTLTexture); + if (ret != kCVReturnSuccess || !_cvMTLTexture) { + NSLog(@"create CVMetalTextureRef fail: %d", ret); + _available = NO; + return; + } + _mtlTexture = CVMetalTextureGetTexture(_cvMTLTexture); + if (_cvMTLTexture) { + CFRelease(_cvMTLTexture); + _cvMTLTexture = nil; + } } - _width = width; - _height = height; - _pixelBuffer = pixelBuffer; - 
_texture = CVOpenGLESTextureGetName(_cvTexture); - glBindTexture(GL_TEXTURE_2D, _texture); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - glBindTexture(GL_TEXTURE_2D, 0); _available = YES; } @@ -259,6 +376,10 @@ - (void)destroy { CFRelease(_cvTexture); _cvTexture = nil; } + if (_cvMTLTexture) { + CFRelease(_cvMTLTexture); + _cvMTLTexture = nil; + } if (_pixelBuffer && _needReleasePixelBuffer) { NSLog(@"release pixelBuffer %@", _pixelBuffer); _needReleasePixelBuffer = NO; @@ -271,6 +392,12 @@ - (void)destroy { CFRelease(_textureCache); _textureCache = nil; } + if (_mtlTextureCache && _needReleaseMTLTextureCache) { + NSLog(@"release CVMetalTextureCache %@", _mtlTextureCache); + CVMetalTextureCacheFlush(_mtlTextureCache, 0); + CFRelease(_mtlTextureCache); + _mtlTextureCache = nil; + } _available = NO; } diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLUtils.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLUtils.h new file mode 100644 index 000000000..f56563556 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLUtils.h @@ -0,0 +1,19 @@ +// BEGLUtils.h +// EffectsARSDK + + +#ifndef BEGLUtils_h +#define BEGLUtils_h + +#import + +@interface BEGLUtils : NSObject + ++ (EAGLContext *)createContextWithDefaultAPI:(EAGLRenderingAPI)api; + ++ (EAGLContext *)createContextWithDefaultAPI:(EAGLRenderingAPI)api sharegroup:(EAGLSharegroup *)sharegroup; + +@end + + +#endif /* BEGLUtils_h */ diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLUtils.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLUtils.m new file mode 100644 index 
000000000..40e9d6f48 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEGLUtils.m @@ -0,0 +1,45 @@ +// BEGLUtils.m +// EffectsARSDK + + +#import "BEGLUtils.h" + +@implementation BEGLUtils + ++ (EAGLContext *)createContextWithDefaultAPI:(EAGLRenderingAPI)api { + while (api != 0) { + EAGLContext *context = [[EAGLContext alloc] initWithAPI:api]; + if (context != nil) { + return context; + } + NSLog(@"not support api %lu, use lower api %lu", (unsigned long)api, [self be_lowerAPI:api]); + api = [self be_lowerAPI:api]; + } + return nil; +} + ++ (EAGLContext *)createContextWithDefaultAPI:(EAGLRenderingAPI)api sharegroup:(EAGLSharegroup *)sharegroup { + while (api != 0) { + EAGLContext *context = [[EAGLContext alloc] initWithAPI:api sharegroup:sharegroup]; + if (context != nil) { + return context; + } + NSLog(@"not support api %lu, use lower api %lu", (unsigned long)api, [self be_lowerAPI:api]); + api = [self be_lowerAPI:api]; + } + return nil; +} + ++ (EAGLRenderingAPI)be_lowerAPI:(EAGLRenderingAPI)api { + switch (api) { + case kEAGLRenderingAPIOpenGLES3: + return kEAGLRenderingAPIOpenGLES2; + case kEAGLRenderingAPIOpenGLES2: + return kEAGLRenderingAPIOpenGLES1; + case kEAGLRenderingAPIOpenGLES1: + return 0; + } + return 0; +} + +@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEHttpRequestProvider.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEHttpRequestProvider.h index 53dd39d60..579ebc3dd 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEHttpRequestProvider.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEHttpRequestProvider.h @@ -1,18 +1,18 @@ #ifndef BEHttpRequestProvider_h #define BEHttpRequestProvider_h -#if __has_include("bef_effect_ai_api.h") -#include "BytedLicenseDefine.h" +#if __has_include() +#include #endif -#if 
__has_include("bef_effect_ai_api.h") -class BEHttpRequestProvider: public HttpRequestProvider +#if __has_include() +class BEHttpRequestProvider: public EffectsSDK::HttpRequestProvider { public: - bool getRequest(const RequestInfo* requestInfo, ResponseInfo& responseInfo) override; + bool getRequest(const EffectsSDK::RequestInfo* requestInfo, EffectsSDK::ResponseInfo& responseInfo) override; - bool postRequest(const RequestInfo* requestInfo, ResponseInfo& responseInfo) override; + bool postRequest(const EffectsSDK::RequestInfo* requestInfo, EffectsSDK::ResponseInfo& responseInfo) override; }; #endif diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEHttpRequestProvider.mm b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEHttpRequestProvider.mm index d58975ecc..20ee60748 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEHttpRequestProvider.mm +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEHttpRequestProvider.mm @@ -1,14 +1,14 @@ #import "BEHttpRequestProvider.h" #import -#if __has_include("bef_effect_ai_api.h") -//post璇锋眰鏆傛椂涓嶉渶瑕佸疄鐜 -bool BEHttpRequestProvider::getRequest(const RequestInfo* requestInfo, ResponseInfo& responseInfo) +#if __has_include() +// {zh} post璇锋眰鏆傛椂涓嶉渶瑕佸疄鐜 {en} The post request does not need to be implemented for the time being +bool BEHttpRequestProvider::getRequest(const EffectsSDK::RequestInfo* requestInfo, EffectsSDK::ResponseInfo& responseInfo) { return false; } -bool BEHttpRequestProvider::postRequest(const RequestInfo* requestInfo, ResponseInfo& responseInfo) +bool BEHttpRequestProvider::postRequest(const EffectsSDK::RequestInfo* requestInfo, EffectsSDK::ResponseInfo& responseInfo) { NSString* nsUrl = [[NSString alloc] initWithCString:requestInfo->url.c_str() encoding:NSUTF8StringEncoding]; NSURL *URL = [NSURL URLWithString:nsUrl]; diff --git 
a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEImageUtils.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEImageUtils.h index 0d5dec9df..e96533272 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEImageUtils.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEImageUtils.h @@ -1,60 +1,101 @@ -// // BEImageUtils.h -// BytedEffects -// -// Created by qun on 2021/2/2. -// Copyright 漏 2021 ailab. All rights reserved. -// +// EffectsARSDK + #ifndef BEImageUtils_h #define BEImageUtils_h #import #import +#import #import #import "BEGLTexture.h" -#import "BEPixelBufferInfo.h" +// {zh} / 鏁版嵁鏍煎紡 {en} /Data format +typedef NS_ENUM(NSInteger, BEFormatType) { + // {zh} 鏈煡鏍煎紡 {en} Unknown format + BE_UNKNOW, + // 8bit R G B A + BE_RGBA, + // 8bit B G R A + BE_BGRA, + // video range, 8bit Y1 Y2 Y3 Y4... U1 V1... + BE_YUV420V, + // full range, 8bit Y1 Y2 Y3 Y4... U1 V1... + BE_YUV420F, + // 8bit Y1 Y2 Y3 Y4... U1... V1... 
+ BE_YUVY420, + BE_RGB, + BE_BGR +}; + + +typedef NS_ENUM(NSInteger, BEFlipOrientation) { + BE_FlipHorizontal, + + BE_FlipVertical +}; + +@interface BEPixelBufferInfo : NSObject + +@property (nonatomic, assign) BEFormatType format; +@property (nonatomic, assign) int width; +@property (nonatomic, assign) int height; +@property (nonatomic, assign) int bytesPerRow; + +@end @interface BEBuffer : NSObject -// {zh} / buffer 鎸囬拡锛岀敤浜 RGBA 鏍煎紡 {en} /Buffer pointer for RGBA format +// {zh} / buffer 鎸囬拡锛岀敤浜 RGBA 鏍煎紡 {en} /Buffer pointer for RGBA format @property (nonatomic, assign) unsigned char *buffer; -// {zh} / y buffer 鎸囬拡锛屽彧鐢ㄤ簬 YUV 鏍煎紡 {en} /Y buffer pointer, only for YUV format +// {zh} / y buffer 鎸囬拡锛屽彧鐢ㄤ簬 YUV 鏍煎紡 {en} /Y buffer pointer, only for YUV format @property (nonatomic, assign) unsigned char *yBuffer; -// {zh} / uv buffer 鎸囬拡锛屽彧鐢ㄤ簬 YUV 鏍煎紡 {en} /UV buffer pointer, only for YUV format +// {zh} / uv buffer 鎸囬拡锛屽彧鐢ㄤ簬 YUV 鏍煎紡 {en} /UV buffer pointer, only for YUV format @property (nonatomic, assign) unsigned char *uvBuffer; -// {zh} / 瀹斤紝鐢ㄤ簬 RGBA 鏍煎紡 {en} /Wide for RGBA format +// {zh} / u buffer 鎸囬拡锛屽彧鐢ㄤ簬 YUV 鏍煎紡(y420) {en} /U buffer pointer, only for YUV format(y420) +@property (nonatomic, assign) unsigned char *uBuffer; + +// {zh} / v buffer 鎸囬拡锛屽彧鐢ㄤ簬 YUV 鏍煎紡(y420) {en} /v buffer pointer, only for YUV format(y420) +@property (nonatomic, assign) unsigned char *vBuffer; + +// {zh} / 瀹斤紝鐢ㄤ簬 RGBA 鏍煎紡 {en} /Wide for RGBA format @property (nonatomic, assign) int width; -// {zh} / 楂橈紝鐢ㄤ簬 RGBA 鏍煎紡 {en} /High, for RGBA format +// {zh} / 楂橈紝鐢ㄤ簬 RGBA 鏍煎紡 {en} /High, for RGBA format @property (nonatomic, assign) int height; -// {zh} / y buffer 瀹斤紝鐢ㄤ簬 YUV 鏍煎紡 {en} /Y buffer width for YUV format +// {zh} / y buffer 瀹斤紝鐢ㄤ簬 YUV 鏍煎紡 {en} /Y buffer width for YUV format @property (nonatomic, assign) int yWidth; -// {zh} / y buffer 楂橈紝鐢ㄤ簬 YUV 鏍煎紡 {en} High/y buffer for YUV format +// {zh} / y buffer 楂橈紝鐢ㄤ簬 YUV 鏍煎紡 {en} High/y buffer for YUV format @property (nonatomic, assign) 
int yHeight; -// {zh} / uv buffer 瀹斤紝鐢ㄤ簬 YUV 鏍煎紡 {en} Wide/uv buffer for YUV format +// {zh} / uv buffer 瀹斤紝鐢ㄤ簬 YUV 鏍煎紡 {en} Wide/uv buffer for YUV format @property (nonatomic, assign) int uvWidth; -// {zh} / uv buffer 楂橈紝鐢ㄤ簬 YUV 鏍煎紡 {en} High/uv buffer for YUV format +// {zh} / uv buffer 楂橈紝鐢ㄤ簬 YUV 鏍煎紡 {en} High/uv buffer for YUV format @property (nonatomic, assign) int uvHeight; -// {zh} / 琛屽锛岀敤浜 RGBA 鏍煎紡 {en} /Line width for RGBA format +// {zh} / 琛屽锛岀敤浜 RGBA 鏍煎紡 {en} /Line width for RGBA format @property (nonatomic, assign) int bytesPerRow; -// {zh} / y buffer 琛屽锛岀敤浜 YUV 鏍煎紡 {en} /Y buffer line width for YUV format +// {zh} / y buffer 琛屽锛岀敤浜 YUV 鏍煎紡 {en} /Y buffer line width for YUV format @property (nonatomic, assign) int yBytesPerRow; -// {zh} / uv buffer 琛屽锛岀敤浜 YUV 鏍煎紡 {en} /UV buffer line width for YUV format +// {zh} / uv buffer 琛屽锛岀敤浜 YUV 鏍煎紡 {en} /UV buffer line width for YUV format @property (nonatomic, assign) int uvBytesPerRow; -// {zh} / 鏍煎紡 {en} /Format +// {zh} / u buffer 琛屽锛岀敤浜 YUV 鏍煎紡 {en} /U buffer line width for YUV format +@property (nonatomic, assign) int uBytesPerRow; + +// {zh} / v buffer 琛屽锛岀敤浜 YUV 鏍煎紡 {en} /V buffer line width for YUV format +@property (nonatomic, assign) int vBytesPerRow; + +// {zh} / 鏍煎紡 {en} /Format @property (nonatomic, assign) BEFormatType format; @end @@ -63,124 +104,141 @@ #pragma mark - Init output texture and get -// {zh} / @brief 鍒濆鍖栦竴涓笌 CVPixelBufferRef 缁戝畾鐨勭汗鐞 {en} /@brief initializes a texture bound to CVPixelBufferRef -// {zh} / @param width 瀹 {en} /@param width -// {zh} / @param height 楂 {en} /@param height -// {zh} / @param format 鏍煎紡锛屼粎鏀寔 BE_BGRA/BE_YUV420F/BE_YUV420V {en} /@param format, only support BE_BGRA/BE_YUV420F/BE_YUV420V -- (BEPixelBufferGLTexture *)getOutputPixelBufferGLTextureWithWidth:(int)width height:(int)height format:(BEFormatType)format; +// {zh} / @brief 鍒濆鍖栦竴涓笌 CVPixelBufferRef 缁戝畾鐨勭汗鐞 {en} /@brief initializes a texture bound to CVPixelBufferRef +// {zh} / @param width 瀹 
{en} /@param width +// {zh} / @param height 楂 {en} /@param height +// {zh} / @param format 鏍煎紡锛屼粎鏀寔 BE_BGRA/BE_YUV420F/BE_YUV420V {en} /@param format, only support BE_BGRA/BE_YUV420F/BE_YUV420V +- (BEPixelBufferGLTexture *)getOutputPixelBufferGLTextureWithWidth:(int)width height:(int)height format:(BEFormatType)format withPipeline:(BOOL)usepipeline; -// {zh} / @brief 寮鍚汗鐞嗙紦瀛 {en} /@brief open texture cache -// {zh} / @details 褰撳紑鍚箣鍚庯紝璋冪敤 getOutputPixelBufferGLTextureWithWidth:height:format: {en} /@details When turned on, call getOutputPixelBufferGLTextureWithWidth: height: format: -// {zh} / 鏃讹紝浼氬惊鐜緭鍑轰笁涓笉鍚岀殑绾圭悊锛屼繚璇佷换鎰忚繛缁殑 3 甯х汗鐞嗕笉浼氶噸澶嶏紝鐢ㄤ簬 SDK 鐨勫苟琛屾覆鏌 {en} /Hour, three different textures will be output in a loop to ensure that any consecutive 3 frames of textures will not be repeated, which is used for parallel rendering of SDK -// {zh} / @param useCache 鏄惁寮鍚汗鐞嗙紦瀛 {en} /@param useCache whether to open texture cache +// {zh} / @brief 寮鍚汗鐞嗙紦瀛 {en} /@brief open texture cache +// {zh} / @details 褰撳紑鍚箣鍚庯紝璋冪敤 getOutputPixelBufferGLTextureWithWidth:height:format: {en} /@details When turned on, call getOutputPixelBufferGLTextureWithWidth: height: format: +// {zh} / 鏃讹紝浼氬惊鐜緭鍑轰笁涓笉鍚岀殑绾圭悊锛屼繚璇佷换鎰忚繛缁殑 3 甯х汗鐞嗕笉浼氶噸澶嶏紝鐢ㄤ簬 SDK 鐨勫苟琛屾覆鏌 {en} /Hour, three different textures will be output in a loop to ensure that any consecutive 3 frames of textures will not be repeated, which is used for parallel rendering of SDK +// {zh} / @param useCache 鏄惁寮鍚汗鐞嗙紦瀛 {en} /@param useCache whether to open texture cache - (void)setUseCachedTexture:(BOOL)useCache; #pragma mark - CVPixelBuffer to others -// {zh} / @brief CVPixelBuffer 杞 BEBuffer {en} /@Briefing CVPixelBuffer to BEBuffer +// {zh} / @brief CVPixelBuffer 杞 BEBuffer {en} /@Briefing CVPixelBuffer to BEBuffer /// @param pixelBuffer CVPixelBuffer -// {zh} / @param outputFormat 杈撳嚭 BEBuffer 鏍煎紡 {en} /@param outputFormat output BEBuffer format +// {zh} / @param outputFormat 杈撳嚭 BEBuffer 鏍煎紡 {en} /@param outputFormat output BEBuffer 
format - (BEBuffer *)transforCVPixelBufferToBuffer:(CVPixelBufferRef)pixelBuffer outputFormat:(BEFormatType)outputFormat; -// {zh} / @brief CVPixelBuffer 杞 绾圭悊 {en} /@Briefing CVPixelBuffer, texture +// {zh} / @brief CVPixelBuffer 杞 绾圭悊 {en} /@Briefing CVPixelBuffer, texture /// @param pixelBuffer CVPixelBuffer - (BEPixelBufferGLTexture *)transforCVPixelBufferToTexture:(CVPixelBufferRef)pixelBuffer; -// {zh} / @brief CVPixelBuffer 杞 CVPixelBuffer {en} /@Briefing CVPixelBuffer to CVPixelBuffer -// {zh} / @param pixelBuffer 杈撳叆 CVPixelBuffer {en} /@param pixelBuffer Enter CVPixelBuffer -// {zh} / @param outputFormat 杈撳嚭 CVPixelBuffer 鏍煎紡 {en} /@param outputFormat output CVPixelBuffer format +// {zh} / @brief CVPixelBuffer 杞 metal绾圭悊 {en} /@Briefing CVPixelBuffer to metal texture +/// @param pixelBuffer CVPixelBuffer +- (id)transformCVPixelBufferToMTLTexture:(CVPixelBufferRef)pixelBuffer; + +// {zh} / @brief CVPixelBuffer 杞 CVPixelBuffer {en} /@Briefing CVPixelBuffer to CVPixelBuffer +// {zh} / @param pixelBuffer 杈撳叆 CVPixelBuffer {en} /@param pixelBuffer Enter CVPixelBuffer +// {zh} / @param outputFormat 杈撳嚭 CVPixelBuffer 鏍煎紡 {en} /@param outputFormat output CVPixelBuffer format - (CVPixelBufferRef)transforCVPixelBufferToCVPixelBuffer:(CVPixelBufferRef)pixelBuffer outputFormat:(BEFormatType)outputFormat; -// {zh} / @brief 鏃嬭浆 CVPixelBuffer {en} /@Briefing Rotate CVPixelBuffer -// {zh} / @details 杈撳嚭鐨 CVPixelBuffer 闇瑕佹墜鍔ㄨ皟鐢 CVPixelBufferRelease 閲婃斁 {en} /@details The output CVPixelBuffer needs to be released manually by calling CVPixelBufferRelease +// {zh} / @brief 鏃嬭浆 CVPixelBuffer {en} /@Briefing Rotate CVPixelBuffer +// {zh} / @details 杈撳嚭鐨 CVPixelBuffer 闇瑕佹墜鍔ㄨ皟鐢 CVPixelBufferRelease 閲婃斁 {en} /@details The output CVPixelBuffer needs to be released manually by calling CVPixelBufferRelease /// @param pixelBuffer CVPixelBuffer -// {zh} / @param rotation 鏃嬭浆瑙掑害锛90/180/270 {en} /@param rotation angle, 90/180/270 +// {zh} / @param rotation 鏃嬭浆瑙掑害锛90/180/270 {en} /@param 
rotation angle, 90/180/270 - (CVPixelBufferRef)rotateCVPixelBuffer:(CVPixelBufferRef)pixelBuffer rotation:(int)rotation; + +- (CVPixelBufferRef)reflectCVPixelBuffer:(CVPixelBufferRef)pixelBuffer orientation:(BEFlipOrientation)orient; + #pragma mark - BEBuffer to others -// {zh} / @brief BEBuffer 杞 CVPixelBuffer {en} @Briefing BEBuffer to CVPixelBuffer +// {zh} / @brief BEBuffer 杞 CVPixelBuffer {en} @Briefing BEBuffer to CVPixelBuffer /// @param buffer BEBuffer -// {zh} / @param outputFormat 杈撳嚭鏍煎紡 {en} /@param outputFormat output format +// {zh} / @param outputFormat 杈撳嚭鏍煎紡 {en} /@param outputFormat output format - (CVPixelBufferRef)transforBufferToCVPixelBuffer:(BEBuffer *)buffer outputFormat:(BEFormatType)outputFormat; -// {zh} / @brief BEBuffer 杞 CVPixelBuffer {en} @Briefing BEBuffer to CVPixelBuffer -// {zh} / @details 灏 BEBuffer 鐨勫唴瀹瑰鍒跺埌宸插瓨鍦ㄧ殑鐩爣 CVPixleBuffer 涓紝鍙互鍚屾椂杩涜鏍煎紡杞崲 {en} /@details Copy the contents of the BEBuffer to the existing target CVPixleBuffer, which can be formatted at the same time +// {zh} / @brief BEBuffer 杞 CVPixelBuffer {en} @Briefing BEBuffer to CVPixelBuffer +// {zh} / @details 灏 BEBuffer 鐨勫唴瀹瑰鍒跺埌宸插瓨鍦ㄧ殑鐩爣 CVPixleBuffer 涓紝鍙互鍚屾椂杩涜鏍煎紡杞崲 {en} /@details Copy the contents of the BEBuffer to the existing target CVPixleBuffer, which can be formatted at the same time /// @param buffer BEBuffer -// {zh} / @param pixelBuffer 鐩爣 CVPixelBuffer {en} /@param pixelBuffer Target CVPixelBuffer +// {zh} / @param pixelBuffer 鐩爣 CVPixelBuffer {en} /@param pixelBuffer Target CVPixelBuffer - (BOOL)transforBufferToCVPixelBuffer:(BEBuffer *)buffer pixelBuffer:(CVPixelBufferRef)pixelBuffer; -// {zh} / @brief BEBuffer 杞 BEBuffer {en} /@Briefing BEBuffer to BEBuffer -// {zh} / @param inputBuffer 杈撳叆BEBuffer {en} /@param inputBuffer Enter BEBuffer -// {zh} / @param outputFormat 杈撳嚭鏍煎紡 {en} /@param outputFormat output format +// {zh} / @brief BEBuffer 杞 BEBuffer {en} /@Briefing BEBuffer to BEBuffer +// {zh} / @param inputBuffer 杈撳叆BEBuffer {en} /@param 
inputBuffer Enter BEBuffer +// {zh} / @param outputFormat 杈撳嚭鏍煎紡 {en} /@param outputFormat output format - (BEBuffer *)transforBufferToBuffer:(BEBuffer *)inputBuffer outputFormat:(BEFormatType)outputFormat; -// {zh} / @brief BEBuffer 杞 BEBuffer {en} /@Briefing BEBuffer to BEBuffer -// {zh} / @details 灏 BEBuffer 鐨勫唴瀹瑰鍒跺埌宸插瓨鍦ㄧ殑鐩爣 BEBuffer 涓紝鍙互鍚屾椂杩涜鏍煎紡杞崲 {en} /@details Copy the contents of the BEBuffer to the existing target BEBuffer, and format conversion can be performed at the same time -// {zh} / @param inputBuffer 杈撳叆 BEBuffer {en} /@param inputBuffer Enter BEBuffer -// {zh} / @param outputBuffer 杈撳嚭 BEBuffer {en} /@param outputBuffer output BEBuffer +// {zh} / @brief BEBuffer 杞 BEBuffer {en} /@Briefing BEBuffer to BEBuffer +// {zh} / @details 灏 BEBuffer 鐨勫唴瀹瑰鍒跺埌宸插瓨鍦ㄧ殑鐩爣 BEBuffer 涓紝鍙互鍚屾椂杩涜鏍煎紡杞崲 {en} /@details Copy the contents of the BEBuffer to the existing target BEBuffer, and format conversion can be performed at the same time +// {zh} / @param inputBuffer 杈撳叆 BEBuffer {en} /@param inputBuffer Enter BEBuffer +// {zh} / @param outputBuffer 杈撳嚭 BEBuffer {en} /@param outputBuffer output BEBuffer - (BOOL)transforBufferToBuffer:(BEBuffer *)inputBuffer outputBuffer:(BEBuffer *)outputBuffer; -// {zh} / @brief 鏃嬭浆 BEBuffer {en} @Briefing BEBuffer -// {zh} / @param inputBuffer 杈撳叆 BEBuffer {en} /@param inputBuffer Enter BEBuffer -// {zh} / @param outputBuffer 杈撳嚭 BEBuffer {en} /@param outputBuffer output BEBuffer -// {zh} / @param rotation 鏃嬭浆瑙掑害锛90/180/270 {en} /@param rotation angle, 90/180/270 +// {zh} / @brief 鏃嬭浆 BEBuffer {en} @Briefing BEBuffer +// {zh} / @param inputBuffer 杈撳叆 BEBuffer {en} /@param inputBuffer Enter BEBuffer +// {zh} / @param outputBuffer 杈撳嚭 BEBuffer {en} /@param outputBuffer output BEBuffer +// {zh} / @param rotation 鏃嬭浆瑙掑害锛90/180/270 {en} /@param rotation angle, 90/180/270 - (BOOL)rotateBufferToBuffer:(BEBuffer *)inputBuffer outputBuffer:(BEBuffer *)outputBuffer rotation:(int)rotation; -// {zh} / @brief BEBuffer 杞 绾圭悊 {en} /@Brief 
BEBuffer, texture +// {zh} / @brief BEBuffer 杞 绾圭悊 {en} /@Brief BEBuffer, texture /// @param buffer BEBuffer - (id)transforBufferToTexture:(BEBuffer *)buffer; -// {zh} / @brief BEBuffer 杞 UIImage {en} @Briefing BEBuffer to UIImage +// {zh} / @brief BEBuffer 杞 UIImage {en} @Briefing BEBuffer to UIImage /// @param buffer BEBuffer - (UIImage *)transforBufferToUIImage:(BEBuffer *)buffer; #pragma mark - Texture to others -// {zh} / @brief 绾圭悊杞 BEBuffer {en} /@brief texture to BEBuffer -// {zh} / @param texture 绾圭悊 ID {en} /@param texture texture ID -// {zh} / @param widht 瀹 {en} /@param widht -// {zh} / @param height 楂 {en} /@param height -// {zh} / @param outputFormat 杈撳嚭 BEBuffer 鏍煎紡锛屼粎鏀寔 RGBA/BGRA {en} /@param outputFormat output BEBuffer format, only supports RGBA/BGRA +// {zh} / @brief 绾圭悊杞 BEBuffer {en} /@brief texture to BEBuffer +// {zh} / @param texture 绾圭悊 ID {en} /@param texture texture ID +// {zh} / @param widht 瀹 {en} /@param widht +// {zh} / @param height 楂 {en} /@param height +// {zh} / @param outputFormat 杈撳嚭 BEBuffer 鏍煎紡锛屼粎鏀寔 RGBA/BGRA {en} /@param outputFormat output BEBuffer format, only supports RGBA/BGRA - (BEBuffer *)transforTextureToBEBuffer:(GLuint)texture width:(int)widht height:(int)height outputFormat:(BEFormatType)outputFormat; #pragma mark - UIImage to others -// {zh} / @brief UIImage 杞 BEBuffer {en} @Briefing UIImage to BEBuffer +// {zh} / @brief UIImage 杞 BEBuffer {en} @Briefing UIImage to BEBuffer /// @param image UIImage - (BEBuffer *)transforUIImageToBEBuffer:(UIImage *)image; #pragma mark - Utils -// {zh} / @brief 鑾峰彇 CVPxielBuffer 鏍煎紡 {en} /@Briefing Get the CVPxielBuffer format +// {zh} / @brief 鑾峰彇 CVPxielBuffer 鏍煎紡 {en} /@Briefing Get the CVPxielBuffer format /// @param pixelBuffer CVPixelBuffer - (BEFormatType)getCVPixelBufferFormat:(CVPixelBufferRef)pixelBuffer; -// {zh} / @brief OSType 杞 BEFormatType {en} @Briefing OSType to BEFormatType +// {zh} / @brief OSType 杞 BEFormatType {en} @Briefing OSType to BEFormatType /// @param 
type OSType - (BEFormatType)getFormatForOSType:(OSType)type; -// {zh} / @brief BEFormatType 杞 OSType {en} @Briefing BEFormatType to OSType +// {zh} / @brief BEFormatType 杞 OSType {en} @Briefing BEFormatType to OSType /// @param format BEFormatType - (OSType)getOsType:(BEFormatType)format; -// {zh} / @brief BEFormatType 杞 Glenum {en} @Briefing BEFormatType to Glenum +// {zh} / @brief BEFormatType 杞 Glenum {en} @Briefing BEFormatType to Glenum /// @param format BEFormatType - (GLenum)getGlFormat:(BEFormatType)format; -// {zh} / @brief 鑾峰彇 CVPixelBuffer 淇℃伅 {en} /@Briefing for CVPixelBuffer information +// {zh} / @brief 鑾峰彇 CVPixelBuffer 淇℃伅 {en} /@Briefing for CVPixelBuffer information /// @param pixelBuffer CVPixelBuffer - (BEPixelBufferInfo *)getCVPixelBufferInfo:(CVPixelBufferRef)pixelBuffer; -// {zh} / @brief 鍒涘缓 BEBuffer {en} /@Briefing Create BEBuffer -// {zh} / @details 鍙互鏍规嵁瀹姐侀珮銆乥ytesPerRow銆佹牸寮忕瓑淇℃伅璁$畻鍑烘墍闇鐨勫ぇ灏忥紝 {en} /@Details can calculate the required size based on information such as width, height, bytesPerRow, format, etc. -// {zh} / 澶栭儴鏃犻渶鑰冭檻鍐呭瓨閲婃斁 {en} /External no need to consider memory release -// {zh} / @param width 瀹 {en} /@param width -// {zh} / @param height 楂 {en} /@param height +// {zh} / @brief 鍒涘缓 BEBuffer {en} /@Briefing Create BEBuffer +// {zh} / @details 鍙互鏍规嵁瀹姐侀珮銆乥ytesPerRow銆佹牸寮忕瓑淇℃伅璁$畻鍑烘墍闇鐨勫ぇ灏忥紝 {en} /@Details can calculate the required size based on information such as width, height, bytesPerRow, format, etc. +// {zh} / 澶栭儴鏃犻渶鑰冭檻鍐呭瓨閲婃斁 {en} /External no need to consider memory release +// {zh} / @param width 瀹 {en} /@param width +// {zh} / @param height 楂 {en} /@param height /// @param bytesPerRow bytesPerRow /// @param format BEFormatType - (BEBuffer *)allocBufferWithWidth:(int)width height:(int)height bytesPerRow:(int)bytesPerRow format:(BEFormatType)format; +// {zh} / @brief 鎷疯礉pixelbuffer锛岃皟鐢ㄨ呴渶瑕佺鐞嗚繑鍥瀊uffer鐨勭敓鍛藉懆鏈 {en} /@Briefing Copy CVPixelBuffer, revoker should be resposible for the life cycle. 
+// {zh} / @param pixelBuffer 婧恇uffer {en} /@param src pixelBuffer +- (CVPixelBufferRef)copyCVPixelBuffer:(CVPixelBufferRef)pixelBuffer; + +// change default settings ++ (void)setTextureCacheNum:(int)num; ++ (void)setUseCachedPixelBuffer:(bool)use; ++ (int)textureCacheNum; ++ (bool)useCachedPixelBuffer; + @end #endif /* BEImageUtils_h */ diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEImageUtils.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEImageUtils.m index 56a19eb13..77b0718be 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEImageUtils.m +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEImageUtils.m @@ -1,17 +1,19 @@ -// // BEImageUtils.m -// BytedEffects -// -// Created by qun on 2021/2/2. -// Copyright 漏 2021 ailab. All rights reserved. -// +// EffectsARSDK + #import "BEImageUtils.h" #import +#import "BEGLTexture.h" #import "BEOpenGLRenderHelper.h" -static const int TEXTURE_CACHE_NUM = 3; -static const int MAX_MALLOC_CACHE = 3; +static int TEXTURE_CACHE_NUM = 3; +static int MAX_MALLOC_CACHE = 3; + +static bool USE_CACHE_PIXEL_BUFFER = true; + +@implementation BEPixelBufferInfo +@end @implementation BEBuffer @end @@ -52,11 +54,6 @@ - (instancetype)init - (void)dealloc { // release input/output texture - if (_textureCache) { - CVOpenGLESTextureCacheFlush(_textureCache, 0); - CFRelease(_textureCache); - _textureCache = nil; - } for (id texture in _inputTextures) { [texture destroy]; } @@ -65,6 +62,11 @@ - (void)dealloc [texture destroy]; } [_outputTextures removeAllObjects]; + if (_textureCache) { + CVOpenGLESTextureCacheFlush(_textureCache, 0); + CFRelease(_textureCache); + _textureCache = nil; + } // release malloced memory for (NSValue *value in _mallocDict.allValues) { unsigned char *pointer = [value pointerValue]; @@ -78,11 +80,14 @@ - (void)dealloc } for (NSValue *value in 
self.pixelBufferPoolDict.allValues) { CVPixelBufferPoolRef pool = [value pointerValue]; + CVPixelBufferPoolFlush(pool, kCVPixelBufferPoolFlushExcessBuffers); CVPixelBufferPoolRelease(pool); } + [self.pixelBufferPoolDict removeAllObjects]; + self.pixelBufferPoolDict = nil; } -- (BEPixelBufferGLTexture *)getOutputPixelBufferGLTextureWithWidth:(int)width height:(int)height format:(BEFormatType)format { +- (BEPixelBufferGLTexture *)getOutputPixelBufferGLTextureWithWidth:(int)width height:(int)height format:(BEFormatType)format withPipeline:(BOOL)usepipeline { if (format != BE_BGRA) { NSLog(@"this method only supports BE_BRGA format, please use BE_BGRA"); return nil; @@ -102,7 +107,7 @@ - (BEPixelBufferGLTexture *)getOutputPixelBufferGLTextureWithWidth:(int)width he [_outputTexture updateWidth:width height:height]; - if (_useCacheTexture) { + if (_useCacheTexture && usepipeline) { // If use pipeline, return last output texture if we can. // To resolve problems like size changed between two continuous frames int lastTextureIndex = (_textureIndex + TEXTURE_CACHE_NUM - 1) % TEXTURE_CACHE_NUM; @@ -169,13 +174,47 @@ - (CVPixelBufferRef)rotateCVPixelBuffer:(CVPixelBufferRef)pixelBuffer rotation:( return outputPixelBuffer; } -- (id)transforCVPixelBufferToTexture:(CVPixelBufferRef)pixelBuffer { +- (CVPixelBufferRef)reflectCVPixelBuffer:(CVPixelBufferRef)pixelBuffer orientation:(BEFlipOrientation)orient +{ BEPixelBufferInfo *info = [self getCVPixelBufferInfo:pixelBuffer]; - if (info.format != BE_BGRA) { - pixelBuffer = [self transforCVPixelBufferToCVPixelBuffer:pixelBuffer outputFormat:BE_BGRA]; - NSLog(@"this method only supports BRGA format CVPixelBuffer, convert it to BGRA CVPixelBuffer internal"); + + int outputWidth = info.width; + int outputHeight = info.height; + + CVPixelBufferRef outputPixelBuffer = [self be_createPixelBufferFromPool:[self getOsType:info.format] heigth:outputHeight width:outputWidth]; + + + BEBuffer *inputBuffer = [self 
be_getBufferFromCVPixelBuffer:pixelBuffer]; + BEBuffer *outputBuffer = [self be_getBufferFromCVPixelBuffer:outputPixelBuffer]; + + vImage_Buffer src, dest; + { + src.width = inputBuffer.width; + src.height = inputBuffer.height; + src.data = inputBuffer.buffer; + src.rowBytes = inputBuffer.bytesPerRow; + dest.width = outputBuffer.width; + dest.height = outputBuffer.height; + dest.data = outputBuffer.buffer; + dest.rowBytes = outputBuffer.bytesPerRow; } + if (orient == BE_FlipVertical) { + vImageVerticalReflect_ARGB8888(&src, &dest, kvImageNoFlags); + } else { + vImageHorizontalReflect_ARGB8888(&src, &dest, kvImageNoFlags); + } + return outputPixelBuffer; +} + + +- (id)transforCVPixelBufferToTexture:(CVPixelBufferRef)pixelBuffer { + BEPixelBufferInfo *info = [self getCVPixelBufferInfo:pixelBuffer]; +// if (info.format != BE_BGRA) { +// pixelBuffer = [self transforCVPixelBufferToCVPixelBuffer:pixelBuffer outputFormat:BE_BGRA]; +//// NSLog(@"this method only supports BRGA format CVPixelBuffer, convert it to BGRA CVPixelBuffer internal"); +// } + if (_useCacheTexture) { _textureIndex = (_textureIndex + 1) % TEXTURE_CACHE_NUM; } else { @@ -226,7 +265,7 @@ - (BEBuffer *)transforBufferToBuffer:(BEBuffer *)inputBuffer outputFormat:(BEFor BEBuffer *buffer = nil; if ([self be_isRgba:outputFormat]) { if ([self be_isRgba:inputBuffer.format]) { - buffer = [self allocBufferWithWidth:inputBuffer.width height:inputBuffer.height bytesPerRow:inputBuffer.bytesPerRow format:outputFormat]; + buffer = [self allocBufferWithWidth:inputBuffer.width height:inputBuffer.height bytesPerRow:inputBuffer.width * 4 format:outputFormat]; } else { buffer = [self allocBufferWithWidth:inputBuffer.width height:inputBuffer.height bytesPerRow:inputBuffer.width * 4 format:outputFormat]; } @@ -236,6 +275,10 @@ - (BEBuffer *)transforBufferToBuffer:(BEBuffer *)inputBuffer outputFormat:(BEFor } else { buffer = [self allocBufferWithWidth:inputBuffer.width height:inputBuffer.height 
bytesPerRow:inputBuffer.bytesPerRow format:outputFormat]; } + } else if ([self be_isRgb:outputFormat]) { + if ([self be_isRgba:inputBuffer.format]) { + buffer = [self allocBufferWithWidth:inputBuffer.width height:inputBuffer.height bytesPerRow:inputBuffer.width * 3 format:outputFormat]; + } } if (buffer == nil) { return nil; @@ -312,8 +355,73 @@ - (BOOL)transforBufferToBuffer:(BEBuffer *)inputBuffer outputBuffer:(BEBuffer *) bgraBuffer.rowBytes = outputBuffer.bytesPerRow; BOOL result = [self be_convertYuvToRgba:&yBuffer yvBuffer:&uvBuffer rgbaBuffer:&bgraBuffer inputFormat:inputBuffer.format outputFormat:outputBuffer.format]; return result; + } else if ([self be_isYuv420Planar:inputBuffer.format]) { + vImage_Buffer yBuffer; + yBuffer.data = inputBuffer.yBuffer; + yBuffer.width = inputBuffer.yWidth; + yBuffer.height = inputBuffer.yHeight; + yBuffer.rowBytes = inputBuffer.yBytesPerRow; + vImage_Buffer uBuffer; + uBuffer.data = inputBuffer.uBuffer; + uBuffer.width = inputBuffer.uvWidth; + uBuffer.height = inputBuffer.uvHeight; + uBuffer.rowBytes = inputBuffer.uBytesPerRow; + vImage_Buffer vBuffer; + vBuffer.data = inputBuffer.vBuffer; + vBuffer.width = inputBuffer.uvWidth; + vBuffer.height = inputBuffer.uvHeight; + vBuffer.rowBytes = inputBuffer.vBytesPerRow; + vImage_Buffer bgraBuffer; + bgraBuffer.data = outputBuffer.buffer; + bgraBuffer.width = outputBuffer.width; + bgraBuffer.height = outputBuffer.height; + bgraBuffer.rowBytes = outputBuffer.bytesPerRow; + BOOL result = [self be_convertYuvToRgba:&yBuffer uBuffer:&uBuffer vBuffer:&vBuffer rgbaBuffer:&bgraBuffer inputFormat:inputBuffer.format outputFormat:outputBuffer.format]; + return result; + } + } else if ([self be_isYuv420Planar:outputBuffer.format]) { + if ([self be_isRgba:inputBuffer.format]) { + vImage_Buffer rgbaBuffer; + rgbaBuffer.data = inputBuffer.buffer; + rgbaBuffer.width = inputBuffer.width; + rgbaBuffer.height = inputBuffer.height; + rgbaBuffer.rowBytes = inputBuffer.bytesPerRow; + vImage_Buffer 
yBuffer; + yBuffer.data = outputBuffer.yBuffer; + yBuffer.width = outputBuffer.yWidth; + yBuffer.height = outputBuffer.yHeight; + yBuffer.rowBytes = outputBuffer.yBytesPerRow; + vImage_Buffer uBuffer; + uBuffer.data = outputBuffer.uBuffer; + uBuffer.width = outputBuffer.uvWidth; + uBuffer.height = outputBuffer.uvHeight; + uBuffer.rowBytes = outputBuffer.uBytesPerRow; + vImage_Buffer vBuffer; + vBuffer.data = outputBuffer.vBuffer; + vBuffer.width = outputBuffer.uvWidth; + vBuffer.height = outputBuffer.uvHeight; + vBuffer.rowBytes = outputBuffer.vBytesPerRow; + + BOOL result = [self be_convertRgbaToYuv:&rgbaBuffer yBuffer:&yBuffer uBuffer:&uBuffer vBuffer:&vBuffer inputFormat:inputBuffer.format outputFormat:outputBuffer.format]; + return result; + } + } else if ([self be_isRgb:outputBuffer.format]) { + if ([self be_isRgba:inputBuffer.format]) { + vImage_Buffer bgraBuffer; + bgraBuffer.data = inputBuffer.buffer; + bgraBuffer.width = inputBuffer.width; + bgraBuffer.height = inputBuffer.height; + bgraBuffer.rowBytes = inputBuffer.bytesPerRow; + vImage_Buffer bgrBuffer; + bgrBuffer.data = outputBuffer.buffer; + bgrBuffer.width = outputBuffer.width; + bgrBuffer.height = outputBuffer.height; + bgrBuffer.rowBytes = outputBuffer.bytesPerRow; + BOOL result = [self be_convertBgraToBgr:&bgraBuffer outputBuffer:&bgrBuffer inputFormat:inputBuffer.format outputFormat:outputBuffer.format]; + return result; } } + return NO; } @@ -368,6 +476,26 @@ - (BOOL)rotateBufferToBuffer:(BEBuffer *)inputBuffer outputBuffer:(BEBuffer *)ou return texture; } +- (id)transformCVPixelBufferToMTLTexture:(CVPixelBufferRef)pixelBuffer{ + size_t width = CVPixelBufferGetWidth(pixelBuffer); + size_t height = CVPixelBufferGetHeight(pixelBuffer); + id device = MTLCreateSystemDefaultDevice(); + CVMetalTextureCacheRef _textureCache; + CVMetalTextureCacheCreate(NULL, NULL, device, NULL, &_textureCache); + + CVMetalTextureRef tmpTexture = NULL; + CVReturn ret = 
CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _textureCache, pixelBuffer, NULL, MTLPixelFormatBGRA8Unorm, width, height, 0, &tmpTexture); + if (ret != kCVReturnSuccess) { + NSLog(@"MetalTextureCreate error: %d", ret); + return nil; + } + id mtlTexture = CVMetalTextureGetTexture(tmpTexture); + CFRelease(tmpTexture); + + return mtlTexture; +} + + - (UIImage *)transforBufferToUIImage:(BEBuffer *)buffer { if (![self be_isRgba:buffer.format]) { buffer = [self transforBufferToBuffer:buffer outputFormat:BE_BGRA]; @@ -388,7 +516,7 @@ - (UIImage *)transforBufferToUIImage:(BEBuffer *)buffer { if (buffer.format == BE_RGBA) { bitmapInfo = kCGBitmapByteOrderDefault|kCGImageAlphaLast; } else { - bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaFirst; + bitmapInfo = kCGBitmapByteOrder32Host | kCGImageAlphaNoneSkipFirst; } CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault; @@ -408,6 +536,8 @@ - (UIImage *)transforBufferToUIImage:(BEBuffer *)buffer { CGDataProviderRelease(provider); CGColorSpaceRelease(colorSpaceRef); CGImageRelease(imageRef); + NSData *data = UIImageJPEGRepresentation(uiImage, 1); + uiImage = [UIImage imageWithData:data]; return uiImage; } @@ -426,6 +556,8 @@ - (BEFormatType)getFormatForOSType:(OSType)type { return BE_YUV420F; case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: return BE_YUV420V; + case kCVPixelFormatType_420YpCbCr8Planar: + return BE_YUVY420; default: return BE_UNKNOW; break; @@ -480,7 +612,7 @@ - (BEBuffer *)allocBufferWithWidth:(int)width height:(int)height bytesPerRow:(in buffer.bytesPerRow = bytesPerRow; buffer.format = format; if ([self be_isRgba:format]) { - buffer.buffer = [self be_mallocBufferWithSize:bytesPerRow * height * 4]; + buffer.buffer = [self be_mallocBufferWithSize:bytesPerRow * height]; return buffer; } else if ([self be_isYuv420:format]) { buffer.yBuffer = [self be_mallocBufferWithSize:bytesPerRow * height]; @@ -492,6 +624,9 @@ - (BEBuffer *)allocBufferWithWidth:(int)width 
height:(int)height bytesPerRow:(in buffer.uvHeight = height / 2; buffer.uvBytesPerRow = bytesPerRow; return buffer; + } else if ([self be_isRgb:format]) { + buffer.buffer = [self be_mallocBufferWithSize:bytesPerRow * height * 3]; + return buffer; } return nil; } @@ -526,8 +661,45 @@ - (BEBuffer *)transforTextureToBEBuffer:(GLuint)texture width:(int)widht height: return buffer; } +- (CVPixelBufferRef)copyCVPixelBuffer:(CVPixelBufferRef)pixelBuffer { + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + int bufferWidth = (int)CVPixelBufferGetWidth(pixelBuffer); + int bufferHeight = (int)CVPixelBufferGetHeight(pixelBuffer); + size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer); + uint8_t *baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer); + OSType format = CVPixelBufferGetPixelFormatType(pixelBuffer); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + CVPixelBufferRef pixelBufferCopy = [self be_createPixelBufferFromPool:format heigth:bufferHeight width:bufferWidth]; + CVPixelBufferLockBaseAddress(pixelBufferCopy, 0); + uint8_t *copyBaseAddress = CVPixelBufferGetBaseAddress(pixelBufferCopy); + memcpy(copyBaseAddress, baseAddress, bufferHeight * bytesPerRow); + CVPixelBufferUnlockBaseAddress(pixelBufferCopy, 0); + return pixelBufferCopy; +} + #pragma mark - private +- (BOOL)be_convertBgraToBgr:(vImage_Buffer *)inputBuffer outputBuffer:(vImage_Buffer *)outputBuffer inputFormat:(BEFormatType)inputFormat + outputFormat:(BEFormatType)outputFormat { + if (![self be_isRgba:inputFormat] || ![self be_isRgb:outputFormat]) { + return NO; + } + vImage_Error error = kvImageNoError; + if (inputFormat == BE_BGRA && outputFormat == BE_BGR) + error = vImageConvert_BGRA8888toBGR888(inputBuffer, outputBuffer, kvImageNoFlags); + else if (inputFormat == BE_BGRA && outputFormat == BE_RGB) + error = vImageConvert_BGRA8888toRGB888(inputBuffer, outputBuffer, kvImageNoFlags); + else if (inputFormat == BE_RGBA && outputFormat == BE_BGR) + error = 
vImageConvert_RGBA8888toBGR888(inputBuffer, outputBuffer, kvImageNoFlags); + else if (inputFormat == BE_RGBA && outputFormat == BE_RGB) + error = vImageConvert_RGBA8888toRGB888(inputBuffer, outputBuffer, kvImageNoFlags); + if (error != kvImageNoError) { + NSLog(@"be_convertBgraToBgr error: %ld", error); + } + return error == kvImageNoError; +} + - (BOOL)be_convertRgbaToBgra:(vImage_Buffer *)inputBuffer outputBuffer:(vImage_Buffer *)outputBuffer inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { if (![self be_isRgba:inputFormat] || ![self be_isRgba:outputFormat]) { return NO; @@ -584,6 +756,41 @@ - (BOOL)be_convertRgbaToYuv:(vImage_Buffer *)inputBuffer yBuffer:(vImage_Buffer return YES; } +- (BOOL)be_convertRgbaToYuv:(vImage_Buffer *)inputBuffer + yBuffer:(vImage_Buffer *)yBuffer + uBuffer:(vImage_Buffer *)uBuffer + vBuffer:(vImage_Buffer *)vBuffer + inputFormat:(BEFormatType)inputFormat + outputFormat:(BEFormatType)outputFormat { + if (![self be_isRgba:inputFormat] || ![self be_isYuv420Planar:outputFormat]) { + return NO; + } + uint8_t map[4] = {1, 2, 3, 0}; + [self be_permuteMap:map format:inputFormat]; + vImage_YpCbCrPixelRange pixelRange; + [self be_yuvPixelRange:&pixelRange format:outputFormat]; + + vImageARGBType argbType = kvImageARGB8888; + vImageYpCbCrType yuvType = kvImage420Yp8_Cb8_Cr8; + vImage_ARGBToYpCbCr conversionInfo; + vImage_Flags flags = kvImageNoFlags; + + vImage_Error error = vImageConvert_ARGBToYpCbCr_GenerateConversion(kvImage_ARGBToYpCbCrMatrix_ITU_R_601_4, &pixelRange, &conversionInfo, argbType, yuvType, flags); + if (error != kvImageNoError) { + NSLog(@"vImageConvert_ARGBToYpCbCr_GenerateConversion error: %ld", error); + return NO; + } + + error = vImageConvert_ARGB8888To420Yp8_Cb8_Cr8(inputBuffer, yBuffer, uBuffer, vBuffer, &conversionInfo, map, flags); + if (error != kvImageNoError) { + NSLog(@"vImageConvert_ARGB8888To420Yp8_Cb8_Cr8 error: %ld", error); + return NO; + } + + return YES; +} + + - 
(BOOL)be_convertYuvToRgba:(vImage_Buffer *)yBuffer yvBuffer:(vImage_Buffer *)uvBuffer rgbaBuffer:(vImage_Buffer *)rgbaBuffer inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { if (![self be_isYuv420:inputFormat] || ![self be_isRgba:outputFormat]) { return NO; @@ -614,6 +821,36 @@ - (BOOL)be_convertYuvToRgba:(vImage_Buffer *)yBuffer yvBuffer:(vImage_Buffer *)u return YES; } +- (BOOL)be_convertYuvToRgba:(vImage_Buffer *)yBuffer uBuffer:(vImage_Buffer *)uBuffer vBuffer:(vImage_Buffer *)vBuffer rgbaBuffer:(vImage_Buffer *)rgbaBuffer inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { + if (![self be_isYuv420Planar:inputFormat] || ![self be_isRgba:outputFormat]) { + return NO; + } + + uint8_t map[4] = {1, 2, 3, 0}; + [self be_permuteMap:map format:outputFormat]; + vImage_YpCbCrPixelRange pixelRange; + [self be_yuvPixelRange:&pixelRange format:inputFormat]; + + vImageARGBType argbType = kvImageARGB8888; + vImageYpCbCrType yuvType = kvImage420Yp8_Cb8_Cr8; + vImage_YpCbCrToARGB conversionInfo; + vImage_Flags flags = kvImageNoFlags; + + vImage_Error error = vImageConvert_YpCbCrToARGB_GenerateConversion(kvImage_YpCbCrToARGBMatrix_ITU_R_601_4, &pixelRange, &conversionInfo, yuvType, argbType, flags); + if (error != kvImageNoError) { + NSLog(@"vImageConvert_YpCbCrToARGB_GenerateConversion error: %ld", error); + return NO; + } + + error = vImageConvert_420Yp8_Cb8_Cr8ToARGB8888(yBuffer, uBuffer, vBuffer, rgbaBuffer, &conversionInfo, map, 255, flags); + if (error != kvImageNoError) { + NSLog(@"vImageConvert_420Yp8_Cb8_Cr8ToARGB8888 error: %ld", error); + return NO; + } + + return YES; +} + - (BEBuffer *)be_getBufferFromCVPixelBuffer:(CVPixelBufferRef)pixelBuffer { BEBuffer *buffer = [[BEBuffer alloc] init]; BEPixelBufferInfo *info = [self getCVPixelBufferInfo:pixelBuffer]; @@ -635,16 +872,37 @@ - (BEBuffer *)be_getBufferFromCVPixelBuffer:(CVPixelBufferRef)pixelBuffer { buffer.yHeight = 
(int)CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); buffer.uvWidth = (int)CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); buffer.uvHeight = (int)CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); + } else if ([self be_isYuv420Planar:info.format]) { + buffer.yBuffer = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); + buffer.yBytesPerRow = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0); + buffer.uBuffer = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1); + buffer.uBytesPerRow = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1); + buffer.vBuffer = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 2); + buffer.vBytesPerRow = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 2); + + buffer.yWidth = (int)CVPixelBufferGetWidthOfPlane(pixelBuffer, 0); + buffer.yHeight = (int)CVPixelBufferGetHeightOfPlane(pixelBuffer, 0); + buffer.uvWidth = (int)CVPixelBufferGetWidthOfPlane(pixelBuffer, 1); + buffer.uvHeight = (int)CVPixelBufferGetHeightOfPlane(pixelBuffer, 1); + } CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); return buffer; } +- (BOOL)be_isRgb:(BEFormatType)format { + return format == BE_RGB || format == BE_BGR; +} + - (BOOL)be_isRgba:(BEFormatType)format { return format == BE_RGBA || format == BE_BGRA; } +- (BOOL)be_isYuv420Planar:(BEFormatType)format { + return format == BE_YUVY420; +} + - (BOOL)be_isYuv420:(BEFormatType)format { return format == BE_YUV420F || format == BE_YUV420V; } @@ -690,6 +948,16 @@ - (void)be_yuvPixelRange:(vImage_YpCbCrPixelRange *)pixelRange format:(BEFormatT pixelRange->CbCrMax = 240; pixelRange->CbCrMin = 16; break; + case BE_YUVY420: + pixelRange->Yp_bias = 16; + pixelRange->CbCr_bias = 128; + pixelRange->YpRangeMax = 235; + pixelRange->CbCrRangeMax = 240; + pixelRange->YpMax = 235; + pixelRange->YpMin = 16; + pixelRange->CbCrMax = 240; + pixelRange->CbCrMin = 16; + break; default: break; } @@ -710,7 +978,7 @@ - (unsigned char *)be_mallocBufferWithSize:(int)size { } - 
(CVPixelBufferRef)be_createCVPixelBufferWithWidth:(int)width height:(int)height format:(BEFormatType)format { - if (_cachedPixelBuffer != nil) { + if (_cachedPixelBuffer != nil && USE_CACHE_PIXEL_BUFFER) { BEPixelBufferInfo *info = [self getCVPixelBufferInfo:_cachedPixelBuffer]; if (info.format == format && info.width == width && info.height == height) { return _cachedPixelBuffer; @@ -720,7 +988,9 @@ - (CVPixelBufferRef)be_createCVPixelBufferWithWidth:(int)width height:(int)heigh } NSLog(@"create CVPixelBuffer"); CVPixelBufferRef pixelBuffer = [self be_createPixelBufferFromPool:[self getOsType:format] heigth:height width:width]; - _cachedPixelBuffer = pixelBuffer; + if (USE_CACHE_PIXEL_BUFFER) { + _cachedPixelBuffer = pixelBuffer; + } return pixelBuffer; } @@ -754,27 +1024,24 @@ - (CVPixelBufferPoolRef)be_createPixelBufferPool:(OSType)type heigth:(int)height NSMutableDictionary* attributes = [NSMutableDictionary dictionary]; - [attributes setObject:[NSNumber numberWithBool:YES] forKey:(NSString*)kCVPixelBufferOpenGLCompatibilityKey]; + [attributes setObject:CFBridgingRelease((__bridge_retained CFNumberRef)[NSNumber numberWithBool:YES]) forKey:(NSString*)kCVPixelBufferOpenGLCompatibilityKey]; + if (MTLCreateSystemDefaultDevice()) { + [attributes setObject:CFBridgingRelease((__bridge_retained CFNumberRef)[NSNumber numberWithBool:YES]) forKey:(NSString*)kCVPixelBufferMetalCompatibilityKey]; + } [attributes setObject:[NSNumber numberWithInt:type] forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey]; [attributes setObject:[NSNumber numberWithInt:width] forKey: (NSString*)kCVPixelBufferWidthKey]; [attributes setObject:[NSNumber numberWithInt:height] forKey: (NSString*)kCVPixelBufferHeightKey]; [attributes setObject:@(16) forKey:(NSString*)kCVPixelBufferBytesPerRowAlignmentKey]; [attributes setObject:[NSDictionary dictionary] forKey:(NSString*)kCVPixelBufferIOSurfacePropertiesKey]; - + CVReturn ret = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge 
CFDictionaryRef)attributes, &pool); + [attributes removeAllObjects]; if (ret != kCVReturnSuccess){ NSLog(@"Create pixbuffer pool failed %d", ret); return NULL; } - - CVPixelBufferRef buffer; - ret = CVPixelBufferPoolCreatePixelBuffer(NULL, pool, &buffer); - if (ret != kCVReturnSuccess){ - NSLog(@"Create pixbuffer from pixelbuffer pool failed %d", ret); - return NULL; - } - + return pool; } @@ -806,4 +1073,21 @@ - (BEOpenGLRenderHelper *)renderHelper { return _renderHelper; } ++ (void)setTextureCacheNum:(int)num { + TEXTURE_CACHE_NUM = num; + MAX_MALLOC_CACHE = num; +} + ++ (void)setUseCachedPixelBuffer:(bool)use { + USE_CACHE_PIXEL_BUFFER = use; +} + ++ (int)textureCacheNum { + return TEXTURE_CACHE_NUM; +} + ++ (bool)useCachedPixelBuffer { + return USE_CACHE_PIXEL_BUFFER; +} + @end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BELicenseHelper.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BELicenseHelper.h index 1316cde30..4497b628e 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BELicenseHelper.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BELicenseHelper.h @@ -1,9 +1,6 @@ -// // BEEffectResourceHelper.h // Effect -// -// Created by qun on 2021/5/18. 
-// + #ifndef BELicenseHelper_h #define BELicenseHelper_h @@ -29,6 +26,9 @@ typedef NS_ENUM(NSInteger, LICENSE_MODE_ENUM) { - (bool)checkLicenseOK:(const char *) filePath; +- (bool)deleteCacheFile; + +- (bool)checkLicense; @end @interface BELicenseHelper : NSObject @@ -39,6 +39,8 @@ typedef NS_ENUM(NSInteger, LICENSE_MODE_ENUM) { +(instancetype) shareInstance; +- (void)setParam:(NSString*)key value:(NSString*) value; + @end #endif /* BELicenseHelper_h */ diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BELicenseHelper.mm b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BELicenseHelper.mm index 56306038b..3d7dac0f0 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BELicenseHelper.mm +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BELicenseHelper.mm @@ -1,29 +1,34 @@ -// // BELicenseHelper.m // BECore -// -// Created by bytedance on 2021/9/2. 
-// + #import "BELicenseHelper.h" +#if __has_include() +#import +#import +#endif #import "BEHttpRequestProvider.h" -#import "Core.h" #import #import -#if __has_include("bef_effect_ai_api.h") -#import "bef_effect_ai_license_wrapper.h" -#import "bef_effect_ai_api.h" -#import "BytedLicenseDefine.h" -#endif +#import "BundleUtil.h" +#import "Config.h" using namespace std; +static NSString *OFFLIN_LICENSE_PATH = @"LicenseBag"; +static NSString *OFFLIN_BUNDLE = @"bundle"; +static NSString *LICENSE_URL = @"https://cv.iccvlog.com/cv_tob/v1/api/sdk/tob_license/getlicense"; +static NSString *KEY = @"cv_test_online1"; +static NSString *SECRET = @"e479f002-4018-11eb-a1e0-b8599f494dc4"; +static LICENSE_MODE_ENUM LICENSE_MODE = OFFLINE_LICENSE; +BOOL overSeasVersion = NO; + @interface BELicenseHelper() { std::string _licenseFilePath; LICENSE_MODE_ENUM _licenseMode; -#if __has_include("bef_effect_ai_api.h") - IBytedLicenseProvider* _licenseProvider; - BEHttpRequestProvider* _requestProvider; +#if __has_include() + EffectsSDK::LicenseProvider* _licenseProvider; + EffectsSDK::HttpRequestProvider* _requestProvider; #endif } @end @@ -53,9 +58,18 @@ -(id) copyWithZone:(struct _NSZone *)zone return [BELicenseHelper shareInstance] ; } +- (void)setParam:(NSString*)key value:(NSString*) value{ +#if __has_include() + if (_licenseProvider == nil) + return; + + _licenseProvider->setParam([key UTF8String], [value UTF8String]); +#endif +} + - (id)init { self = [super init]; -#if __has_include("bef_effect_ai_api.h") +#if __has_include() if (self) { _errorCode = 0; _licenseMode = LICENSE_MODE; @@ -63,10 +77,10 @@ - (id)init { if (_licenseMode == ONLINE_LICENSE) { _licenseProvider->setParam("mode", "ONLINE"); - _licenseProvider->setParam("url", "https://cv-tob.bytedance.com/v1/api/sdk/tob_license/getlicense"); - _licenseProvider->setParam("key", ONLINE_LICENSE_KEY); - _licenseProvider->setParam("secret", ONLINE_LICENSE_SECRET); - NSString *licenseName = [NSString stringWithFormat:@"/%s", 
LICENSE_NAME]; + _licenseProvider->setParam("url", [[self licenseUrl] UTF8String]); + _licenseProvider->setParam("key", [[self licenseKey] UTF8String]); + _licenseProvider->setParam("secret", [[self licenseSecret] UTF8String]); + NSString *licenseName = [NSString stringWithFormat:@"/%s", "license.bag"]; NSString *licensePath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) lastObject]; licensePath = [licensePath stringByAppendingString:licenseName]; _licenseProvider->setParam("licensePath", [licensePath UTF8String]); @@ -74,51 +88,113 @@ - (id)init { else { _licenseProvider->setParam("mode", "OFFLINE"); - NSString *licenseName = [NSString stringWithFormat:@"/%s", LICENSE_NAME]; - NSString* licensePath = [[NSBundle mainBundle] pathForResource:OFFLIN_LICENSE_PATH ofType:OFFLIN_BUNDLE]; - licensePath = [licensePath stringByAppendingString:licenseName]; + NSString* licensePath = [self getLicensePath]; _licenseProvider->setParam("licensePath", [licensePath UTF8String]); } _requestProvider = new BEHttpRequestProvider; _licenseProvider->registerHttpProvider(_requestProvider); } #endif + return self; } +- (NSString *)getLicensePath { + NSString *licensePath = @""; + NSString *licenseName = @""; + NSBundle *bundle = [BundleUtil bundleWithBundleName:@"ByteEffectLib" podName:@"bytedEffect"]; + licensePath = [bundle pathForResource:OFFLIN_LICENSE_PATH ofType:OFFLIN_BUNDLE]; + NSString *bundleIdentifier = [[NSBundle mainBundle] bundleIdentifier]; + NSArray *licenseArray = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:licensePath error:nil]; + for (NSString *license in licenseArray) { + if ([license containsString:bundleIdentifier]) { + licenseName = [NSString stringWithFormat:@"/%@", license]; + break; + } + } + + licensePath = [licensePath stringByAppendingString:licenseName]; + return licensePath; +} + +- (NSString *)licenseUrl { + NSUserDefaults *def = [NSUserDefaults standardUserDefaults]; + if ([[def objectForKey:@"licenseUrl"] 
isEqual: @""] || [def objectForKey:@"licenseUrl"] == nil) { + [def synchronize]; + if (overSeasVersion) + LICENSE_URL = @"https://cv-tob.byteintl.com/v1/api/sdk/tob_license/getlicense"; + return LICENSE_URL; + } + else { + NSString *licenseUrl = [def objectForKey:@"licenseUrl"]; + [def synchronize]; + return licenseUrl; + } +} + +- (NSString *)licenseKey { + NSUserDefaults *def = [NSUserDefaults standardUserDefaults]; + if ([[def objectForKey:@"licenseKey"] isEqual: @""] || [def objectForKey:@"licenseKey"] == nil) { + [def synchronize]; + if (overSeasVersion) + KEY = @"biz_license_tool_test_key6f4411ef1eb14a858e51bfcdfbe68a60"; + return KEY; + } + else { + NSString *licenseKey = [def objectForKey:@"licenseKey"]; + [def synchronize]; + return licenseKey; + } +} + +- (NSString *)licenseSecret { + NSUserDefaults *def = [NSUserDefaults standardUserDefaults]; + if ([[def objectForKey:@"licenseSecret"] isEqual: @""] || [def objectForKey:@"licenseSecret"] == nil) { + [def synchronize]; + + if (overSeasVersion) + SECRET = @"969f0a51ae465c4b21f30c59bcb08ea4"; + return SECRET; + } + else { + NSString *licenseSecret = [def objectForKey:@"licenseSecret"]; + [def synchronize]; + return licenseSecret; + } +} + -(void)dealloc { -#if __has_include("bef_effect_ai_api.h") +#if __has_include() delete _licenseProvider; delete _requestProvider; #endif } +#if __has_include() - (const char *)licensePath { _errorCode = 0; _errorMsg = @""; -#if __has_include("bef_effect_ai_api.h") std::map params; - _licenseProvider->getLicenseWithParams(params, false, [](const char* retmsg, int retSize, ErrorInfo error, void* userdata){ + _licenseProvider->getLicenseWithParams(params, false, [](const char* retmsg, int retSize, EffectsSDK::ErrorInfo error, void* userdata){ BELicenseHelper* pThis = CFBridgingRelease(userdata); pThis.errorCode = error.errorCode; pThis.errorMsg = [[NSString alloc] initWithCString:error.errorMsg.c_str() encoding:NSUTF8StringEncoding]; }, (void*)CFBridgingRetain(self)); + if 
(![self checkLicenseResult: @"getLicensePath"]) return ""; _licenseFilePath = _licenseProvider->getParam("licensePath"); return _licenseFilePath.c_str(); -#else - return ""; -#endif } +#endif +#if __has_include() - (const char *)updateLicensePath { _errorCode = 0; _errorMsg = @""; -#if __has_include("bef_effect_ai_api.h") std::map params; - _licenseProvider->updateLicenseWithParams(params, false, [](const char* retmsg, int retSize, ErrorInfo error, void* userdata){ + _licenseProvider->updateLicenseWithParams(params, false, [](const char* retmsg, int retSize, EffectsSDK::ErrorInfo error, void* userdata){ BELicenseHelper* pThis = CFBridgingRelease(userdata); pThis.errorCode = error.errorCode; pThis.errorMsg = [[NSString alloc] initWithCString:error.errorMsg.c_str() encoding:NSUTF8StringEncoding]; @@ -129,15 +205,18 @@ - (const char *)updateLicensePath { _licenseFilePath = _licenseProvider->getParam("licensePath"); return _licenseFilePath.c_str(); -#else - return ""; -#endif } +#endif - (LICENSE_MODE_ENUM) licenseMode{ return _licenseMode; } +- (bool)checkLicense { + NSString* licensePath = [self getLicensePath]; + return [self checkLicenseOK:[licensePath UTF8String]]; +} + - (bool)checkLicenseResult:(NSString*) msg { if (_errorCode != 0) { if ([_errorMsg length] > 0) { @@ -155,7 +234,7 @@ - (bool)checkLicenseResult:(NSString*) msg { } - (bool)checkLicenseOK:(const char *) filePath { -#if __has_include("bef_effect_ai_api.h") +#if __has_include() bef_effect_handle_t effectHandle = 0; int ret = bef_effect_ai_create(&effectHandle); // this property will be held by a singleton, and only got once, @@ -169,10 +248,23 @@ - (bool)checkLicenseOK:(const char *) filePath { { return false; } - +#endif return true; -#else - return false; +} + +- (bool)deleteCacheFile { +#if __has_include() + std::string filePath = _licenseProvider->getParam("licensePath"); + if (!filePath.empty()) { + NSString *path = [[NSString alloc] initWithUTF8String:filePath.c_str()]; + NSFileManager 
*fileManager = [NSFileManager defaultManager]; + BOOL isDelete = [fileManager removeItemAtPath:path error:nil]; + if (!isDelete) { + return false; + } + } #endif + return true; } + @end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEPixelBufferInfo.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEPixelBufferInfo.h deleted file mode 100644 index aa7f6aabe..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEPixelBufferInfo.h +++ /dev/null @@ -1,37 +0,0 @@ -// -// BEPixelBufferInfo.h -// BeautifyExample -// -// Created by zhaoyongqiang on 2022/8/19. -// Copyright 漏 2022 Agora. All rights reserved. -// - -#import - -NS_ASSUME_NONNULL_BEGIN - -// {zh} / 鏁版嵁鏍煎紡 {en} /Data format -typedef NS_ENUM(NSInteger, BEFormatType) { - // {zh} 鏈煡鏍煎紡 {en} Unknown format - BE_UNKNOW, - // 8bit R G B A - BE_RGBA, - // 8bit B G R A - BE_BGRA, - // video range, 8bit Y1 Y2 Y3 Y4... U1 V1... - BE_YUV420V, - // full range, 8bit Y1 Y2 Y3 Y4... U1 V1... - BE_YUV420F -}; - - -@interface BEPixelBufferInfo : NSObject - -@property (nonatomic, assign) BEFormatType format; -@property (nonatomic, assign) int width; -@property (nonatomic, assign) int height; -@property (nonatomic, assign) int bytesPerRow; - -@end - -NS_ASSUME_NONNULL_END diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEPixelBufferInfo.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEPixelBufferInfo.m deleted file mode 100644 index 1a1be1e9a..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEPixelBufferInfo.m +++ /dev/null @@ -1,13 +0,0 @@ -// -// BEPixelBufferInfo.m -// BeautifyExample -// -// Created by zhaoyongqiang on 2022/8/19. -// Copyright 漏 2022 Agora. All rights reserved. 
-// - -#import "BEPixelBufferInfo.h" - -@implementation BEPixelBufferInfo - -@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERender.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERender.h deleted file mode 100644 index 4709a9cd9..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERender.h +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright (C) 2019 Beijing Bytedance Network Technology Co., Ltd. -#import -#import -#import -#import "BEPixelBufferInfo.h" - -@interface BERender : NSObject - -/// use several cached texture -@property(nonatomic, assign) bool useCacheTexture; - -/// init output texture binding CVPixelBuffer -/// @param width with -/// @param height height -/// @param format format -- (BOOL)initOutputTextureAndCVPixelBufferWithWidth:(int)width height:(int)height format:(BEFormatType)format; - -/// get output CVPixelBuffer binding output texture -- (CVPixelBufferRef)getOutputPixelBuffer; - -/// transfor CVPixelBuffer to buffer -/// @param pixelBuffer CVPixelBuffer -/// @param outputFormat format of buffer -- (unsigned char *)transforCVPixelBufferToBuffer:(CVPixelBufferRef)pixelBuffer outputFormat:(BEFormatType)outputFormat; - -/// transfor CVPixelBuffer to texture -/// @param pixelBuffer original CVPixelBuffer -- (GLuint)transforCVPixelBufferToTexture:(CVPixelBufferRef)pixelBuffer; - -/// transfor texture to buffer -/// @param texture texture -/// @param width with of texture -/// @param height height of texture -/// @param outputFormat format of buffer -/// @param bytesPerRowPointer pointer of bytesPerRow, would be changed according to outputFormat -- (unsigned char *)transforTextureToBuffer:(GLuint)texture width:(int)width height:(int)height outputFormat:(BEFormatType)outputFormat bytesPerRowPointer:(int *)bytesPerRowPointer; - -/// transfor buffer to texture -/// @param buffer buffer -/// @param width with of buffer -/// @param 
height height of buffer -/// @param bytesPerRow bytesPerRow of buffer -/// @param inputFormat format of buffer -- (GLuint)transforBufferToTexture:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat; - -/// transfor buffer to CVPixelBuffer without CVPixelBuffer -/// @param buffer buffer -/// @param width with of buffer -/// @param height height of buffer -/// @param bytesPerRow bytesPerRow of buffer -/// @param inputFormat format of buffer -/// @param outputFormat format of CVPixelBuffer -- (CVPixelBufferRef)transforBufferToCVPixelBuffer:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat; - -/// transfor buffer to CVPixelBuffer with exsiting CVPixelBuffer -/// @param buffer buffer -/// @param pixelBuffer CVPixelBuffer -/// @param width wiht of buffer -/// @param height height of buffer -/// @param bytesPerRow bytesPerRow of buffer -/// @param inputFormat format of buffer -/// @param outputFormat format of CVPixelBuffer -- (CVPixelBufferRef)transforBufferToCVPixelBuffer:(unsigned char *)buffer pixelBuffer:(CVPixelBufferRef)pixelBuffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat; - -/// transfor buffer to UIImage -/// @param buffer buffer -/// @param width width of buffer -/// @param height height of buffer -/// @param bytesPerRow bytesPerRow of buffer -/// @param inputFormat format of buffer -- (UIImage *)transforBufferToUIImage:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat; - -/// generate output texture -/// @param width with of texture -/// @param height height of texture -- (GLuint)getOutputTexture:(int)width height:(int)height; - -/// get format of CVPixelBuffer -/// @param pixelBuffer 
CVPixelBuffer -- (BEFormatType)getCVPixelBufferFormat:(CVPixelBufferRef)pixelBuffer; - -/// get glFormat from BEFormatType -/// @param format BEFormatType -- (GLenum)getGlFormat:(BEFormatType)format; - -/// get CVPixelBuffer info -/// @param pixelBuffer pixelBuffer -- (BEPixelBufferInfo *)getCVPixelBufferInfo:(CVPixelBufferRef)pixelBuffer; -@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERender.mm b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERender.mm deleted file mode 100644 index f4d10aa14..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERender.mm +++ /dev/null @@ -1,774 +0,0 @@ -// Copyright (C) 2019 Beijing Bytedance Network Technology Co., Ltd. -#import "BERender.h" - -#if __has_include("bef_effect_ai_api.h") -#import "bef_effect_ai_yuv_process.h" -#endif - -#import "BERenderHelper.h" - -static const int TEXTURE_CACHE_NUM = 3; - -@interface BERender () { - BERenderHelper *_renderHelper; - GLuint _frameBuffer; - GLuint _textureInputs[TEXTURE_CACHE_NUM]; - GLuint _textureOutputs[TEXTURE_CACHE_NUM]; - int _textureIndex; - - CIContext *_ciContext; - CVPixelBufferRef _cachedPixelBuffer; - unsigned char *_buffOutPointer; - unsigned int _buffOutPointerLength; - unsigned char *_yuvBufferOutPointer; - unsigned int _yuvBufferOutPointerLength; - - // for CVPixelBuffer/Texture binding - CVOpenGLESTextureCacheRef _cvTextureCaches[TEXTURE_CACHE_NUM]; - CVOpenGLESTextureRef _cvTextureInputs[TEXTURE_CACHE_NUM]; - CVOpenGLESTextureRef _cvTextureOutputs[TEXTURE_CACHE_NUM]; - CVPixelBufferRef _outputPixelBuffers[TEXTURE_CACHE_NUM]; -} - -@property (nonatomic, readwrite) NSString *triggerAction; -@property (nonatomic, assign) BOOL effectEnable; - -@property (nonatomic, assign) GLuint currentTexture; -@end - -@implementation BERender - -static NSString* LICENSE_PATH; - -- (instancetype)init { - self = [super init]; - if (self){ - _renderHelper = 
[[BERenderHelper alloc] init]; - glGenFramebuffers(1, &_frameBuffer); - - _cachedPixelBuffer = NULL; - _buffOutPointer = NULL; - _textureIndex = 0; - _useCacheTexture = NO; - } - return self; -} - -- (void)dealloc { - free(_buffOutPointer); - free(_yuvBufferOutPointer); - if (_cachedPixelBuffer) { - CVPixelBufferRelease(_cachedPixelBuffer); - _cachedPixelBuffer = NULL; - } - for (int i = 0; i < TEXTURE_CACHE_NUM; i++) { - if (_cvTextureInputs[i] != NULL) { - CFRelease(_cvTextureInputs[i]); - _cvTextureInputs[i] = NULL; - } - - if (_cvTextureCaches[i] != NULL) { - CFRelease(_cvTextureCaches[i]); - _cvTextureCaches[i] = NULL; - } - - if (_outputPixelBuffers[i]) { - CVPixelBufferRelease(_outputPixelBuffers[i]); - _outputPixelBuffers[i] = NULL; - } - - if (_cvTextureOutputs[i]) { - CFRelease(_cvTextureOutputs[i]); - _cvTextureOutputs[i] = NULL; - } - } - glDeleteFramebuffers(1, &_frameBuffer); - glDeleteTextures(TEXTURE_CACHE_NUM, _textureInputs); - glDeleteTextures(TEXTURE_CACHE_NUM, _textureOutputs); -} - -- (unsigned char *)transforCVPixelBufferToBuffer:(CVPixelBufferRef)pixelBuffer outputFormat:(BEFormatType)outputFormat { - BEFormatType inputFormat = [self getCVPixelBufferFormat:pixelBuffer]; - if (inputFormat == BE_UNKNOW) { - return nil; - } - - unsigned char *result = nil; - CVPixelBufferLockBaseAddress(pixelBuffer, 0); - if ([self be_isYuv420:inputFormat]) { - if ([self be_isRgba:outputFormat]) { - result = [self be_CVPixelBufferYuvToRgba:pixelBuffer inputFormat:inputFormat outputFormat:outputFormat]; - } else { - [self be_notSupportNow]; - } - } else if ([self be_isRgba:inputFormat]) { - BEPixelBufferInfo *info = [self getCVPixelBufferInfo:pixelBuffer]; - if ([self be_isRgba:outputFormat]) { - result = [self be_transforRgbaToRgba:(unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer) inputFormat:inputFormat outputFormat:outputFormat width:info.width height:info.height bytesPerRow:info.bytesPerRow]; - } else if ([self be_isYuv420:outputFormat]) { - result = 
[self be_transforRgbaToYuv:(unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer) width:info.width height:info.height bytesPerRow:info.bytesPerRow inputFormat:inputFormat outputFormat:outputFormat]; - } - else { - [self be_notSupportNow]; - } - } else { - [self be_notSupportNow]; - } - CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); - return result; -} - -- (CVPixelBufferRef)getOutputPixelBuffer { - if (_cvTextureCaches[_textureIndex] == NULL || _cvTextureOutputs[_textureIndex] == NULL || _outputPixelBuffers[_textureIndex] == NULL) { - return NULL; - } - CVOpenGLESTextureCacheFlush(_cvTextureCaches[_textureIndex], 0); - return _outputPixelBuffers[_textureIndex]; -} - -- (BOOL)initOutputTextureAndCVPixelBufferWithWidth:(int)width height:(int)height format:(BEFormatType)format { - if (_outputPixelBuffers[_textureIndex] != NULL) { - BEPixelBufferInfo *info = [self getCVPixelBufferInfo:_outputPixelBuffers[_textureIndex]]; - if (info.width == width && info.height == height && info.format == format) { - return YES; - } else { - CVPixelBufferRelease(_outputPixelBuffers[_textureIndex]); - _outputPixelBuffers[_textureIndex] = NULL; - } - } - - if (![self be_isRgba:format]) { - [self be_notSupportNow]; - return NO; - } - - // create texture cache - if (!_cvTextureCaches[_textureIndex]) { - EAGLContext *glContext = [EAGLContext currentContext]; - CVReturn ret = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, glContext, NULL, &_cvTextureCaches[_textureIndex]); - if (ret != kCVReturnSuccess) { - NSLog(@"create texture cache error"); - return NO; - } - } - - if (_cvTextureOutputs[_textureIndex]) { - CFRelease(_cvTextureOutputs[_textureIndex]); - _cvTextureOutputs[_textureIndex] = NULL; - } - - CVPixelBufferRef pixelBuffer = [self be_createCVPixelBufferWithWidth:width height:height format:format]; - if (pixelBuffer == NULL) { - NSLog(@"create pxiel buffer error"); - return NO; - } - _outputPixelBuffers[_textureIndex] = pixelBuffer; - CVReturn ret = 
CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, _cvTextureCaches[_textureIndex], pixelBuffer, NULL, GL_TEXTURE_2D, GL_RGBA, width, height, [self getGlFormat:format], GL_UNSIGNED_BYTE, 0, &_cvTextureOutputs[_textureIndex]); - if (ret != kCVReturnSuccess) { - NSLog(@"create texture from image error"); - return NO; - } - - _textureOutputs[_textureIndex] = CVOpenGLESTextureGetName(_cvTextureOutputs[_textureIndex]); - if (!glIsTexture(_textureOutputs[_textureIndex])) { - NSLog(@"get glTexture error"); - return NO; - } - glBindTexture(CVOpenGLESTextureGetTarget(_cvTextureOutputs[_textureIndex]), _textureOutputs[_textureIndex]); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - glBindTexture(GL_TEXTURE_2D, 0); - return YES; -} - -- (GLuint)transforCVPixelBufferToTexture:(CVPixelBufferRef)pixelBuffer { - BEPixelBufferInfo *info = [self getCVPixelBufferInfo:pixelBuffer]; - if (![self be_isRgba:info.format]) { - [self be_notSupportNow]; - return -1; - } - - if (_useCacheTexture) { - _textureIndex = (_textureIndex + 1) % TEXTURE_CACHE_NUM; - } else { - _textureIndex = 0; - } - - if (!_cvTextureCaches[_textureIndex]) { - EAGLContext *glContext = [EAGLContext currentContext]; - CVReturn ret = CVOpenGLESTextureCacheCreate(kCFAllocatorDefault, NULL, glContext, NULL, &_cvTextureCaches[_textureIndex]); - if (ret != kCVReturnSuccess) { - NSLog(@"create texture cache error"); - return NO; - } - } - if (_cvTextureInputs[_textureIndex]) { - CFRelease(_cvTextureInputs[_textureIndex]); - _cvTextureInputs[_textureIndex] = NULL; - } - - CVReturn ret = CVOpenGLESTextureCacheCreateTextureFromImage(kCFAllocatorDefault, - _cvTextureCaches[_textureIndex], - pixelBuffer, - NULL, - GL_TEXTURE_2D, - GL_RGBA, - info.width, - info.height, - [self 
getGlFormat:info.format], - GL_UNSIGNED_BYTE, - 0, - &_cvTextureInputs[_textureIndex]); - if (!_cvTextureInputs[_textureIndex] || ret != kCVReturnSuccess) { - return -1; - } - _textureInputs[_textureIndex] = CVOpenGLESTextureGetName(_cvTextureInputs[_textureIndex]); - glBindTexture(GL_TEXTURE_2D , _textureInputs[_textureIndex]); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - glBindTexture(GL_TEXTURE_2D, 0); - return _textureInputs[_textureIndex]; -} - -- (unsigned char *)transforTextureToBuffer:(GLuint)texture width:(int)width height:(int)height outputFormat:(BEFormatType)outputFormat bytesPerRowPointer:(int *)bytesPerRowPointer { - GLenum glFormat = [self getGlFormat:outputFormat]; - unsigned char *buffer = [self be_transforTextureToRgbaBuffer:texture width:width height:height format:glFormat]; - if ([self be_isRgba:outputFormat]) { - *bytesPerRowPointer = width * 4; - return buffer; - } - if ([self be_isYuv420:outputFormat]) { - *bytesPerRowPointer = width; - return [self be_transforRgbaToYuv:buffer width:width height:height bytesPerRow:width * 4 inputFormat:BE_RGBA outputFormat:outputFormat]; - } - - [self be_notSupportNow]; - return nil; -} - -- (GLuint)transforBufferToTexture:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat { - if (_useCacheTexture) { - _textureIndex = (_textureIndex + 1) % TEXTURE_CACHE_NUM; - } else { - _useCacheTexture = 0; - } - -// NSLog(@"use texture index: %d", _textureIndex); - unsigned char *rgbaBuffer = nullptr; - GLenum glFormat = GL_RGBA; - if ([self be_isRgba:inputFormat]) { - rgbaBuffer = buffer; - glFormat = [self getGlFormat:inputFormat]; - } else if ([self be_isYuv420:inputFormat]) { - rgbaBuffer = [self 
be_transforYuvToRgba:buffer width:width height:height bytesPerRow:bytesPerRow inputFormat:inputFormat outputFormat:BE_RGBA]; - glFormat = GL_RGBA; - } else { - [self be_notSupportNow]; - } - return [self be_transforRgbaBufferToTexture:rgbaBuffer width:width height:height format:glFormat]; -} - -- (CVPixelBufferRef)transforBufferToCVPixelBuffer:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { - if (_cachedPixelBuffer != NULL) { - BEPixelBufferInfo *info = [self getCVPixelBufferInfo:_cachedPixelBuffer]; - if (info.width != width || info.height != height || info.format != outputFormat) { - CVPixelBufferRelease(_cachedPixelBuffer); - _cachedPixelBuffer = NULL; - } - } - if (_cachedPixelBuffer == NULL) { - _cachedPixelBuffer = [self be_createCVPixelBufferWithWidth:width height:height format:outputFormat]; - } - CVPixelBufferRef pixelBuffer = _cachedPixelBuffer; - if (!pixelBuffer) { - return nil; - } - return [self transforBufferToCVPixelBuffer:buffer pixelBuffer:pixelBuffer width:width height:height bytesPerRow:bytesPerRow inputFormat:inputFormat outputFormat:outputFormat]; -} - -- (CVPixelBufferRef)transforBufferToCVPixelBuffer:(unsigned char *)buffer pixelBuffer:(CVPixelBufferRef)pixelBuffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { - CVPixelBufferLockBaseAddress(pixelBuffer, 0); - if ([self be_isRgba:inputFormat]) { - [self be_transforRgbaToCVPixelBuffer:pixelBuffer buffer:buffer width:width height:height bytesPerRow:bytesPerRow inputFormat:inputFormat outputFormat:outputFormat]; - } else if ([self be_isYuv420:inputFormat]) { - [self be_transforYuvToCVPixelBuffer:pixelBuffer buffer:buffer width:width height:height bytesPerRow:bytesPerRow inputFormat:inputFormat outputFormat:outputFormat]; - } else { - [self be_notSupportNow]; - } - 
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); - return pixelBuffer; -} - -- (UIImage *)transforBufferToUIImage:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat { - if ([self be_isRgba:inputFormat]) { - return [self be_transforRgbaBufferToUIImage:buffer width:width height:height bytesPerRow:bytesPerRow inputFormat:inputFormat]; - } else if ([self be_isYuv420:inputFormat]) { - unsigned char *rgbaBuffer = [self be_transforYuvToRgba:buffer width:width height:height bytesPerRow:bytesPerRow inputFormat:inputFormat outputFormat:BE_RGBA]; - bytesPerRow = width * 4; - return [self be_transforRgbaBufferToUIImage:rgbaBuffer width:width height:height bytesPerRow:bytesPerRow inputFormat:BE_RGBA]; - } - [self be_notSupportNow]; - return nil; -} - -- (GLuint)be_transforRgbaBufferToTexture:(unsigned char *)buffer width:(int)width height:(int)height format:(GLenum)format { - GLuint textureInput = _textureInputs[_textureIndex]; - - if (glIsTexture(textureInput)) { - glBindTexture(GL_TEXTURE_2D, textureInput); - glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, format, GL_UNSIGNED_BYTE, buffer); - glBindTexture(GL_TEXTURE_2D, 0); - return textureInput; - } - - NSLog(@"gen input texture"); - glGenTextures(1, &textureInput); - glBindTexture(GL_TEXTURE_2D, textureInput); - - // 鍔犺浇鐩告満鏁版嵁鍒扮汗鐞 - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, format, GL_UNSIGNED_BYTE, buffer); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - glBindTexture(GL_TEXTURE_2D, 0); - _textureInputs[_textureIndex] = textureInput; - return textureInput; -} - -- (GLuint)getOutputTexture:(int)width height:(int)height { - GLuint textureOutput = _textureOutputs[_textureIndex]; - - if 
(glIsTexture(textureOutput)) { - return textureOutput; - } - - NSLog(@"gen output texture"); - glGenTextures(1, &textureOutput); - glBindTexture(GL_TEXTURE_2D, textureOutput); - - // 涓鸿緭鍑虹汗鐞嗗紑杈熺┖闂 - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, NULL); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); - glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); - glBindTexture(GL_TEXTURE_2D, 0); - _textureOutputs[_textureIndex] = textureOutput; - return textureOutput; -} - -- (BEFormatType)getCVPixelBufferFormat:(CVPixelBufferRef)pixelBuffer { - OSType type = CVPixelBufferGetPixelFormatType(pixelBuffer); - switch (type) { - case kCVPixelFormatType_32BGRA: - return BE_BGRA; - case kCVPixelFormatType_32RGBA: - return BE_RGBA; - case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange: - return BE_YUV420F; - case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: - return BE_YUV420V; - default: - return BE_UNKNOW; - break; - } -} - -- (OSType)getOsType:(BEFormatType)format { - switch (format) { - case BE_RGBA: - return kCVPixelFormatType_32RGBA; - case BE_BGRA: - return kCVPixelFormatType_32BGRA; - case BE_YUV420F: - return kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; - case BE_YUV420V: - return kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; - default: - return kCVPixelFormatType_32BGRA; - break; - } -} - -- (GLenum)getGlFormat:(BEFormatType)format { - switch (format) { - case BE_RGBA: - return GL_RGBA; - case BE_BGRA: - return GL_BGRA; - default: - return GL_RGBA; - break; - } -} - -- (BEPixelBufferInfo *)getCVPixelBufferInfo:(CVPixelBufferRef)pixelBuffer { - int bytesPerRow = (int) CVPixelBufferGetBytesPerRow(pixelBuffer); - int width = (int) CVPixelBufferGetWidth(pixelBuffer); - int height = (int) CVPixelBufferGetHeight(pixelBuffer); - size_t iTop, iBottom, iLeft, 
iRight; - CVPixelBufferGetExtendedPixels(pixelBuffer, &iLeft, &iRight, &iTop, &iBottom); - width = width + (int) iLeft + (int) iRight; - height = height + (int) iTop + (int) iBottom; - bytesPerRow = bytesPerRow + (int) iLeft + (int) iRight; - - BEPixelBufferInfo *info = [BEPixelBufferInfo new]; - info.format = [self getCVPixelBufferFormat:pixelBuffer]; - info.width = width; - info.height = height; - info.bytesPerRow = bytesPerRow; - return info; -} - -#pragma mark - setter -- (void)setUseCacheTexture:(bool)useCacheTexture { - _useCacheTexture = useCacheTexture; - if (!useCacheTexture) { - _textureIndex = 0; - } -} - -#pragma mark - private - -- (unsigned char *)be_mallocBufferWithWidth:(int)width height:(int)height { - if (_buffOutPointer == NULL || _buffOutPointerLength != width * height) { - if (_buffOutPointer != NULL) { - free(_buffOutPointer); - } - _buffOutPointer = (unsigned char *)malloc(width * height * 4 * sizeof(unsigned char)); - _buffOutPointerLength = width * height; - NSLog(@"malloc size %d", width * height * 4); - } - return _buffOutPointer; -} - -- (unsigned char *)be_mallocYuvBuffer:(int)size { - if (_yuvBufferOutPointer == NULL || _yuvBufferOutPointerLength != size) { - if (_yuvBufferOutPointer != NULL) { - free(_yuvBufferOutPointer); - } - _yuvBufferOutPointer = (unsigned char *)malloc(size * sizeof(unsigned char)); - _yuvBufferOutPointerLength = size; - NSLog(@"malloc size %d", size); - } - return _yuvBufferOutPointer; -} - -- (CVPixelBufferRef)be_createCVPixelBufferWithWidth:(int)width height:(int)height format:(BEFormatType)format { - CVPixelBufferRef pixelBuffer; - const void *keys[] = { - kCVPixelBufferOpenGLCompatibilityKey, - kCVPixelBufferIOSurfacePropertiesKey - }; - const void *values[] = { - (__bridge const void *)([NSNumber numberWithBool:YES]), - (__bridge const void *)([NSDictionary dictionary]) - }; - - CFDictionaryRef optionsDicitionary = CFDictionaryCreate(kCFAllocatorDefault, keys, values, 2, NULL, NULL); - - CVReturn res = 
CVPixelBufferCreate(kCFAllocatorDefault, width, height, [self getOsType:format], optionsDicitionary, &pixelBuffer); - CFRelease(optionsDicitionary); - if (res != kCVReturnSuccess) { - NSLog(@"CVPixelBufferCreate error: %d", res); - } - return pixelBuffer; -} - -- (unsigned char *)be_CVPixelBufferYuvToRgba:(CVPixelBufferRef)pixelBuffer inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { - if (([self be_isYuv420:inputFormat]) && ([self be_isRgba:outputFormat])) { - unsigned int width = (unsigned int)CVPixelBufferGetWidth(pixelBuffer); - unsigned int height = (unsigned int)CVPixelBufferGetHeight(pixelBuffer); - - uint8_t *yBuffer = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); - unsigned int yPitch = (unsigned int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0); - uint8_t *uvBuffer = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1); - unsigned int uvPitch = (unsigned int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1); - - return [self be_transforYuvToRgba:yBuffer uvBuffer:uvBuffer width:width height:height yBytesPerRow:yPitch uvBytesPerRow:uvPitch inputFormat:inputFormat outputFormat:outputFormat]; - } else { - [self be_notSupportNow]; - } - return nil; -} - -- (unsigned char *)be_transforRgbaToRgba:(unsigned char *)buffer inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow { - int realBytesPerRow = width * 4; - BOOL aligned = bytesPerRow != realBytesPerRow; - if (inputFormat == outputFormat && !aligned) { - return buffer; - } - if ([self be_isRgba:inputFormat] && [self be_isRgba:outputFormat]) { - unsigned char *result = [self be_mallocBufferWithWidth:width height:height]; - if (aligned) { - [self be_copyBufferFrom:buffer to:result bytesPerRowFrom:bytesPerRow bytesPerRowTo:realBytesPerRow height:height]; - } else { - memcpy(result, buffer, width * height * 4); - } - if (inputFormat != outputFormat) { - 
for (int i = 0; i < bytesPerRow * height; i += 4) { - int16_t r = result[i]; - int16_t b = result[i+2]; - result[i] = b; - result[i+2] = r; - } - } - return result; - } else { - [self be_notSupportNow]; - } - return nil; -} - -- (unsigned char *)be_transforTextureToRgbaBuffer:(GLuint)texture width:(int)width height:(int)height format:(GLenum)format { - unsigned char *buffer = [self be_mallocBufferWithWidth:width height:height]; - [_renderHelper textureToImage:texture withBuffer:buffer Width:width height:height format:format rotation:0]; - return buffer; -} - -- (unsigned char *)be_transforRgbaToYuv:(unsigned char *)rgbaBuffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { - if ([self be_isRgba:inputFormat] && [self be_isYuv420:outputFormat]) { - unsigned char *yBuffer = [self be_mallocYuvBuffer:width * height * 3/2]; - if (inputFormat == BE_RGBA) { -#if __has_include("bef_effect_ai_api.h") - cvt_rgba2yuv(rgbaBuffer, yBuffer, BEF_AI_PIX_FMT_NV12, width, height); -#endif - return yBuffer; - } - - unsigned char *uvBuffer = &yBuffer[width * height]; - - for (int j = 0; j < height; j++) { - unsigned char *rgbaLine = &rgbaBuffer[bytesPerRow * j]; - unsigned char *yBufferLine = &yBuffer[width * j]; - unsigned char *uvBufferLine = &uvBuffer[width * (j >> 1)]; - for (int i = 0; i < width; i++) { - int p = i * 4; - int16_t r = rgbaLine[p + 0]; - int16_t g = rgbaLine[p + 1]; - int16_t b = rgbaLine[p + 2]; - - if (inputFormat == BE_BGRA) { - int16_t tmp = r; - r = b; - b = tmp; - } - - yBufferLine[i] = (int16_t)(0.299 * r + 0.587 * g + 0.114 * b); - if (outputFormat == BE_YUV420V) { - yBufferLine[i] = yBufferLine[i] * 0.875 + 16; - } - if ((j & 1) == 0 && (i & 1) == 0) { - uvBufferLine[i & ~1] = (int16_t)(-0.169 * r - 0.331 * g + 0.5 * b + 128); - uvBufferLine[i | 1] = (int16_t)(0.5 * r - 0.419 * g - 0.081 * b + 128); - } - } - } - return yBuffer; - } - - [self be_notSupportNow]; 
- return nil; -} - -- (unsigned char *)be_transforYuvToRgba:(unsigned char *)yBuffer uvBuffer:(unsigned char *)uvBuffer width:(int)width height:(int)height yBytesPerRow:(int)yBytesPerRow uvBytesPerRow:(int)uvBytesPerRow inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { - if ([self be_isYuv420:inputFormat] && [self be_isRgba:outputFormat]) { - int bytesPerPixel = 4; - unsigned char *rgbaOut = [self be_mallocBufferWithWidth:width height:height]; - - unsigned int yPitch = yBytesPerRow; - unsigned int uvPitch = uvBytesPerRow; - - for (int j = 0; j < height; j++) { - uint8_t *rgbaBufferLine = &rgbaOut[j * width * bytesPerPixel]; - uint8_t *yBufferLine = &yBuffer[j * yPitch]; - uint8_t *uvBufferLine = &uvBuffer[(j >> 1) * uvPitch]; - - for (int i = 0; i < width; i++) { - int16_t y = yBufferLine[i]; - // transfor 420f to 420v - if (inputFormat == BE_YUV420F) { - y = y * 0.875 + 16; - } - int16_t u = uvBufferLine[i & ~1] - 128; - int16_t v = uvBufferLine[i | 1] - 128; - - uint8_t *rgba = &rgbaBufferLine[i * bytesPerPixel]; - int16_t r = (int16_t)roundf( y + v * 1.4 ); - int16_t g = (int16_t)roundf( y + u * -0.343 + v * -0.711 ); - int16_t b = (int16_t)roundf( y + u * 1.765); - - if (outputFormat == BE_BGRA) { - int16_t tmp = r; - r = b; - b = tmp; - } - - rgba[0] = r; - rgba[1] = g; - rgba[2] = b; - rgba[3] = 0xff; - } - } - return rgbaOut; - } - [self be_notSupportNow]; - return nil; -} - -- (unsigned char *)be_transforYuvToRgba:(unsigned char *)yuvBuffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { - if ([self be_isYuv420:inputFormat] && [self be_isRgba:outputFormat]) { - if (outputFormat == BE_RGBA) { - unsigned char *rgbaBuffer = [self be_mallocBufferWithWidth:width height:height]; -#if __has_include("bef_effect_ai_api.h") - cvt_yuv2rgba(yuvBuffer, rgbaBuffer, BEF_AI_PIX_FMT_NV12, width, height, width, height, BEF_AI_CLOCKWISE_ROTATE_0, 
false); -#endif - return rgbaBuffer; - } - - unsigned char *yBuffer = yuvBuffer; - unsigned char *uvBuffer = yuvBuffer + bytesPerRow * height; - return [self be_transforYuvToRgba:yBuffer uvBuffer:uvBuffer width:width height:height yBytesPerRow:bytesPerRow uvBytesPerRow:bytesPerRow inputFormat:inputFormat outputFormat:outputFormat]; - } - [self be_notSupportNow]; - return nil; -} - -- (void)be_transforRgbaToCVPixelBuffer:(CVPixelBufferRef)pixelBuffer buffer:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { - int pixelBufferWidth = (int)CVPixelBufferGetWidth(pixelBuffer); - int pixelBufferHeight = (int)CVPixelBufferGetHeight(pixelBuffer); - if (width != pixelBufferWidth || height != pixelBufferHeight) { - NSLog(@"wrong state: width %d height: %d pixelBufferWidth %d pixelBufferHeight %d", width, height, pixelBufferWidth, pixelBufferHeight); - } - if ([self be_isRgba:outputFormat]) { - int pixelBufferBytesPerRow = (int)CVPixelBufferGetBytesPerRow(pixelBuffer); - unsigned char *baseAddress = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer); - buffer = [self be_transforRgbaToRgba:buffer inputFormat:inputFormat outputFormat:outputFormat width:width height:height bytesPerRow:bytesPerRow]; - if (pixelBufferBytesPerRow != bytesPerRow) { - unsigned char *from = buffer; - unsigned char *to = baseAddress; - for (int i = 0; i < height; i++) { - memcpy(to, from, bytesPerRow); - from += bytesPerRow; - to += pixelBufferBytesPerRow; - } - } else { - memcpy(baseAddress, buffer, height * bytesPerRow); - } - } else if ([self be_isYuv420:outputFormat]) { - unsigned char *yuvBuffer = [self be_transforRgbaToYuv:buffer width:width height:height bytesPerRow:bytesPerRow inputFormat:inputFormat outputFormat:outputFormat]; - bytesPerRow = width; - unsigned char *yBaseAddress = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); - int yBytesPerRow = 
(int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0); - [self be_copyBufferFrom:yuvBuffer to:yBaseAddress bytesPerRowFrom:bytesPerRow bytesPerRowTo:yBytesPerRow height:height]; - unsigned char *uvBaseAddress = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1); - int uvBytesPerRow = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1); - [self be_copyBufferFrom:yuvBuffer + bytesPerRow * height to:uvBaseAddress bytesPerRowFrom:bytesPerRow bytesPerRowTo:uvBytesPerRow height:height/2]; - - } else { - [self be_notSupportNow]; - } -} - -- (void)be_transforYuvToCVPixelBuffer:(CVPixelBufferRef)pixelBuffer buffer:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat outputFormat:(BEFormatType)outputFormat { - int pixelBufferWidth = (int)CVPixelBufferGetWidth(pixelBuffer); - int pixelBufferHeight = (int)CVPixelBufferGetHeight(pixelBuffer); - if (width != pixelBufferWidth || height != pixelBufferHeight) { - NSLog(@"wrong state: width %d height: %d pixelBufferWidth %d pixelBufferHeight %d", width, height, pixelBufferWidth, pixelBufferHeight); - } - if ([self be_isYuv420:inputFormat] && [self be_isRgba:outputFormat]) { - unsigned char *rgbaBuffer = [self be_transforYuvToRgba:buffer width:width height:height bytesPerRow:bytesPerRow inputFormat:inputFormat outputFormat:outputFormat]; - unsigned char *baseAddress = (unsigned char *)CVPixelBufferGetBaseAddress(pixelBuffer); - int pixelBufferBytesPerRow = (int)CVPixelBufferGetBytesPerRow(pixelBuffer); - if (pixelBufferBytesPerRow != width * 4) { - [self be_copyBufferFrom:rgbaBuffer to:baseAddress bytesPerRowFrom:width * 4 bytesPerRowTo:pixelBufferBytesPerRow height:height]; - } else { - memcpy(baseAddress, rgbaBuffer, width * 4 * height); - } - - } else if ([self be_isYuv420:inputFormat] && [self be_isYuv420:outputFormat]) { - unsigned char *yBuffer = buffer; - unsigned char *uvBuffer = buffer + bytesPerRow * height; - unsigned char 
*yPixelBuffer = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0); - int yPixelBufferBytesPerRow = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0); - unsigned char *uvPixelBuffer = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1); - int uvPixelBufferBytesPerRow = (int)CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1); - [self be_copyBufferFrom:yBuffer to:yPixelBuffer bytesPerRowFrom:bytesPerRow bytesPerRowTo:yPixelBufferBytesPerRow height:height]; - [self be_copyBufferFrom:uvBuffer to:uvPixelBuffer bytesPerRowFrom:bytesPerRow bytesPerRowTo:uvPixelBufferBytesPerRow height:height/2]; - } -} - -- (UIImage *)be_transforRgbaBufferToUIImage:(unsigned char *)buffer width:(int)width height:(int)height bytesPerRow:(int)bytesPerRow inputFormat:(BEFormatType)inputFormat { - if ([self be_isRgba:inputFormat]) { - CGDataProviderRef provider = CGDataProviderCreateWithData( - NULL, - buffer, - height * bytesPerRow, - NULL); - - CGColorSpaceRef colorSpaceRef = CGColorSpaceCreateDeviceRGB(); - CGBitmapInfo bitmapInfo; - if (inputFormat == BE_RGBA) { - bitmapInfo = kCGBitmapByteOrderDefault|kCGImageAlphaPremultipliedLast; - } else { - bitmapInfo = kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst; - } - CGColorRenderingIntent renderingIntent = kCGRenderingIntentDefault; - - CGImageRef imageRef = CGImageCreate(width, - height, - 8, - 4 * 8, - bytesPerRow, - colorSpaceRef, - bitmapInfo, - provider, - NULL, - NO, - renderingIntent); - - UIImage *uiImage = [UIImage imageWithCGImage:imageRef]; - CGDataProviderRelease(provider); - CGColorSpaceRelease(colorSpaceRef); - CGImageRelease(imageRef); - return uiImage; - } - [self be_notSupportNow]; - return nil; -} - -- (void)be_notSupportNow { - NSLog(@"not support now"); -} - -- (BOOL)be_isRgba:(BEFormatType)format { - return format == BE_RGBA || format == BE_BGRA; -} - -- (BOOL)be_isYuv420:(BEFormatType)format { - return format == BE_YUV420F || format == BE_YUV420V; -} - -- 
(void)be_copyBufferFrom:(unsigned char *)from to:(unsigned char *)to bytesPerRowFrom:(int)bytesPerRowFrom bytesPerRowTo:(int)bytesPserRowTo height:(int)height { - int copyLength = bytesPserRowTo > bytesPerRowFrom ? bytesPerRowFrom : bytesPserRowTo; - for (int i = 0; i < height; i++) { - memcpy(to, from, copyLength); - from += bytesPerRowFrom; - to += bytesPserRowTo; - } -} - -- (BERenderHelper*)renderHelper { - if (!_renderHelper){ - _renderHelper = [[BERenderHelper alloc] init]; - } - return _renderHelper; -} - -@end - diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERenderHelper.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERenderHelper.h deleted file mode 100644 index d383a0eee..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERenderHelper.h +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (C) 2019 Beijing Bytedance Network Technology Co., Ltd. - -#import -#import - -@interface BERenderHelper : NSObject - -/// transfor texture to buffer -/// @param texture texture -/// @param buffer buffer -/// @param rWidth width of buffer -/// @param rHeight height of buffer -- (void)textureToImage:(GLuint)texture withBuffer:(unsigned char*)buffer Width:(int)rWidth height:(int)rHeight; - -/// transfor texture to buffer -/// @param texture texture -/// @param buffer buffer -/// @param rWidth width of buffer -/// @param rHeight height of buffer -/// @param format pixel format, such as GL_RGBA,GL_BGRA... -- (void)textureToImage:(GLuint)texture withBuffer:(unsigned char*)buffer Width:(int)rWidth height:(int)rHeight format:(GLenum)format; - -/// transfor texture to buffer -/// @param texture texture -/// @param buffer buffer -/// @param rWidth width of buffer -/// @param rHeight height of buffer -/// @param format pixel format, such as GL_RGBA,GL_BGRA... 
-/// @param rotation rotation of buffer, 0: 0藲, 1: 90藲, 2: 180藲, 3: 270藲 -- (void)textureToImage:(GLuint)texture withBuffer:(unsigned char*)buffer Width:(int)rWidth height:(int)rHeight format:(GLenum)format rotation:(int)rotation; -+ (int) compileShader:(NSString *)shaderString withType:(GLenum)shaderType; -@end - diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERenderHelper.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERenderHelper.m deleted file mode 100644 index 2ea6aee7f..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BERenderHelper.m +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright (C) 2019 Beijing Bytedance Network Technology Co., Ltd. -#import "BERenderHelper.h" - -#define TTF_STRINGIZE(x) #x -#define TTF_STRINGIZE2(x) TTF_STRINGIZE(x) -#define TTF_SHADER_STRING(text) @ TTF_STRINGIZE2(text) - -static NSString *const CAMREA_RESIZE_VERTEX = TTF_SHADER_STRING -( -attribute vec4 position; -attribute vec4 inputTextureCoordinate; -varying vec2 textureCoordinate; -void main(){ - textureCoordinate = inputTextureCoordinate.xy; - gl_Position = position; -} -); - -static NSString *const CAMREA_RESIZE_FRAGMENT = TTF_SHADER_STRING -( - precision mediump float; - varying highp vec2 textureCoordinate; - uniform sampler2D inputImageTexture; - void main() - { - gl_FragColor = texture2D(inputImageTexture, textureCoordinate); - } -); - -@interface BERenderHelper (){ - GLuint _resizeProgram; - GLuint _resizeLocation; - GLuint _resizeInputImageTexture; - GLuint _resizeTextureCoordinate; - GLuint _resizeTexture; - - //涓轰簡resize buffer - GLuint _frameBuffer; -} - -@end - -static float TEXTURE_RORATION_0[] = {0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f,}; -static float TEXTURE_ROTATED_90[] = {0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f, 0.0f,}; -static float TEXTURE_ROTATED_180[] = {1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f,}; -static float 
TEXTURE_ROTATED_270[] = {1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f,}; -static float CUBE[] = {-1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f,}; - -@implementation BERenderHelper - -- (instancetype) init -{ - self = [super init]; - if (self) { - [self loadResizeShader]; - glGenFramebuffers(1, &_frameBuffer); - glGenTextures(1, &_resizeTexture); - } - - return self; -} - --(void)dealloc{ - glDeleteFramebuffers(1, &_frameBuffer); - glDeleteTextures(1, &_resizeTexture); -} - -+ (int) compileShader:(NSString *)shaderString withType:(GLenum)shaderType { - GLuint shaderHandle = glCreateShader(shaderType); - const char * shaderStringUTF8 = [shaderString UTF8String]; - - int shaderStringLength = (int) [shaderString length]; - glShaderSource(shaderHandle, 1, &shaderStringUTF8, &shaderStringLength); - glCompileShader(shaderHandle); - GLint success; - glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &success); - - if (success == GL_FALSE){ - NSLog(@"BErenderHelper compiler shader error: %s", shaderStringUTF8); - return 0; - } - return shaderHandle; -} - -/* - * load resize shader - */ -- (void) loadResizeShader{ - GLuint vertexShader = [BERenderHelper compileShader:CAMREA_RESIZE_VERTEX withType:GL_VERTEX_SHADER]; - GLuint fragmentShader = [BERenderHelper compileShader:CAMREA_RESIZE_FRAGMENT withType:GL_FRAGMENT_SHADER]; - - _resizeProgram = glCreateProgram(); - glAttachShader(_resizeProgram, vertexShader); - glAttachShader(_resizeProgram, fragmentShader); - glLinkProgram(_resizeProgram); - - GLint linkSuccess; - glGetProgramiv(_resizeProgram, GL_LINK_STATUS, &linkSuccess); - if (linkSuccess == GL_FALSE){ - NSLog(@"BERenderHelper link shader error"); - } - - glUseProgram(_resizeProgram); - _resizeLocation = glGetAttribLocation(_resizeProgram, "position"); - _resizeTextureCoordinate = glGetAttribLocation(_resizeProgram, "inputTextureCoordinate"); - _resizeInputImageTexture = glGetUniformLocation(_resizeProgram, "inputImageTexture"); - - if (vertexShader) - 
glDeleteShader(vertexShader); - - if (fragmentShader) - glDeleteShader(fragmentShader); -} - -/* - * transfer a image th buffer - */ -- (void)textureToImage:(GLuint)texture withBuffer:(unsigned char*)buffer Width:(int)rWidth height:(int)rHeight { - [self textureToImage:texture withBuffer:buffer Width:rWidth height:rHeight format:GL_RGBA]; -} - -- (void) textureToImage:(GLuint)texture withBuffer:(unsigned char*)buffer Width:(int)rWidth height:(int)rHeight format:(GLenum)format { - [self textureToImage:texture withBuffer:buffer Width:rWidth height:rHeight format:format rotation:0]; -} - -- (void)textureToImage:(GLuint)texture withBuffer:(unsigned char *)buffer Width:(int)rWidth height:(int)rHeight format:(GLenum)format rotation:(int)rotation { - glBindTexture(GL_TEXTURE_2D, _resizeTexture); - glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rWidth, rHeight, 0, format, GL_UNSIGNED_BYTE, NULL); - - glBindFramebuffer(GL_FRAMEBUFFER, _frameBuffer); - glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, _resizeTexture, 0); - - glUseProgram(_resizeProgram); - glVertexAttribPointer(_resizeLocation, 2, GL_FLOAT, false, 0, CUBE); - glEnableVertexAttribArray(_resizeLocation); - float *rota = TEXTURE_RORATION_0; - if (rotation == 1) { - rota = TEXTURE_ROTATED_90; - } else if (rotation == 2) { - rota = TEXTURE_ROTATED_180; - } else if (rotation == 3) { - rota = TEXTURE_ROTATED_270; - } - glVertexAttribPointer(_resizeTextureCoordinate, 2, GL_FLOAT, false, 0, rota); - glEnableVertexAttribArray(_resizeTextureCoordinate); - - glActiveTexture(GL_TEXTURE0); - glBindTexture(GL_TEXTURE_2D, texture); - glUniform1i(_resizeInputImageTexture, 0); - glViewport(0, 0, rWidth, rHeight); - glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); - - glDisableVertexAttribArray(_resizeLocation); - glDisableVertexAttribArray(_resizeTextureCoordinate); - glActiveTexture(GL_TEXTURE0); - glBindTexture(GL_TEXTURE_2D, 0); - - glReadPixels(0, 0, rWidth, rHeight, format, GL_UNSIGNED_BYTE, buffer); - 
glBindFramebuffer(GL_FRAMEBUFFER, 0); - [self checkGLError]; -} - -- (void)checkGLError { - int error = glGetError(); - if (error != GL_NO_ERROR) { - NSLog(@"checkGLError %d", error); - @throw [NSException exceptionWithName:@"GLError" reason:@"error " userInfo:nil]; - } -} - -@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEResourceHelper.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEResourceHelper.h deleted file mode 100644 index a0a06b284..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEResourceHelper.h +++ /dev/null @@ -1,50 +0,0 @@ -// -// BEResourceHelper.h -// BytedEffects -// -// Created by QunZhang on 2019/10/22. -// Copyright 漏 2019 ailab. All rights reserved. -// - -#import - -@protocol BEResourceHelperDelegate - -@optional - -/// path of license dir -- (NSString *)licenseDirPath; - -/// path of composer node -- (NSString *)composerNodeDirPath; - -/// path of filter dir -- (NSString *)filterDirPath; - -/// path of sticker dir -- (NSString *)stickerDirPath; - -/// path of composer -- (NSString *)composerDirPath; - -/// path of model dir -- (NSString *)modelDirPath; - -/// license name -- (NSString *)licenseName; - -@end - -@interface BEResourceHelper : NSObject - -@property (nonatomic, weak) id delegate; - -- (NSString *)licensePath; -- (NSString *)composerNodePath:(NSString *)nodeName; -- (NSString *)filterPath:(NSString *)filterName; -- (NSString *)stickerPath:(NSString *)stickerName; -- (NSString *)modelPath:(NSString *)modelName; -- (NSString *)composerPath; -- (NSString *)modelDirPath; - -@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEResourceHelper.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEResourceHelper.m deleted file mode 100644 index 3cde465d5..000000000 --- 
a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BEResourceHelper.m +++ /dev/null @@ -1,110 +0,0 @@ -// -// BEResourceHelper.m -// BytedEffects -// -// Created by QunZhang on 2019/10/22. -// Copyright 漏 2019 ailab. All rights reserved. -// - -#import "BEResourceHelper.h" -#import "BELicenseHelper.h" -#import "BundleUtil.h" -#import "Core.h" - -static NSString *LICENSE_PATH = @"LicenseBag"; -static NSString *COMPOSER_PATH = @"ComposeMakeup"; -static NSString *FILTER_PATH = @"FilterResource"; -static NSString *STICKER_PATH = @"StickerResource"; -static NSString *MODEL_PATH = @"ModelResource"; - -static NSString *BUNDLE = @"bundle"; - -@interface BEResourceHelper () { - NSString *_licensePrefix; - NSString *_composerPrefix; - NSString *_filterPrefix; - NSString *_stickerPrefix; -} - -@end - -@implementation BEResourceHelper - -- (NSString *)licensePath { -// const char *license = [BELicenseHelper shareInstance].licensePath; -// return [[NSString alloc]initWithCString:license encoding:(NSUTF8StringEncoding)]; - NSString *licenseName; - if ([self.delegate respondsToSelector:@selector(licenseName)]) { - licenseName = [self.delegate licenseName]; - } else { - licenseName = [[NSString alloc] initWithCString:LICENSE_NAME encoding:NSUTF8StringEncoding]; - } - if ([self.delegate respondsToSelector:@selector(licenseDirPath)]) { - return [[self.delegate licenseDirPath] stringByAppendingString:licenseName]; - } - if (!_licensePrefix) { - _licensePrefix = [[NSBundle mainBundle] pathForResource:LICENSE_PATH ofType:BUNDLE]; - } - return [_licensePrefix stringByAppendingString:licenseName]; -} - -- (NSString *)composerNodePath:(NSString *)nodeName { - if ([self.delegate respondsToSelector:@selector(composerNodeDirPath)]) { - return [self.delegate composerNodeDirPath]; - } - if (!_composerPrefix) { - NSBundle *budle = [BundleUtil bundleWithBundleName:@"ByteEffectLib" podName:@"bytedEffect"]; - _composerPrefix = [[budle pathForResource:COMPOSER_PATH 
ofType:BUNDLE] stringByAppendingString:@"/ComposeMakeup/"]; - } - if ([nodeName containsString:_composerPrefix]) { - return nodeName; - } - return [_composerPrefix stringByAppendingString:nodeName]; -} - -- (NSString *)filterPath:(NSString *)filterName { - if ([self.delegate respondsToSelector:@selector(filterDirPath)]) { - return [[self.delegate filterDirPath] stringByAppendingString:filterName]; - } - if (!_filterPrefix) { - NSBundle *budle = [BundleUtil bundleWithBundleName:@"ByteEffectLib" podName:@"bytedEffect"]; - _filterPrefix = [[budle pathForResource:FILTER_PATH ofType:BUNDLE] stringByAppendingFormat:@"/Filter/"]; - } - return [_filterPrefix stringByAppendingString:filterName]; -} - -- (NSString *)stickerPath:(NSString *)stickerName { - if ([self.delegate respondsToSelector:@selector(stickerDirPath)]) { - return [[self.delegate stickerDirPath] stringByAppendingString:stickerName]; - } - if (!_stickerPrefix) { - NSBundle *budle = [BundleUtil bundleWithBundleName:@"ByteEffectLib" podName:@"bytedEffect"]; - _stickerPrefix = [[budle pathForResource:STICKER_PATH ofType:BUNDLE] stringByAppendingString:@"/stickers/"]; - } - return [_stickerPrefix stringByAppendingString:stickerName]; -} - -- (NSString *)modelPath:(NSString *)modelName { - return [[self modelDirPath] stringByAppendingString:modelName]; -} - -- (NSString *)composerPath { - if ([self.delegate respondsToSelector:@selector(composerDirPath)]) { - return [self.delegate composerDirPath]; - } - if (!_composerPrefix) { - NSBundle *budle = [BundleUtil bundleWithBundleName:@"ByteEffectLib" podName:@"bytedEffect"]; - _composerPrefix = [[budle pathForResource:COMPOSER_PATH ofType:BUNDLE] stringByAppendingString:@"/ComposeMakeup/"]; - } - return [_composerPrefix stringByAppendingString:@"/composer"]; -} - -- (NSString *)modelDirPath { - if ([self.delegate respondsToSelector:@selector(modelDirPath)]) { - return [self.delegate modelDirPath]; - } - NSBundle *budle = [BundleUtil 
bundleWithBundleName:@"ByteEffectLib" podName:@"bytedEffect"]; - return [budle pathForResource:MODEL_PATH ofType:BUNDLE]; -} - -@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BETimeRecoder.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BETimeRecoder.h new file mode 100644 index 000000000..4e4f75a2c --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BETimeRecoder.h @@ -0,0 +1,31 @@ +// BETimeRecoder.h +// EffectsARSDK + + +#import + +#if TIME_LOG +#define RECORD_TIME(NAME) double _##NAME = [NSDate date].timeIntervalSince1970; +#else +#define RECORD_TIME(NAME) +#endif + +#if TIME_LOG +#define STOP_TIME(NAME) NSLog(@"TimeRecoder %s %f", #NAME, ([NSDate date].timeIntervalSince1970 - _##NAME) * 1000); +#else +#define STOP_TIME(NAME) +#endif + +@interface BETimeRecoder : NSObject + +// {zh} / @brief 寮濮嬭褰曡楁椂 {en} /@Brief start recording time +// {zh} / @param tag 鏍囩 {en} /@param tag ++ (void)record:(NSString *)tag; + +// {zh} / @brief 鍋滄璁板綍鑰楁椂 {en} /@Briefing Stop Recording Time-consuming +// {zh} / @param tag 鏍囩 {en} /@param tag ++ (void)stop:(NSString *)tag; + ++ (void)be_recordOnce:(NSString *)tag interval:(double)interval; + +@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BETimeRecoder.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BETimeRecoder.m new file mode 100644 index 000000000..bad367ecf --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/BETimeRecoder.m @@ -0,0 +1,47 @@ +// BETimeRecoder.m +// EffectsARSDK + + +#import "BETimeRecoder.h" + +static NSMutableDictionary *be_startTime; + +@interface BETimeRecoder () + +@end + +@implementation BETimeRecoder + ++ (void)initialize +{ + if (self == [BETimeRecoder class]) { + be_startTime = [NSMutableDictionary dictionary]; + } +} + ++ 
(void)record:(NSString *)tag { + [be_startTime setObject:[NSNumber numberWithDouble:[NSDate date].timeIntervalSince1970] forKey:tag]; +} + ++ (void)stop:(NSString *)tag { + NSNumber *start = [be_startTime objectForKey:tag]; + if (start == nil) { + [self be_startNotFound:tag]; + return; + } + [be_startTime removeObjectForKey:tag]; + double s = [start doubleValue]; + double e = [NSDate date].timeIntervalSince1970; + [self be_recordOnce:tag interval:e - s]; +} + +#pragma mark - private ++ (void)be_startNotFound:(NSString *)tag { + NSLog(@"call record with tag %@ first", tag); +} + ++ (void)be_recordOnce:(NSString *)tag interval:(double)interval { + NSLog(@"TimeRecoder %@ %f", tag, interval * 1000); +} + +@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h deleted file mode 100755 index 51b548b2a..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.h +++ /dev/null @@ -1,31 +0,0 @@ -// -// ByteDanceFilter.h -// FULiveDemo -// -// Created by 鍒樻磱 on 2017/8/18. -// Copyright 漏 2017骞 鍒樻磱. All rights reserved. 
-// - -#import -#import -#import - - -@protocol VideoFilterDelegate - -- (CVPixelBufferRef)processFrame:(CVPixelBufferRef)frame timeStamp:(double)timeStamp; - -@end - -@interface ByteDanceFilter : NSObject - -@property (nonatomic, assign) BOOL enabled; - -+ (ByteDanceFilter *)shareManager; - -- (void)setBuauty: (BOOL)isSelected; -- (void)setMakeup: (BOOL)isSelected; -- (void)setSticker: (BOOL)isSelected; -- (void)setFilter: (BOOL)isSelected; - -@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m deleted file mode 100755 index 328190ef3..000000000 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/ByteDanceFilter.m +++ /dev/null @@ -1,101 +0,0 @@ -// -// ByteDanceFilter.m -// FULiveDemo -// -// Created by 鍒樻磱 on 2017/8/18. -// Copyright 漏 2017骞 鍒樻磱. All rights reserved. -// - -#import "ByteDanceFilter.h" -#import "BEFrameProcessor.h" - -@interface ByteDanceFilter(){ - BEFrameProcessor *_processor; -} - -@end - -static ByteDanceFilter *shareManager = NULL; - -@implementation ByteDanceFilter - -+ (ByteDanceFilter *)shareManager -{ - __block ByteDanceFilter *shareManager; - static dispatch_once_t onceToken; - dispatch_once(&onceToken, ^{ - shareManager = [[ByteDanceFilter alloc] init]; - }); - return shareManager; -} - -- (instancetype)init -{ - if (self = [super init]) { - EAGLContext *context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; - [EAGLContext setCurrentContext:context]; - _processor = [[BEFrameProcessor alloc] initWithContext:context resourceDelegate:nil]; - _processor.processorResult = BECVPixelBuffer; - - [_processor setEffectOn:YES]; -// [_processor setFilterPath:@"Filter_32_Po10"]; -// [_processor setStickerPath:@"test_sticker"]; - [_processor updateComposerNodes:@[@"/beauty_IOS_lite"]]; - } - - return self; -} - -- (void)setBuauty: 
(BOOL)isSelected { -#if __has_include("bef_effect_ai_api.h") - if (isSelected) { - [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0.6]; - [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0.6]; - } else { - [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"whiten" intensity:0]; - [_processor updateComposerNodeIntensity:@"/beauty_IOS_lite" key:@"smooth" intensity:0]; - } -#endif -} -- (void)setMakeup: (BOOL)isSelected { -#if __has_include("bef_effect_ai_api.h") - if (isSelected) { - [_processor updateComposerNodeIntensity:@"/style_makeup/tianmei" key:@"Makeup_ALL" intensity:0.6]; - } else { - [_processor updateComposerNodeIntensity:@"/style_makeup/tianmei" key:@"Makeup_ALL" intensity:0]; - } -#endif -} -- (void)setSticker: (BOOL)isSelected { -#if __has_include("bef_effect_ai_api.h") - if (isSelected) { - [_processor setStickerPath:@"wochaotian"]; - } else { - [_processor setStickerPath:@""]; - } -#endif -} -- (void)setFilter: (BOOL)isSelected { -#if __has_include("bef_effect_ai_api.h") - if (isSelected) { - [_processor setFilterPath:@"Filter_02_14"]; - [_processor setFilterIntensity:0.4]; - } else { - [_processor setFilterIntensity:0]; - } -#endif -} - - -#pragma mark - VideoFilterDelegate -/// process your video frame here -- (CVPixelBufferRef)processFrame:(CVPixelBufferRef)frame timeStamp:(double)timeStamp{ - if(self.enabled) { - BEProcessResult *result = [_processor process:frame timeStamp:timeStamp]; - return result.pixelBuffer; - } - return frame; -} - - -@end diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Config.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Config.h new file mode 100644 index 000000000..5d0a43ae5 --- /dev/null +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Config.h @@ -0,0 +1,15 @@ +// Config.h +// BECore + + +#ifndef Config_h +#define 
Config_h + +#define LICENSE_NAME ((const char *)"Agora_test_20241014_20241214_io.agora.entfull_4.5.0_2060.licbag") + +#define DEBUG_LOG false +#define TIME_LOG false +#define BEF_AUTO_TEST false +#define ENABLE_STICKER_TEST true + +#endif /* Config_h */ diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h index 069aafd07..3bc4b0031 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/Core.h @@ -1,12 +1,5 @@ #import "macro.h" - -#define LICENSE_NAME ((const char *)"agora_test_20220805_20230208_io.agora.entfull_4.2.3.licbag") -#define ONLINE_LICENSE_KEY ((const char *)"jiaoyang_test") -#define ONLINE_LICENSE_SECRET ((const char *)"04273924-9a77-11eb-94da-0c42a1b32a30") - -static NSString *OFFLIN_LICENSE_PATH = @"LicenseBag"; -static NSString *OFFLIN_BUNDLE = @"bundle"; -static LICENSE_MODE_ENUM LICENSE_MODE = ONLINE_LICENSE; +#import "Config.h" #define CHECK_RET_AND_RETURN(MSG, ret) \ if (ret != 0 && ret != -11 && ret != 1) {\ @@ -64,8 +57,3 @@ if (ret != 0 && ret != -11 && ret != 1) {\ #else #define BELog(fmt, ...) 
#endif - - -#define BEColorWithARGBHex(hex) [UIColor colorWithRed:((hex&0xFF0000)>>16)/255.0 green:((hex&0x00FF00)>>8)/255.0 blue:((hex&0x0000FF))/255.0 alpha:((hex&0xFF000000)>>24)/255.0] -#define BEColorWithRGBAHex(hex,alpha) [UIColor colorWithRed:((hex&0xFF0000)>>16)/255.0 green:((hex&0x00FF00)>>8)/255.0 blue:((hex&0x0000FF))/255.0 alpha:alpha] -#define BEColorWithRGBHex(hex) [UIColor colorWithRed:((hex&0xFF0000)>>16)/255.0 green:((hex&0x00FF00)>>8)/255.0 blue:((hex&0x0000FF))/255.0 alpha:1] diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/macro.h b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/macro.h index 0ab9c0368..8f02ffdce 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/macro.h +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/ByteBeautify/Manager/macro.h @@ -1,11 +1,13 @@ // writen by update_ios_core_macro.py, don't edit it manually +#define BEF_CONFUSION_TOB TRUE #define BEF_EFFECT_TOB TRUE #define BEF_HAND_TOB TRUE #define BEF_FACE_TOB TRUE #define BEF_SKELETON_TOB TRUE #define BEF_PET_FACE_TOB TRUE #define BEF_PORTRAIT_MATTING_TOB TRUE +#define BEF_SALIENCY_MATTING_TOB TRUE #define BEF_HEAD_SEG_TOB TRUE #define BEF_HAIR_PARSE_TOB TRUE #define BEF_SKY_SEG_TOB TRUE @@ -24,7 +26,21 @@ #define BEF_SKIN_SEGMENTATION_TOB TRUE #define BEF_CHROMA_KEYING_TOB TRUE #define BEF_BACH_SKELETON_TOB TRUE +#define BEF_SLAM_TOB TRUE +#define BEF_FACEFITTING_TOB TRUE +#define BEF_LICENSE_CAKE_TOB TRUE +#define BEF_AVABOOST_TOB TRUE +#define BEF_OBJECT_TRACKING_TOB TRUE +#define BEF_AVATAR_SKELETON_3D_TOB TRUE #define BEF_LENS_TOB TRUE +#define BEF_LENS_PHOTO_NIGHT_SCENE_TOB TRUE #define BEF_LENS_VIDEO_SR_TOB TRUE #define BEF_LENS_NIGHT_SCENE_TOB TRUE -#define BEF_LENS_ADAPTIVE_SHARPEN_TOB TRUE \ No newline at end of file +#define BEF_LENS_ADAPTIVE_SHARPEN_TOB TRUE +#define BEF_LENS_ONEKEY_ENHANCE_TOB TRUE +#define BEF_LENS_VIDEO_VIF_TOB 
TRUE +#define BEF_LENS_VIDA_TOB TRUE +#define BEF_LENS_TAINT_DETECT_TOB TRUE +#define BEF_LENS_CINE_MOVE_TOB TRUE +#define BEF_LENS_VIDEO_DEFLICKER_TOB TRUE +#define BEF_LENS_VIDEO_HDR_TOB TRUE diff --git a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard index 89ef62879..7b11c4cb7 100644 --- a/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard +++ b/iOS/APIExample/APIExample/Examples/Advanced/ThirdBeautify/FUBeautify/Base.lproj/FUBeautify.storyboard @@ -1,9 +1,9 @@ - + - + @@ -33,15 +33,14 @@