diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java index 2e05ce96b..c6806b9b6 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeauty.java @@ -6,6 +6,7 @@ import android.view.View; import android.view.ViewGroup; import android.view.ViewParent; +import android.widget.Toast; import androidx.annotation.NonNull; import androidx.annotation.Nullable; @@ -82,7 +83,15 @@ public void onViewCreated(@NonNull View view, @Nullable Bundle savedInstanceStat ByteDanceBeautySDK.INSTANCE.getRenderManager(), new EventCallback(beautyStats -> null, () -> { - ByteDanceBeautySDK.INSTANCE.initEffect(requireContext()); + boolean authSuccess = ByteDanceBeautySDK.INSTANCE.initEffect(requireContext()); + if(!authSuccess){ + runOnUIThread(new Runnable() { + @Override + public void run() { + Toast.makeText(getContext(), "auth failed", Toast.LENGTH_SHORT).show(); + } + }); + } return null; }, () -> { diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt index ea6440e91..ca3833ecc 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/ByteDanceBeautySDK.kt @@ -3,7 +3,7 @@ package io.agora.api.example.examples.advanced.beauty import android.content.Context import android.util.Log import com.effectsar.labcv.effectsdk.RenderManager -import io.agora.api.example.utils.FileUtils +import io.agora.api.example.examples.advanced.beauty.utils.FileUtils import io.agora.beautyapi.bytedance.ByteDanceBeautyAPI import java.io.File @@ -37,21 +37,20 @@ object ByteDanceBeautySDK { assetsPath = "beauty_bytedance" // copy license - licensePath = "$storagePath/beauty_bytedance/LicenseBag.bundle" - FileUtils.copyFilesFromAssets(context, "$assetsPath/LicenseBag.bundle", licensePath) - licensePath += "/$LICENSE_NAME" + licensePath = "$storagePath/beauty_bytedance/LicenseBag.bundle/$LICENSE_NAME" + FileUtils.copyAssets(context, "$assetsPath/LicenseBag.bundle/$LICENSE_NAME", licensePath) if (!File(licensePath).exists()) { return false } // copy models modelsPath = "$storagePath/beauty_bytedance/ModelResource.bundle" - FileUtils.copyFilesFromAssets(context, "$assetsPath/ModelResource.bundle", modelsPath) + FileUtils.copyAssets(context, "$assetsPath/ModelResource.bundle", modelsPath) // copy beauty node beautyNodePath = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/beauty_Android_lite" - FileUtils.copyFilesFromAssets( + FileUtils.copyAssets( context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/beauty_Android_lite", beautyNodePath @@ -60,7 +59,7 @@ object ByteDanceBeautySDK { // copy beauty 4items node beauty4ItemsNodePath = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/beauty_4Items" - FileUtils.copyFilesFromAssets( + FileUtils.copyAssets( context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/beauty_4Items", beauty4ItemsNodePath @@ -69,7 +68,7 @@ object ByteDanceBeautySDK { // copy resharp node reSharpNodePath = 
"$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/reshape_lite" - FileUtils.copyFilesFromAssets( + FileUtils.copyAssets( context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/reshape_lite", reSharpNodePath @@ -77,19 +76,19 @@ object ByteDanceBeautySDK { // copy stickers stickerPath = "$storagePath/beauty_bytedance/StickerResource.bundle/stickers" - FileUtils.copyFilesFromAssets(context, "$assetsPath/StickerResource.bundle/stickers", stickerPath) + FileUtils.copyAssets(context, "$assetsPath/StickerResource.bundle/stickers", stickerPath) return true } // GL Thread - fun initEffect(context: Context) { + fun initEffect(context: Context) : Boolean{ val ret = renderManager.init( context, modelsPath, licensePath, false, false, 0 ) if (!checkResult("RenderManager init ", ret)) { - return + return false } renderManager.useBuiltinSensor(true) renderManager.set3Buffer(false) @@ -99,6 +98,7 @@ object ByteDanceBeautySDK { ) renderManager.loadResourceWithTimeout(-1) beautyConfig.resume() + return true } // GL Thread @@ -139,7 +139,7 @@ object ByteDanceBeautySDK { } internal fun setBeautyAPI(beautyAPI: ByteDanceBeautyAPI?) { - this.beautyAPI = beautyAPI + ByteDanceBeautySDK.beautyAPI = beautyAPI } private fun runOnBeautyThread(run: () -> Unit) { @@ -411,7 +411,7 @@ object ByteDanceBeautySDK { if (value != null) { val nodePath = "$storagePath/beauty_bytedance/ComposeMakeup.bundle/ComposeMakeup/style_makeup/${value.style}" - FileUtils.copyFilesFromAssets( + FileUtils.copyAssets( value.context, "$assetsPath/ComposeMakeup.bundle/ComposeMakeup/style_makeup/${value.style}", nodePath diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt index 57bb76b5f..7d78db409 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/FaceUnityBeautySDK.kt @@ -31,6 +31,8 @@ object FaceUnityBeautySDK { private var beautyAPI: FaceUnityBeautyAPI? = null + private var authSuccess = false + fun initBeauty(context: Context): Boolean { val auth = try { getAuth() @@ -45,13 +47,14 @@ object FaceUnityBeautySDK { override fun onSuccess(code: Int, msg: String) { Log.i(TAG, "FURenderManager onSuccess -- code=$code, msg=$msg") if (code == OPERATE_SUCCESS_AUTH) { - faceunity.fuSetUseTexAsync(1) + authSuccess = true + faceunity.fuSetUseTexAsync(0) FUAIKit.getInstance() .loadAIProcessor(BUNDLE_AI_FACE, FUAITypeEnum.FUAITYPE_FACEPROCESSOR) - FUAIKit.getInstance().loadAIProcessor( - BUNDLE_AI_HUMAN, - FUAITypeEnum.FUAITYPE_HUMAN_PROCESSOR - ) + // FUAIKit.getInstance().loadAIProcessor( + // BUNDLE_AI_HUMAN, + // FUAITypeEnum.FUAITYPE_HUMAN_PROCESSOR + // ) } } @@ -63,9 +66,14 @@ object FaceUnityBeautySDK { return true } + fun isAuthSuccess(): Boolean { + return authSuccess + } + fun unInitBeauty() { beautyAPI = null beautyConfig.reset() + authSuccess = false FUAIKit.getInstance().releaseAllAIProcessor() FURenderKit.getInstance().release() } @@ -77,8 +85,9 @@ object FaceUnityBeautySDK { return aMethod.invoke(null) as? ByteArray } - internal fun setBeautyAPI(beautyAPI: FaceUnityBeautyAPI) { - this.beautyAPI = beautyAPI + internal fun setBeautyAPI(beautyAPI: FaceUnityBeautyAPI?) 
{ + FaceUnityBeautySDK.beautyAPI = beautyAPI + beautyConfig.resume() } private fun runOnBeautyThread(run: () -> Unit) { @@ -312,6 +321,28 @@ object FaceUnityBeautySDK { sticker = null } + fun resume(){ + smooth = smooth + whiten = whiten + thinFace = thinFace + enlargeEye = enlargeEye + redden = redden + shrinkCheekbone = shrinkCheekbone + shrinkJawbone = shrinkJawbone + whiteTeeth = whiteTeeth + hairlineHeight = hairlineHeight + narrowNose = narrowNose + mouthSize = mouthSize + chinLength = chinLength + brightEye = brightEye + darkCircles = darkCircles + nasolabialFolds = nasolabialFolds + faceThree = faceThree + + makeUp = makeUp + sticker = sticker + } + } data class MakeUpItem( diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt index 1e8a3bb4d..42bcd0985 100644 --- a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/SenseTimeBeautySDK.kt @@ -8,7 +8,7 @@ import com.softsugar.stmobile.STMobileEffectNative import com.softsugar.stmobile.STMobileEffectParams import com.softsugar.stmobile.STMobileHumanActionNative import com.softsugar.stmobile.params.STEffectBeautyType -import io.agora.api.example.utils.FileUtils +import io.agora.api.example.examples.advanced.beauty.utils.FileUtils import io.agora.beautyapi.sensetime.SenseTimeBeautyAPI object SenseTimeBeautySDK { @@ -55,16 +55,25 @@ object SenseTimeBeautySDK { private var beautyAPI: SenseTimeBeautyAPI? = null + private var authSuccess = false + fun initBeautySDK(context: Context): Boolean { if (checkLicense(context)) { initHumanAction(context) + authSuccess = true return true } + initHumanAction(context) return false } + fun isAuthSuccess(): Boolean { + return authSuccess + } + fun unInitBeautySDK() { beautyAPI = null + authSuccess = false unInitHumanActionNative() beautyConfig.reset() } @@ -78,6 +87,7 @@ object SenseTimeBeautySDK { _mobileEffectNative?.createInstance(context, STMobileEffectNative.EFFECT_CONFIG_NONE) _mobileEffectNative?.setParam(STMobileEffectParams.EFFECT_PARAM_QUATERNION_SMOOTH_FRAME, 5f) Log.d(TAG, "SenseTime >> STMobileEffectNative create result : $result") + beautyConfig.resume() } fun unInitMobileEffect() { @@ -98,8 +108,8 @@ object SenseTimeBeautySDK { license, license.length ) - Log.d(TAG, "SenseTime >> checkLicense successfully! 
activeCode=$activeCode") - return true + Log.d(TAG, "SenseTime >> checkLicense activeCode=$activeCode") + return activeCode.isNotEmpty() } private fun initHumanAction(context: Context) { @@ -147,8 +157,8 @@ object SenseTimeBeautySDK { } - internal fun setBeautyAPI(beautyAPI: SenseTimeBeautyAPI){ - this.beautyAPI = beautyAPI + internal fun setBeautyAPI(beautyAPI: SenseTimeBeautyAPI?){ + SenseTimeBeautySDK.beautyAPI = beautyAPI beautyConfig.resume() } diff --git a/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/utils/FileUtils.kt b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/utils/FileUtils.kt new file mode 100644 index 000000000..0da7f3323 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/api/example/examples/advanced/beauty/utils/FileUtils.kt @@ -0,0 +1,113 @@ +/* + * MIT License + * + * Copyright (c) 2023 Agora Community + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package io.agora.api.example.examples.advanced.beauty.utils + +import android.content.Context +import android.util.Log +import java.io.BufferedInputStream +import java.io.BufferedOutputStream +import java.io.BufferedReader +import java.io.File +import java.io.FileOutputStream +import java.io.IOException +import java.io.InputStream +import java.io.InputStreamReader +import java.io.OutputStream + +object FileUtils { + val TAG = "FileUtils" + + fun getAssetsString(context: Context, path: String): String { + val sb = StringBuilder() + var isr: InputStreamReader? = null + var br: BufferedReader? = null + try { + isr = InputStreamReader(context.resources.assets.open(path)) + br = BufferedReader(isr) + var line: String? 
= null + while (br.readLine().also { line = it } != null) { + sb.append(line).append("\n") + } + } catch (e: IOException) { + Log.e(TAG, "getAssetsString error: $e") + } finally { + if (isr != null) { + try { + isr.close() + } catch (e: IOException) { + e.printStackTrace() + } + } + if (br != null) { + try { + br.close() + } catch (e: IOException) { + e.printStackTrace() + } + } + } + return sb.toString() + } + + fun copyAssets(context: Context, assetsPath: String, targetPath: String) { + val fileNames = context.resources.assets.list(assetsPath) + if (fileNames?.isNotEmpty() == true) { + val targetFile = File(targetPath) + if (!targetFile.exists() && !targetFile.mkdirs()) { + return + } + for (fileName in fileNames) { + copyAssets( + context, + "$assetsPath/$fileName", + "$targetPath/$fileName" + ) + } + } else { + copyAssetsFile(context, assetsPath, targetPath) + } + } + + private fun copyAssetsFile(context: Context, assetsFile: String, targetPath: String) { + val dest = File(targetPath) + dest.parentFile?.mkdirs() + var input: InputStream? = null + var output: OutputStream? = null + try { + input = BufferedInputStream(context.assets.open(assetsFile)) + output = BufferedOutputStream(FileOutputStream(dest)) + val buffer = ByteArray(1024) + var length = 0 + while (input.read(buffer).also { length = it } != -1) { + output.write(buffer, 0, length) + } + } catch (e: Exception) { + Log.e(TAG, "copyAssetsFile", e) + } finally { + output?.close() + input?.close() + } + } +} \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt index b3022ab34..aff0a8971 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPI.kt @@ -31,7 +31,7 @@ import io.agora.base.VideoFrame import io.agora.rtc2.Constants import io.agora.rtc2.RtcEngine -const val VERSION = "1.0.6" +const val VERSION = "1.0.7" enum class CaptureMode{ Agora, // 使用声网内部的祼数据接口进行处理 diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt index e5b35bcc2..14bf60a10 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/ByteDanceBeautyAPIImpl.kt @@ -36,6 +36,8 @@ import io.agora.base.VideoFrame.I420Buffer import io.agora.base.VideoFrame.TextureBuffer import io.agora.base.internal.video.RendererCommon import io.agora.base.internal.video.YuvHelper +import io.agora.beautyapi.bytedance.utils.APIReporter +import io.agora.beautyapi.bytedance.utils.APIType import io.agora.beautyapi.bytedance.utils.AgoraImageHelper import io.agora.beautyapi.bytedance.utils.ImageUtil import io.agora.beautyapi.bytedance.utils.LogUtils @@ -51,8 +53,6 @@ import java.util.concurrent.Executors class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { private val TAG = "ByteDanceBeautyAPIImpl" - private val reportId = "scenarioAPI" - private val reportCategory = "beauty_android_$VERSION" private var beautyMode = 0 // 0: 自动根据buffer类型切换,1:固定使用OES纹理,2:固定使用i420 @@ -75,6 +75,9 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { private val pendingProcessRunList = 
Collections.synchronizedList(mutableListOf<()->Unit>()) private var frameWidth = 0 private var frameHeight = 0 + private val apiReporter by lazy { + APIReporter(APIType.BEAUTY, VERSION, config!!.rtcEngine) + } private enum class BeautyProcessType{ UNKNOWN, TEXTURE_OES, TEXTURE_2D, I420 @@ -95,7 +98,17 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { } LogUtils.i(TAG, "initialize >> config = $config") LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${RenderManager.getSDKVersion()}") - config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "$config", 0) + apiReporter.reportFuncEvent( + "initialize", + mapOf( + "captureMode" to config.captureMode, + "statsDuration" to config.statsDuration, + "statsEnable" to config.statsEnable, + "cameraConfig" to config.cameraConfig, + ), + emptyMap() + ) + apiReporter.startDurationEvent("initialize-release") return ErrorCode.ERROR_OK.value } @@ -114,7 +127,11 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { LogUtils.i(TAG, "enable >> skipFrame = $skipFrame") } this.enable = enable - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "$enable", 0) + apiReporter.reportFuncEvent( + "enable", + mapOf("enable" to enable), + emptyMap() + ) return ErrorCode.ERROR_OK.value } @@ -125,7 +142,11 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_HAS_NOT_INITIALIZED.value } LogUtils.i(TAG, "setupLocalVideo >> view=$view, renderMode=$renderMode") - rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "view=$view, renderMode=$renderMode", 0) + apiReporter.reportFuncEvent( + "setupLocalVideo", + mapOf("view" to view, "renderMode" to renderMode), + emptyMap() + ) if (view is TextureView || view is SurfaceView) { val canvas = VideoCanvas(view, renderMode, 0) canvas.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED @@ -180,7 +201,15 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { } LogUtils.i(TAG, "setBeautyPreset >> preset = $preset") - conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "preset=$preset, beautyNodePath=$beautyNodePath, beauty4ItemNodePath=$beauty4ItemNodePath, reSharpNodePath=$reSharpNodePath", 0) + apiReporter.reportFuncEvent( + "setBeautyPreset", + mapOf( + "preset" to preset, + "beautyNodePath" to beautyNodePath, + "beauty4ItemNodePath" to beauty4ItemNodePath, + "reSharpNodePath" to reSharpNodePath + ), + emptyMap()) runOnProcessThread { val renderManager = @@ -247,6 +276,7 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { } override fun setParameters(key: String, value: String) { + apiReporter.reportFuncEvent("setParameters", mapOf("key" to key, "value" to value), emptyMap()) when (key) { "beauty_mode" -> beautyMode = value.toInt() } @@ -273,7 +303,11 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { override fun updateCameraConfig(config: CameraConfig): Int { LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config") cameraConfig = CameraConfig(config.frontMirror, config.backMirror) - this.config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "updateCameraConfig", "config=$config", 0) + apiReporter.reportFuncEvent( + "updateCameraConfig", + mapOf("config" to config), + emptyMap() + ) return ErrorCode.ERROR_OK.value } @@ -293,8 +327,9 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, 
IVideoFrameObserver { if (conf.captureMode == CaptureMode.Agora) { conf.rtcEngine.registerVideoFrameObserver(null) } - conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0) LogUtils.i(TAG, "release") + apiReporter.reportFuncEvent("release", emptyMap(), emptyMap()) + apiReporter.endDurationEvent("initialize-release", emptyMap()) isReleased = true workerThreadExecutor.shutdown() textureBufferHelper?.let { @@ -364,7 +399,11 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { if(renderMirror) Constants.VIDEO_MIRROR_MODE_ENABLED else Constants.VIDEO_MIRROR_MODE_DISABLED ) } - skipFrame = 2 + textureBufferHelper?.invoke { + skipFrame = 2 + imageUtils?.release() + } + apiReporter.startDurationEvent("first_beauty_frame") return false } @@ -432,6 +471,8 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return false } + apiReporter.endDurationEvent("first_beauty_frame", emptyMap()) + val processBuffer: TextureBuffer = textureBufferHelper?.wrapTextureBuffer( videoFrame.rotatedWidth, videoFrame.rotatedHeight, @@ -520,7 +561,7 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { videoFrame.timestampNs ) if (!success) { - return@Callable -1 + return@Callable srcTexture } return@Callable dstTexture }) @@ -583,7 +624,7 @@ class ByteDanceBeautyAPIImpl : ByteDanceBeautyAPI, IVideoFrameObserver { return@Callable if (success) { dstTexture } else { - -1 + srcTexture } }) } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/APIReporter.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/APIReporter.kt new file mode 100644 index 000000000..156978e8c --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/APIReporter.kt @@ -0,0 +1,139 @@ +package io.agora.beautyapi.bytedance.utils + +import android.util.Log +import io.agora.rtc2.Constants +import io.agora.rtc2.RtcEngine +import org.json.JSONObject + +enum class APIType(val value: Int) { + KTV(1), // K歌 + CALL(2), // 呼叫连麦 + BEAUTY(3), // 美颜 + VIDEO_LOADER(4), // 秒开秒切 + PK(5), // 团战 + VIRTUAL_SPACE(6), // + SCREEN_SPACE(7), // 屏幕共享 + AUDIO_SCENARIO(8) // 音频 +} + +enum class ApiEventType(val value: Int) { + API(0), + COST(1), + CUSTOM(2) +} + +object ApiEventKey { + const val TYPE = "type" + const val DESC = "desc" + const val API_VALUE = "apiValue" + const val TIMESTAMP = "ts" + const val EXT = "ext" +} + +object ApiCostEvent { + const val CHANNEL_USAGE = "channelUsage" //频道使用耗时 + const val FIRST_FRAME_ACTUAL = "firstFrameActual" //首帧实际耗时 + const val FIRST_FRAME_PERCEIVED = "firstFramePerceived" //首帧感官耗时 +} + +class APIReporter( + private val type: APIType, + private val version: String, + private val rtcEngine: RtcEngine +) { + private val tag = "APIReporter" + private val messageId = "agora:scenarioAPI" + private val durationEventStartMap = HashMap<String, Long>() + private val category = "${type.value}_Android_$version" + + init { + configParameters() + } + + // 上报普通场景化API + fun reportFuncEvent(name: String, value: Map<String, Any>, ext: Map<String, Any>) { + Log.d(tag, "reportFuncEvent: $name value: $value ext: $ext") + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.API.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.API_VALUE to value, ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0) + } +
fun startDurationEvent(name: String) { + Log.d(tag, "startDurationEvent: $name") + durationEventStartMap[name] = getCurrentTs() + } + + fun endDurationEvent(name: String, ext: Map<String, Any>) { + Log.d(tag, "endDurationEvent: $name") + val beginTs = durationEventStartMap[name] ?: return + durationEventStartMap.remove(name) + val ts = getCurrentTs() + val cost = (ts - beginTs).toInt() + + innerReportCostEvent(ts, name, cost, ext) + } + + // 上报耗时打点信息 + fun reportCostEvent(name: String, cost: Int, ext: Map<String, Any>) { + durationEventStartMap.remove(name) + innerReportCostEvent( + ts = getCurrentTs(), + name = name, + cost = cost, + ext = ext + ) + } + + // 上报自定义信息 + fun reportCustomEvent(name: String, ext: Map<String, Any>) { + Log.d(tag, "reportCustomEvent: $name ext: $ext") + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.CUSTOM.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0) + } + + fun writeLog(content: String, level: Int) { + rtcEngine.writeLog(level, content) + } + + fun cleanCache() { + durationEventStartMap.clear() + } + + // ---------------------- private ---------------------- + + private fun configParameters() { + //rtcEngine.setParameters("{\"rtc.qos_for_test_purpose\": true}") //测试环境使用 + // 数据上报 + rtcEngine.setParameters("{\"rtc.direct_send_custom_event\": true}") + // 日志写入 + rtcEngine.setParameters("{\"rtc.log_external_input\": true}") + } + + private fun getCurrentTs(): Long { + return System.currentTimeMillis() + } + + private fun innerReportCostEvent(ts: Long, name: String, cost: Int, ext: Map<String, Any>) { + Log.d(tag, "reportCostEvent: $name cost: $cost ms ext: $ext") + writeLog("reportCostEvent: $name cost: $cost ms", Constants.LOG_LEVEL_INFO) + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.COST.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.TIMESTAMP to ts, ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, cost) + } + + private fun convertToJSONString(dictionary: Map<String, Any>): String?
{ + return try { + JSONObject(dictionary).toString() + } catch (e: Exception) { + writeLog("[$tag]convert to json fail: $e dictionary: $dictionary", Constants.LOG_LEVEL_WARNING) + null + } + } +} \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt index f63069580..813e16ee8 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/bytedance/utils/AgoraImageHelper.kt @@ -56,9 +56,9 @@ class AgoraImageHelper { GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBuffer.frameBufferId) if(texType == VideoFrame.TextureBuffer.Type.OES){ - drawer.drawOes(texId, 0, transform, width, height, 0, 0, width, height, 0,0) + drawer.drawOes(texId,0, transform, width, height, 0, 0, width, height,0) }else{ - drawer.drawRgb(texId, 0, transform, width, height, 0, 0, width, height, 0,0) + drawer.drawRgb(texId,0, transform, width, height, 0, 0, width, height,0) } GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0) GLES20.glFinish() diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt index 1058ea229..4da67d5a5 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPI.kt @@ -31,7 +31,7 @@ import io.agora.base.VideoFrame import io.agora.rtc2.Constants import io.agora.rtc2.RtcEngine -const val VERSION = "1.0.6" +const val VERSION = "1.0.7" enum class CaptureMode{ Agora, // 使用声网内部的祼数据接口进行处理 diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt index dd51417aa..710b392c3 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/FaceUnityBeautyAPIImpl.kt @@ -47,6 +47,8 @@ import io.agora.base.VideoFrame.SourceType import io.agora.base.VideoFrame.TextureBuffer import io.agora.base.internal.video.EglBase import io.agora.base.internal.video.YuvHelper +import io.agora.beautyapi.faceunity.utils.APIReporter +import io.agora.beautyapi.faceunity.utils.APIType import io.agora.beautyapi.faceunity.utils.FuDeviceUtils import io.agora.beautyapi.faceunity.utils.LogUtils import io.agora.beautyapi.faceunity.utils.StatsHelper @@ -63,13 +65,10 @@ import java.util.concurrent.Callable class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { private val TAG = "FaceUnityBeautyAPIImpl" - private val reportId = "scenarioAPI" - private val reportCategory = "beauty_android_$VERSION" private var beautyMode = 0 // 0: 自动根据buffer类型切换,1:固定使用OES纹理,2:固定使用i420,3: 单纹理模式 - private var enableTextureAsync = true // 是否开启纹理+异步缓存处理。对于GPU性能好的手机可以减小美颜处理耗时,对于中端机开启后效果也不明显。 + private var enableTextureAsync = false // 是否开启纹理+异步缓存处理。对于GPU性能好的手机可以减小美颜处理耗时,对于中端机开启后效果也不明显。 - private var textureBufferHelper: TextureBufferHelper? = null - private var wrapTextureBufferHelper: TextureBufferHelper? = null + private var beautyTextureBufferHelper: TextureBufferHelper? = null private var byteBuffer: ByteBuffer? = null private var byteArray: ByteArray? 
= null private var config: Config? = null @@ -79,7 +78,6 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { private var captureMirror = false private var renderMirror = false private val identityMatrix = Matrix() - private var mTextureProcessHelper: TextureProcessHelper? = null private var statsHelper: StatsHelper? = null private var skipFrame = 0 private enum class ProcessSourceType{ @@ -97,6 +95,13 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>()) private val transformGLFrameBuffer = GLFrameBuffer() + private val outGLFrameBuffer = GLFrameBuffer() + private val apiReporter by lazy { + APIReporter(APIType.BEAUTY, VERSION, config!!.rtcEngine) + } + + private var asyncTextureProcessHelper: TextureProcessHelper? = null + private var asyncTextureBufferHelper: TextureBufferHelper? = null override fun initialize(config: Config): Int { if (this.config != null) { @@ -123,7 +128,17 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } } LogUtils.i(TAG, "initialize >> FuDeviceUtils deviceLevel=$deviceLevel") - config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "config=$config, deviceLevel=$deviceLevel", 0) + apiReporter.reportFuncEvent( + "initialize", + mapOf( + "captureMode" to config.cameraConfig, + "statsDuration" to config.statsDuration, + "statsEnable" to config.statsEnable, + "cameraConfig" to config.cameraConfig, + ), + emptyMap() + ) + apiReporter.startDurationEvent("initialize-release") return ErrorCode.ERROR_OK.value } @@ -141,8 +156,11 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { skipFrame = 2 LogUtils.i(TAG, "enable >> skipFrame = $skipFrame") } - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "enable=$enable", 0) - + apiReporter.reportFuncEvent( + "enable", + mapOf("enable" to enable), + emptyMap() + ) if(this.enable != enable){ this.enable = enable enableChange = true @@ -159,7 +177,11 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } LogUtils.i(TAG, "setupLocalVideo >> view=$view, renderMode=$renderMode") localVideoRenderMode = renderMode - rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "view=$view, renderMode=$renderMode", 0) + apiReporter.reportFuncEvent( + "setupLocalVideo", + mapOf("view" to view, "renderMode" to renderMode), + emptyMap() + ) if (view is TextureView || view is SurfaceView) { val canvas = VideoCanvas(view, renderMode, 0) canvas.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED @@ -193,8 +215,11 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { override fun updateCameraConfig(config: CameraConfig): Int { LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config") cameraConfig = CameraConfig(config.frontMirror, config.backMirror) - this.config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "updateCameraConfig", "config=$config", 0) - + apiReporter.reportFuncEvent( + "updateCameraConfig", + mapOf("config" to config), + emptyMap() + ) return ErrorCode.ERROR_OK.value } @@ -207,10 +232,10 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { LogUtils.e(TAG, "runOnProcessThread >> The beauty api has been released!") return } - if (textureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) { + 
if (beautyTextureBufferHelper?.handler?.looper?.thread == Thread.currentThread()) { run.invoke() - } else if (textureBufferHelper != null) { - textureBufferHelper?.handler?.post(run) + } else if (beautyTextureBufferHelper != null) { + beautyTextureBufferHelper?.handler?.post(run) } else { pendingProcessRunList.add(run) } @@ -219,6 +244,10 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { override fun isFrontCamera() = isFrontCamera override fun setParameters(key: String, value: String) { + apiReporter.reportFuncEvent("setParameters", + mapOf("key" to key, "value" to value), + emptyMap() + ) when(key){ "beauty_mode" -> beautyMode = value.toInt() "enableTextureAsync" -> enableTextureAsync = value.toBoolean() @@ -237,8 +266,10 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } LogUtils.i(TAG, "setBeautyPreset >> preset = $preset") - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "preset=$preset", 0) - + apiReporter.reportFuncEvent("setBeautyPreset", + mapOf("preset" to preset), + emptyMap() + ) val recommendFaceBeauty = FaceBeauty(FUBundleData("graphics" + File.separator + "face_beautification.bundle")) if (preset == BeautyPreset.DEFAULT) { recommendFaceBeauty.filterName = FaceBeautyFilterEnum.FENNEN_1 @@ -304,24 +335,27 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { if (conf.captureMode == CaptureMode.Agora) { conf.rtcEngine.registerVideoFrameObserver(null) } - conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0) + apiReporter.reportFuncEvent("release", emptyMap(), emptyMap()) + apiReporter.endDurationEvent("initialize-release", emptyMap()) isReleased = true - textureBufferHelper?.let { - textureBufferHelper = null + beautyTextureBufferHelper?.let { + beautyTextureBufferHelper = null it.handler.removeCallbacksAndMessages(null) it.invoke { fuRenderer.release() - mTextureProcessHelper?.release() - mTextureProcessHelper = null transformGLFrameBuffer.release() + outGLFrameBuffer.release() null } - // it.handler.looper.quit() it.dispose() } - wrapTextureBufferHelper?.let { - wrapTextureBufferHelper = null + asyncTextureBufferHelper?.let { + asyncTextureBufferHelper = null + it.invoke { + asyncTextureProcessHelper?.release() + asyncTextureProcessHelper = null + } it.dispose() } statsHelper?.reset() @@ -378,10 +412,13 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { if(renderMirror) Constants.VIDEO_MIRROR_MODE_ENABLED else Constants.VIDEO_MIRROR_MODE_DISABLED ) } - textureBufferHelper?.invoke { - mTextureProcessHelper?.reset() + asyncTextureBufferHelper?.invoke { + asyncTextureProcessHelper?.reset() + } + beautyTextureBufferHelper?.invoke { + skipFrame = 2 + outGLFrameBuffer.resetTexture() } - skipFrame = 2 return false } @@ -394,8 +431,8 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { if(enableChange){ enableChange = false - textureBufferHelper?.invoke { - mTextureProcessHelper?.reset() + asyncTextureBufferHelper?.invoke { + asyncTextureProcessHelper?.reset() } return false } @@ -404,12 +441,12 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return true } - if (textureBufferHelper == null) { - textureBufferHelper = TextureBufferHelper.create( + if (beautyTextureBufferHelper == null) { + beautyTextureBufferHelper = TextureBufferHelper.create( "FURender", EglBaseProvider.instance().rootEglBase.eglBaseContext ) - textureBufferHelper?.invoke { + beautyTextureBufferHelper?.invoke 
{ synchronized(pendingProcessRunList){ val iterator = pendingProcessRunList.iterator() while (iterator.hasNext()){ @@ -419,13 +456,7 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } } LogUtils.i(TAG, "processBeauty >> create texture buffer, beautyMode=$beautyMode") - } - if (wrapTextureBufferHelper == null) { - wrapTextureBufferHelper = TextureBufferHelper.create( - "FURenderWrap", - EglBaseProvider.instance().rootEglBase.eglBaseContext - ) - LogUtils.i(TAG, "processBeauty >> create texture buffer wrap, beautyMode=$beautyMode") + apiReporter.startDurationEvent("first_beauty_frame") } val startTime = System.currentTimeMillis() val processTexId = when (beautyMode) { @@ -456,7 +487,16 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { return false } - val processBuffer: TextureBuffer = wrapTextureBufferHelper?.wrapTextureBuffer( + apiReporter.endDurationEvent("first_beauty_frame", + mapOf( + "width" to videoFrame.rotatedWidth, + "height" to videoFrame.rotatedHeight, + "camera_facing" to videoFrame.sourceType.name, + "buffer_type" to videoFrame.buffer::class.java.simpleName, + ) + ) + + val processBuffer: TextureBuffer = beautyTextureBufferHelper?.wrapTextureBuffer( videoFrame.rotatedWidth, videoFrame.rotatedHeight, TextureBuffer.Type.RGB, @@ -481,16 +521,21 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } private fun processBeautySingleTextureAsync(videoFrame: VideoFrame): Int { - val texBufferHelper = wrapTextureBufferHelper ?: return -1 + val texBufferHelper = beautyTextureBufferHelper ?: return -1 val textureBuffer = videoFrame.buffer as? TextureBuffer ?: return -1 + if (asyncTextureBufferHelper == null) { + asyncTextureBufferHelper = TextureBufferHelper.create( + "FURenderAsync", + EglBaseProvider.instance().rootEglBase.eglBaseContext + ) + LogUtils.i(TAG, "processBeauty >> create texture buffer wrap, beautyMode=$beautyMode") + } + when(textureBuffer.type){ TextureBuffer.Type.OES -> { if(currProcessSourceType != ProcessSourceType.TEXTURE_OES_ASYNC){ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES_ASYNC}") - if (currProcessSourceType != ProcessSourceType.UNKNOWN) { - skipFrame = 3 - } currProcessSourceType = ProcessSourceType.TEXTURE_OES_ASYNC return -1 } @@ -498,19 +543,15 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { else -> { if(currProcessSourceType != ProcessSourceType.TEXTURE_2D_ASYNC){ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D_ASYNC}") - if (currProcessSourceType != ProcessSourceType.UNKNOWN) { - skipFrame = 3 - } currProcessSourceType = ProcessSourceType.TEXTURE_2D_ASYNC - skipFrame = 6 return -1 } } } - if(mTextureProcessHelper == null) { - mTextureProcessHelper = TextureProcessHelper() - mTextureProcessHelper?.setFilter { frame -> + if(asyncTextureProcessHelper == null) { + asyncTextureProcessHelper = TextureProcessHelper() + asyncTextureProcessHelper?.setFilter { frame -> val fuRenderKit = config?.fuRenderKit ?: return@setFilter -1 val input = FURenderInputData(frame.width, frame.height) @@ -522,22 +563,22 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { input.renderConfig.let { if (isFront) { it.cameraFacing = CameraFacingEnum.CAMERA_FRONT - it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 - it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 - it.outputMatrix = 
FUTransformMatrixEnum.CCROT0_FLIPVERTICAL - it.deviceOrientation = 270 + it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.outputMatrix = FUTransformMatrixEnum.CCROT0 + it.deviceOrientation = 90 } else { it.cameraFacing = CameraFacingEnum.CAMERA_BACK - it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 - it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 - it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL - it.deviceOrientation = 270 + it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.outputMatrix = FUTransformMatrixEnum.CCROT0 + it.deviceOrientation = 90 } } if (isReleased) { return@setFilter -1 } - val ret = textureBufferHelper?.invoke { + val ret = texBufferHelper.invoke { synchronized(EglBase.lock){ return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1 } @@ -546,12 +587,12 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } } - return texBufferHelper.invoke { + return asyncTextureBufferHelper?.invoke { if(isReleased){ return@invoke -1 } - return@invoke mTextureProcessHelper?.process( + return@invoke asyncTextureProcessHelper?.process( textureBuffer.textureId, when (textureBuffer.type) { TextureBuffer.Type.OES -> GLES11Ext.GL_TEXTURE_EXTERNAL_OES @@ -564,20 +605,17 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { isFrontCamera, (isFrontCamera && !captureMirror) || (!isFrontCamera && captureMirror) )?: -1 - } + } ?: -1 } private fun processBeautySingleTexture(videoFrame: VideoFrame): Int { - val texBufferHelper = textureBufferHelper ?: return -1 + val texBufferHelper = beautyTextureBufferHelper ?: return -1 val textureBuffer = videoFrame.buffer as? 
TextureBuffer ?: return -1 when(textureBuffer.type){ TextureBuffer.Type.OES -> { if(currProcessSourceType != ProcessSourceType.TEXTURE_OES){ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_OES}") - if (currProcessSourceType != ProcessSourceType.UNKNOWN) { - skipFrame = 3 - } currProcessSourceType = ProcessSourceType.TEXTURE_OES return -1 } @@ -585,11 +623,7 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { else -> { if(currProcessSourceType != ProcessSourceType.TEXTURE_2D){ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.TEXTURE_2D}") - if (currProcessSourceType != ProcessSourceType.UNKNOWN) { - skipFrame = 3 - } currProcessSourceType = ProcessSourceType.TEXTURE_2D - skipFrame = 6 return -1 } } @@ -627,34 +661,35 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { input.renderConfig.let { if (isFront) { it.cameraFacing = CameraFacingEnum.CAMERA_FRONT - it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 - it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 - it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL - it.deviceOrientation = 270 + it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.outputMatrix = FUTransformMatrixEnum.CCROT0 + it.deviceOrientation = 90 } else { it.cameraFacing = CameraFacingEnum.CAMERA_BACK - it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0 - it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0 - it.outputMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL - it.deviceOrientation = 270 + it.inputBufferMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.inputTextureMatrix = FUTransformMatrixEnum.CCROT0_FLIPVERTICAL + it.outputMatrix = FUTransformMatrixEnum.CCROT0 + it.deviceOrientation = 90 } } if (isReleased) { return@invoke -1 } + var fuTexId = -1 synchronized(EglBase.lock){ - return@invoke fuRenderKit.renderWithInput(input).texture?.texId ?: -1 + fuTexId = fuRenderKit.renderWithInput(input).texture?.texId ?: -1 } + outGLFrameBuffer.setSize(videoFrame.rotatedWidth, videoFrame.rotatedHeight) + outGLFrameBuffer.resetTransform() + return@invoke outGLFrameBuffer.process(fuTexId, GLES20.GL_TEXTURE_2D) } } private fun processBeautySingleBuffer(videoFrame: VideoFrame): Int { - val texBufferHelper = textureBufferHelper ?: return -1 + val texBufferHelper = beautyTextureBufferHelper ?: return -1 if(currProcessSourceType != ProcessSourceType.I420){ LogUtils.i(TAG, "processBeauty >> process source type change old=$currProcessSourceType, new=${ProcessSourceType.I420}") - if (currProcessSourceType != ProcessSourceType.UNKNOWN) { - skipFrame = 3 - } currProcessSourceType = ProcessSourceType.I420 return -1 } @@ -748,15 +783,20 @@ class FaceUnityBeautyAPIImpl : FaceUnityBeautyAPI, IVideoFrameObserver { } } - mTextureProcessHelper?.let { - if(it.size() > 0){ - it.reset() - return@Callable -1 + if ((asyncTextureProcessHelper?.size() ?: 0) > 0) { + asyncTextureBufferHelper?.invoke { + asyncTextureProcessHelper?.reset() } + return@Callable -1 } - synchronized(EglBase.lock){ - return@Callable fuRenderKit.renderWithInput(input).texture?.texId ?: -1 + + var fuTexId = -1 + synchronized(EglBase.lock) { + fuTexId = fuRenderKit.renderWithInput(input).texture?.texId ?: -1 } + outGLFrameBuffer.setSize(videoFrame.rotatedWidth, videoFrame.rotatedHeight) + outGLFrameBuffer.resetTransform() + return@Callable 
outGLFrameBuffer.process(fuTexId, GLES20.GL_TEXTURE_2D); }) } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/APIReporter.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/APIReporter.kt new file mode 100644 index 000000000..6df300520 --- /dev/null +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/APIReporter.kt @@ -0,0 +1,139 @@ +package io.agora.beautyapi.faceunity.utils + +import android.util.Log +import io.agora.rtc2.Constants +import io.agora.rtc2.RtcEngine +import org.json.JSONObject + +enum class APIType(val value: Int) { + KTV(1), // K歌 + CALL(2), // 呼叫连麦 + BEAUTY(3), // 美颜 + VIDEO_LOADER(4), // 秒开秒切 + PK(5), // 团战 + VIRTUAL_SPACE(6), // + SCREEN_SPACE(7), // 屏幕共享 + AUDIO_SCENARIO(8) // 音频 +} + +enum class ApiEventType(val value: Int) { + API(0), + COST(1), + CUSTOM(2) +} + +object ApiEventKey { + const val TYPE = "type" + const val DESC = "desc" + const val API_VALUE = "apiValue" + const val TIMESTAMP = "ts" + const val EXT = "ext" +} + +object ApiCostEvent { + const val CHANNEL_USAGE = "channelUsage" //频道使用耗时 + const val FIRST_FRAME_ACTUAL = "firstFrameActual" //首帧实际耗时 + const val FIRST_FRAME_PERCEIVED = "firstFramePerceived" //首帧感官耗时 +} + +class APIReporter( + private val type: APIType, + private val version: String, + private val rtcEngine: RtcEngine +) { + private val tag = "APIReporter" + private val messageId = "agora:scenarioAPI" + private val durationEventStartMap = HashMap<String, Long>() + private val category = "${type.value}_Android_$version" + + init { + configParameters() + } + + // 上报普通场景化API + fun reportFuncEvent(name: String, value: Map<String, Any>, ext: Map<String, Any>) { + Log.d(tag, "reportFuncEvent: $name value: $value ext: $ext") + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.API.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.API_VALUE to value, ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0) + } + + fun startDurationEvent(name: String) { + Log.d(tag, "startDurationEvent: $name") + durationEventStartMap[name] = getCurrentTs() + } + + fun endDurationEvent(name: String, ext: Map<String, Any>) { + Log.d(tag, "endDurationEvent: $name") + val beginTs = durationEventStartMap[name] ?: return + durationEventStartMap.remove(name) + val ts = getCurrentTs() + val cost = (ts - beginTs).toInt() + + innerReportCostEvent(ts, name, cost, ext) + } + + // 上报耗时打点信息 + fun reportCostEvent(name: String, cost: Int, ext: Map<String, Any>) { + durationEventStartMap.remove(name) + innerReportCostEvent( + ts = getCurrentTs(), + name = name, + cost = cost, + ext = ext + ) + } + + // 上报自定义信息 + fun reportCustomEvent(name: String, ext: Map<String, Any>) { + Log.d(tag, "reportCustomEvent: $name ext: $ext") + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.CUSTOM.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0) + } + + fun writeLog(content: String, level: Int) { + rtcEngine.writeLog(level, content) + } + + fun cleanCache() { + durationEventStartMap.clear() + } + + // ---------------------- private ---------------------- + + private fun configParameters() { +
//rtcEngine.setParameters("{\"rtc.qos_for_test_purpose\": true}") //测试环境使用 + // 数据上报 + rtcEngine.setParameters("{\"rtc.direct_send_custom_event\": true}") + // 日志写入 + rtcEngine.setParameters("{\"rtc.log_external_input\": true}") + } + + private fun getCurrentTs(): Long { + return System.currentTimeMillis() + } + + private fun innerReportCostEvent(ts: Long, name: String, cost: Int, ext: Map<String, Any>) { + Log.d(tag, "reportCostEvent: $name cost: $cost ms ext: $ext") + writeLog("reportCostEvent: $name cost: $cost ms", Constants.LOG_LEVEL_INFO) + val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.COST.value, ApiEventKey.DESC to name) + val labelMap = mapOf(ApiEventKey.TIMESTAMP to ts, ApiEventKey.EXT to ext) + val event = convertToJSONString(eventMap) ?: "" + val label = convertToJSONString(labelMap) ?: "" + rtcEngine.sendCustomReportMessage(messageId, category, event, label, cost) + } + + private fun convertToJSONString(dictionary: Map<String, Any>): String? { + return try { + JSONObject(dictionary).toString() + } catch (e: Exception) { + writeLog("[$tag]convert to json fail: $e dictionary: $dictionary", Constants.LOG_LEVEL_WARNING) + null + } + } +} \ No newline at end of file diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java index aa345adae..5815b4e78 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/GLFrameBuffer.java @@ -115,9 +115,9 @@ public int process(int textureId, int textureType) { synchronized (EglBase.lock){ if(textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES){ - drawer.drawOes(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight, 0,0); + drawer.drawOes(textureId,0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight,0); }else{ - drawer.drawRgb(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight, 0,0); + drawer.drawRgb(textureId,0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight,0); } } @@ -201,4 +201,7 @@ private void bindFramebuffer(int textureId) { GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_NONE); } + public void resetTexture() { + deleteTexture(); + } } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt index 1451750b4..439a185cc 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/faceunity/utils/egl/TextureProcessHelper.kt @@ -173,7 +173,7 @@ class TextureProcessHelper( future = futureQueue.poll() } glTextureBufferQueueIn.reset() -// glFrameBuffer.release() + glFrameBuffer.resetTexture() executeSync { glTextureBufferQueueOut.reset() } diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt index 0ecec0df9..2606617ff 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPI.kt @@ -32,7 +32,7 @@ import io.agora.base.VideoFrame import io.agora.rtc2.Constants import io.agora.rtc2.RtcEngine -const val VERSION = "1.0.6" +const val VERSION
= "1.0.7" enum class CaptureMode{ Agora, // 使用声网内部的祼数据接口进行处理 diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt index ec346087a..efcdf6757 100644 --- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt +++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/SenseTimeBeautyAPIImpl.kt @@ -40,7 +40,10 @@ import io.agora.base.VideoFrame.I420Buffer import io.agora.base.VideoFrame.SourceType import io.agora.base.VideoFrame.TextureBuffer import io.agora.base.internal.video.RendererCommon +import io.agora.base.internal.video.YuvConverter import io.agora.base.internal.video.YuvHelper +import io.agora.beautyapi.sensetime.utils.APIReporter +import io.agora.beautyapi.sensetime.utils.APIType import io.agora.beautyapi.sensetime.utils.LogUtils import io.agora.beautyapi.sensetime.utils.StatsHelper import io.agora.beautyapi.sensetime.utils.processor.IBeautyProcessor @@ -57,8 +60,6 @@ import java.util.concurrent.Executors class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { private val TAG = "SenseTimeBeautyAPIImpl" - private val reportId = "scenarioAPI" - private val reportCategory = "beauty_android_$VERSION" private var beautyMode = 0 // 0: 自动根据buffer类型切换,1:固定使用OES纹理,2:固定使用i420 private var textureBufferHelper: TextureBufferHelper? = null @@ -77,6 +78,7 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { private var cameraConfig = CameraConfig() private var localVideoRenderMode = Constants.RENDER_MODE_HIDDEN private val pendingProcessRunList = Collections.synchronizedList(mutableListOf<()->Unit>()) + private val apiReporter by lazy { APIReporter(APIType.BEAUTY, VERSION, config!!.rtcEngine) } private enum class ProcessSourceType{ UNKNOWN, @@ -104,8 +106,17 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { cameraConfig = CameraConfig(config.cameraConfig.frontMirror, config.cameraConfig.backMirror) LogUtils.i(TAG, "initialize >> config = $config") LogUtils.i(TAG, "initialize >> beauty api version=$VERSION, beauty sdk version=${STCommonNative.getVersion()}") - // config.rtcEngine.setParameters("{\"rtc.qos_for_test_purpose\":101}") // 实时上报 - config.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "initialize", "config=$config", 0) + apiReporter.reportFuncEvent( + "initialize", + mapOf( + "captureMode" to config.captureMode, + "statsDuration" to config.statsDuration, + "statsEnable" to config.statsEnable, + "cameraConfig" to config.cameraConfig, + ), + emptyMap() + ) + apiReporter.startDurationEvent("initialize-release") return ErrorCode.ERROR_OK.value } @@ -123,7 +134,11 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { skipFrame = 2 LogUtils.i(TAG, "enable >> skipFrame = $skipFrame") } - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "enable=$enable", 0) + apiReporter.reportFuncEvent( + "enable", + mapOf("enable" to enable), + emptyMap() + ) if(this.enable != enable){ this.enable = enable @@ -142,7 +157,11 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { } LogUtils.i(TAG, "setupLocalVideo >> view=$view, renderMode=$renderMode") localVideoRenderMode = renderMode - rtcEngine.sendCustomReportMessage(reportId, reportCategory, "enable", "view=$view, renderMode=$renderMode", 0) + apiReporter.reportFuncEvent( + "setupLocalVide", + mapOf("view" to view, 
"renderMode" to renderMode), + emptyMap() + ) if(view is TextureView || view is SurfaceView){ val canvas = VideoCanvas(view, renderMode, 0) canvas.mirrorMode = Constants.VIDEO_MIRROR_MODE_DISABLED @@ -184,7 +203,11 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { return ErrorCode.ERROR_HAS_RELEASED.value } LogUtils.i(TAG, "setBeautyPreset >> preset = $preset") - config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "enable", "preset=$preset", 0) + apiReporter.reportFuncEvent( + "setBeautyPreset", + mapOf("preset" to preset), + emptyMap() + ) val enable = preset == BeautyPreset.DEFAULT workerThreadExecutor.submit { @@ -316,7 +339,11 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { override fun updateCameraConfig(config: CameraConfig): Int { LogUtils.i(TAG, "updateCameraConfig >> oldCameraConfig=$cameraConfig, newCameraConfig=$config") cameraConfig = CameraConfig(config.frontMirror, config.backMirror) - this.config?.rtcEngine?.sendCustomReportMessage(reportId, reportCategory, "updateCameraConfig", "config=$config", 0) + apiReporter.reportFuncEvent( + "updateCameraConfig", + mapOf("config" to config), + emptyMap() + ) return ErrorCode.ERROR_OK.value } @@ -324,6 +351,7 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { override fun isFrontCamera() = isFrontCamera override fun setParameters(key: String, value: String) { + apiReporter.reportFuncEvent("setParameters", mapOf("key" to key, "value" to value), emptyMap()) when(key){ "beauty_mode" -> beautyMode = value.toInt() } @@ -342,7 +370,8 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { if (conf.captureMode == CaptureMode.Agora) { conf.rtcEngine.registerVideoFrameObserver(null) } - conf.rtcEngine.sendCustomReportMessage(reportId, reportCategory, "release", "", 0) + apiReporter.reportFuncEvent("release", emptyMap(), emptyMap()) + apiReporter.endDurationEvent("initialize-release", emptyMap()) LogUtils.i(TAG, "release") isReleased = true @@ -414,6 +443,7 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { textureBufferHelper?.invoke { beautyProcessor?.reset() } + apiReporter.startDurationEvent("first_beauty_frame") return false } @@ -475,6 +505,8 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { return false } + apiReporter.endDurationEvent("first_beauty_frame", emptyMap()) + val processBuffer: TextureBuffer = textureBufferHelper?.wrapTextureBuffer( videoFrame.rotatedWidth, videoFrame.rotatedHeight, @@ -632,6 +664,8 @@ class SenseTimeBeautyAPIImpl : SenseTimeBeautyAPI, IVideoFrameObserver { private fun getNV21Buffer(videoFrame: VideoFrame) : ByteArray? { val buffer = videoFrame.buffer + YuvConverter.setEnablePboOpt(true) + YuvConverter.setEnableConvertPerLog(true) val i420Buffer = buffer as? 
diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/APIReporter.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/APIReporter.kt
new file mode 100644
index 000000000..bbef8261c
--- /dev/null
+++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/APIReporter.kt
@@ -0,0 +1,139 @@
+package io.agora.beautyapi.sensetime.utils
+
+import android.util.Log
+import io.agora.rtc2.Constants
+import io.agora.rtc2.RtcEngine
+import org.json.JSONObject
+
+enum class APIType(val value: Int) {
+    KTV(1),             // Karaoke
+    CALL(2),            // Calling / co-hosting
+    BEAUTY(3),          // Beauty
+    VIDEO_LOADER(4),    // Instant video loading / switching
+    PK(5),              // Team PK
+    VIRTUAL_SPACE(6),   //
+    SCREEN_SPACE(7),    // Screen sharing
+    AUDIO_SCENARIO(8)   // Audio
+}
+
+enum class ApiEventType(val value: Int) {
+    API(0),
+    COST(1),
+    CUSTOM(2)
+}
+
+object ApiEventKey {
+    const val TYPE = "type"
+    const val DESC = "desc"
+    const val API_VALUE = "apiValue"
+    const val TIMESTAMP = "ts"
+    const val EXT = "ext"
+}
+
+object ApiCostEvent {
+    const val CHANNEL_USAGE = "channelUsage"                // time spent in the channel
+    const val FIRST_FRAME_ACTUAL = "firstFrameActual"       // actual time to first frame
+    const val FIRST_FRAME_PERCEIVED = "firstFramePerceived" // perceived time to first frame
+}
+
+class APIReporter(
+    private val type: APIType,
+    private val version: String,
+    private val rtcEngine: RtcEngine
+) {
+    private val tag = "APIReporter"
+    private val messageId = "agora:scenarioAPI"
+    private val durationEventStartMap = HashMap<String, Long>()
+    private val category = "${type.value}_Android_$version"
+
+    init {
+        configParameters()
+    }
+
+    // Report a regular scenario API event
+    fun reportFuncEvent(name: String, value: Map<String, Any>, ext: Map<String, Any>) {
+        Log.d(tag, "reportFuncEvent: $name value: $value ext: $ext")
+        val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.API.value, ApiEventKey.DESC to name)
+        val labelMap = mapOf(ApiEventKey.API_VALUE to value, ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext)
+        val event = convertToJSONString(eventMap) ?: ""
+        val label = convertToJSONString(labelMap) ?: ""
+        rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0)
+    }
+
+    fun startDurationEvent(name: String) {
+        Log.d(tag, "startDurationEvent: $name")
+        durationEventStartMap[name] = getCurrentTs()
+    }
+
+    fun endDurationEvent(name: String, ext: Map<String, Any>) {
+        Log.d(tag, "endDurationEvent: $name")
+        val beginTs = durationEventStartMap[name] ?: return
+        durationEventStartMap.remove(name)
+        val ts = getCurrentTs()
+        val cost = (ts - beginTs).toInt()
+
+        innerReportCostEvent(ts, name, cost, ext)
+    }
+
+    // Report a cost (duration) measurement
+    fun reportCostEvent(name: String, cost: Int, ext: Map<String, Any>) {
+        durationEventStartMap.remove(name)
+        innerReportCostEvent(
+            ts = getCurrentTs(),
+            name = name,
+            cost = cost,
+            ext = ext
+        )
+    }
+
+    // Report a custom event
+    fun reportCustomEvent(name: String, ext: Map<String, Any>) {
+        Log.d(tag, "reportCustomEvent: $name ext: $ext")
+        val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.CUSTOM.value, ApiEventKey.DESC to name)
+        val labelMap = mapOf(ApiEventKey.TIMESTAMP to getCurrentTs(), ApiEventKey.EXT to ext)
+        val event = convertToJSONString(eventMap) ?: ""
+        val label = convertToJSONString(labelMap) ?: ""
+        rtcEngine.sendCustomReportMessage(messageId, category, event, label, 0)
+    }
+
+    fun writeLog(content: String, level: Int) {
+        rtcEngine.writeLog(level, content)
+    }
+
+    fun cleanCache() {
+        durationEventStartMap.clear()
+    }
+
+    // ---------------------- private ----------------------
+
+    private fun configParameters() {
+        //rtcEngine.setParameters("{\"rtc.qos_for_test_purpose\": true}") // for test environments only
+        // data reporting
+        rtcEngine.setParameters("{\"rtc.direct_send_custom_event\": true}")
+        // log writing
+        rtcEngine.setParameters("{\"rtc.log_external_input\": true}")
+    }
+
+    private fun getCurrentTs(): Long {
+        return System.currentTimeMillis()
+    }
+
+    private fun innerReportCostEvent(ts: Long, name: String, cost: Int, ext: Map<String, Any>) {
+        Log.d(tag, "reportCostEvent: $name cost: $cost ms ext: $ext")
+        writeLog("reportCostEvent: $name cost: $cost ms", Constants.LOG_LEVEL_INFO)
+        val eventMap = mapOf(ApiEventKey.TYPE to ApiEventType.COST.value, ApiEventKey.DESC to name)
+        val labelMap = mapOf(ApiEventKey.TIMESTAMP to ts, ApiEventKey.EXT to ext)
+        val event = convertToJSONString(eventMap) ?: ""
+        val label = convertToJSONString(labelMap) ?: ""
+        rtcEngine.sendCustomReportMessage(messageId, category, event, label, cost)
+    }
+
+    private fun convertToJSONString(dictionary: Map<String, Any>): String? {
+        return try {
+            JSONObject(dictionary).toString()
+        } catch (e: Exception) {
+            writeLog("[$tag]convert to json fail: $e dictionary: $dictionary", Constants.LOG_LEVEL_WARNING)
+            null
+        }
+    }
+}
\ No newline at end of file
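
The new APIReporter packages each event into two JSON strings (an event descriptor and a label holding the payload, a timestamp, and optional extras) and forwards them through RtcEngine.sendCustomReportMessage under the "agora:scenarioAPI" message id. A minimal usage sketch follows, assuming an RtcEngine created elsewhere; the reportBeautyUsage wrapper and the "beauty_preset_changed" event with its payload are illustrative and not part of this patch. SenseTimeBeautyAPIImpl above uses the same pattern with its "initialize-release" and "first_beauty_frame" duration events.

    import io.agora.beautyapi.sensetime.utils.APIReporter
    import io.agora.beautyapi.sensetime.utils.APIType
    import io.agora.rtc2.RtcEngine

    // Sketch only: `rtcEngine` is assumed to be created elsewhere via RtcEngine.create(...).
    fun reportBeautyUsage(rtcEngine: RtcEngine) {
        val reporter = APIReporter(APIType.BEAUTY, "1.0.7", rtcEngine)

        // One-off API event with an arbitrary key/value payload.
        reporter.reportFuncEvent("enable", mapOf("enable" to true), emptyMap())

        // Paired duration event: the reported cost is the elapsed time between start and end.
        reporter.startDurationEvent("initialize-release")
        // ... beauty API in use ...
        reporter.endDurationEvent("initialize-release", emptyMap())

        // Custom event carrying only extra context (event name here is illustrative).
        reporter.reportCustomEvent("beauty_preset_changed", mapOf("preset" to "DEFAULT"))

        reporter.cleanCache()
    }
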
diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java
index b9853ec1f..d2f245c83 100644
--- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java
+++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/egl/GLFrameBuffer.java
@@ -113,9 +113,9 @@ public int process(int textureId, int textureType) {
         float[] matrix = RendererCommon.convertMatrixFromAndroidGraphicsMatrix(transform);
         if(textureType == GLES11Ext.GL_TEXTURE_EXTERNAL_OES){
-            drawer.drawOes(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight, 0,0);
+            drawer.drawOes(textureId, 0,matrix, mWidth, mHeight, 0, 0, mWidth, mHeight,0);
         }else{
-            drawer.drawRgb(textureId, 0, matrix, mWidth, mHeight, 0, 0, mWidth, mHeight, 0,0);
+            drawer.drawRgb(textureId, 0,matrix, mWidth, mHeight, 0, 0, mWidth, mHeight,0);
         }
         GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
         GLES20.glFinish();
diff --git a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt
index 164406442..36bca0b0f 100644
--- a/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt
+++ b/Android/APIExample/app/src/main/java/io/agora/beautyapi/sensetime/utils/processor/BeautyProcessor.kt
@@ -443,13 +443,12 @@ class BeautyProcessor : IBeautyProcessor {
         if (isReleased) {
             return -1
         }
-        mSTMobileEffectNative.render(
+        val ret = mSTMobileEffectNative.render(
            sTEffectRenderInParam,
            stEffectRenderOutParam,
            false
        )
-
         if (event == mCustomEvent) {
             mCustomEvent = 0
         }
@@ -457,12 +456,18 @@ class BeautyProcessor : IBeautyProcessor {
         if (isReleased) {
             return -1
         }
+
+        var finalTextId = stEffectRenderOutParam.texture?.id ?: 0
+        if(ret < 0){
+            finalTextId = textureId
+        }
+
         glFrameBuffer.setSize(width, height)
         glFrameBuffer.resetTransform()
         glFrameBuffer.setFlipV(true)
         glFrameBuffer.textureId = finalOutTextureId
         glFrameBuffer.process(
-            stEffectRenderOutParam.texture?.id ?: 0,
+            finalTextId,
             GLES20.GL_TEXTURE_2D
         )
         GLES20.glFinish()
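
The BeautyProcessor change above keeps the preview usable when the SenseTime render call fails: a negative return code now makes the processor draw the original input texture instead of the possibly invalid effect output. A small self-contained sketch of that selection logic; selectTextureToDraw and renderEffect are illustrative stand-ins, not names from the patch.

    // Stand-in for the fallback added in BeautyProcessor: use the effect output
    // only when the render call succeeded, otherwise fall back to the input texture.
    fun selectTextureToDraw(inputTextureId: Int, renderEffect: () -> Pair<Int, Int>): Int {
        val (ret, outputTextureId) = renderEffect() // (return code, effect output texture id)
        return if (ret < 0) inputTextureId else outputTextureId
    }

    fun main() {
        // Simulated failure (ret = -1): the input texture should be chosen.
        println(selectTextureToDraw(inputTextureId = 101) { -1 to 0 })   // prints 101
        // Simulated success (ret = 0): the effect output texture should be chosen.
        println(selectTextureToDraw(inputTextureId = 101) { 0 to 202 })  // prints 202
    }
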