diff --git a/example/android/build.gradle b/example/android/build.gradle index 83ae220..3cdaac9 100644 --- a/example/android/build.gradle +++ b/example/android/build.gradle @@ -26,6 +26,6 @@ subprojects { project.evaluationDependsOn(':app') } -task clean(type: Delete) { +tasks.register("clean", Delete) { delete rootProject.buildDir } diff --git a/example/lib/animated_example_controller.dart b/example/lib/animated_example_controller.dart index 1d24d25..a6287b6 100644 --- a/example/lib/animated_example_controller.dart +++ b/example/lib/animated_example_controller.dart @@ -37,7 +37,7 @@ class ExampleAnimationController extends ChangeNotifier { VideoPlayerController? videoController; if (!Platform.isMacOS) { videoController = VideoPlayerController.network( - 'https://www.fluttercampus.com/video.mp4', + 'https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4', // 1 min: https://storage.googleapis.com/gtv-videos-bucket/sample/ForBiggerFun.mp4 // 4 sec: 'https://www.fluttercampus.com/video.mp4' ); diff --git a/example/lib/main.dart b/example/lib/main.dart index 469273b..cee9492 100644 --- a/example/lib/main.dart +++ b/example/lib/main.dart @@ -76,7 +76,6 @@ class _MyHomePageState extends State settings: const MotionSettings( pixelRatio: 5, frameRate: 30, - simultaneousCaptureHandlers: 6, ), logInConsole: true, format: Mp4Format(audio: [ @@ -131,6 +130,7 @@ class _MyHomePageState extends State Future displayResult(RenderResult result, [bool saveToGallery = false]) async { + print("file path: ${result.output.path}"); print("file exits: ${await result.output.exists()}"); if (mounted) { showDialog( diff --git a/lib/src/capturer.dart b/lib/src/capturer.dart index a199af0..39055ec 100644 --- a/lib/src/capturer.dart +++ b/lib/src/capturer.dart @@ -1,8 +1,9 @@ import 'dart:async'; +import 'dart:io'; import 'dart:typed_data'; import 'dart:ui' as ui; -import 'package:ffmpeg_kit_flutter_https_gpl/ffmpeg_kit.dart'; +import 
'package:ffmpeg_kit_flutter_https_gpl/ffmpeg_kit_config.dart'; import 'package:flutter/material.dart'; import 'package:flutter/rendering.dart'; import 'package:render/src/service/notifier.dart'; @@ -23,11 +24,6 @@ class RenderCapturer { RenderCapturer(this.session, [this.context]); - int _activeHandlers = 0; - - /// Captures that are yet to be handled. Handled images will be disposed. - final List _unhandledCaptures = []; - /// Current image handling process. Handlers are being handles asynchronous /// as conversion and file writing is involved. final List> _handlers = []; @@ -47,6 +43,9 @@ class RenderCapturer { /// will be seen as first frame. Size? firstFrameSize; + /// The writer to write the captured images to. + IOSink? _frameWriter; + /// Runs a capturing process for a defined time. Returns capturing time duration. Future> run(Duration duration) async { start(duration); @@ -60,11 +59,7 @@ class RenderCapturer { Future> single() async { startTime = DateTime.now(); _captureFrame(0, 1); - await Future.doWhile(() async { - //await all active capture handlers - await Future.wait(_handlers); - return _handlers.length < _unhandledCaptures.length; - }); + await Future.wait(_handlers); final capturingDuration = Duration( milliseconds: DateTime.now().millisecondsSinceEpoch - startTime!.millisecondsSinceEpoch); @@ -76,6 +71,7 @@ class RenderCapturer { assert(!_rendering, "Cannot start new process, during an active one."); _rendering = true; startTime = DateTime.now(); + session.binding.addPostFrameCallback((binderTimeStamp) { startingDuration = session.binding.currentFrameTimeStamp; _postFrameCallback( @@ -95,15 +91,10 @@ class RenderCapturer { _rendering = false; startingDuration = null; // * wait for handlers - await Future.doWhile(() async { - //await all active capture handlers - await Future.wait(_handlers); - return _handlers.length < _unhandledCaptures.length; - }); + await Future.wait(_handlers); // * finish capturing, notify session - final frameAmount = 
_unhandledCaptures.length; + final frameAmount = _handlers.length; _handlers.clear(); - _unhandledCaptures.clear(); return session.upgrade(capturingDuration, frameAmount); } @@ -149,41 +140,23 @@ class RenderCapturer { /// Converting the raw image data to a png file and writing the capture. Future _handleCapture( + ui.Image capture, int captureNumber, [ int? totalFrameTarget, ]) async { - _activeHandlers++; try { - final ui.Image capture = _unhandledCaptures.elementAt(captureNumber); // * retrieve bytes // toByteData(format: ui.ImageByteFormat.png) takes way longer than raw // and then converting to png with ffmpeg final ByteData? byteData = await capture.toByteData(format: ui.ImageByteFormat.rawRgba); final rawIntList = byteData!.buffer.asInt8List(); - // * write raw file for processing - final rawFile = session - .createProcessFile("frameHandling/frame_raw$captureNumber.bmp"); - await rawFile.writeAsBytes(rawIntList); - // * write & convert file (to save storage) - final file = session.createInputFile("frame$captureNumber.png"); - final saveSize = Size( - // adjust frame size, so that it can be divided by 2 - (capture.width / 2).ceil() * 2, - (capture.height / 2).ceil() * 2, - ); - await FFmpegKit.executeWithArguments([ - "-y", - "-f", "rawvideo", // specify input format - "-pixel_format", "rgba", // maintain transparency - "-video_size", "${capture.width}x${capture.height}", // set capture size - "-i", rawFile.path, // input the raw frame - "-vf", "scale=${saveSize.width}:${saveSize.height}", // scale to save - file.path, //out put png - ]); + + // * write image to pipe + _writeToPipe(rawIntList); + // * finish capture.dispose(); - rawFile.deleteSync(); if (!_rendering) { //only record next state, when rendering is done not to mix up notification _recordActivity(RenderState.handleCaptures, captureNumber, @@ -197,19 +170,6 @@ class RenderCapturer { ), ); } - _activeHandlers--; - _triggerHandler(totalFrameTarget); - } - - /// Triggers the next handler, if within 
allowed simultaneous handlers - /// and images still available. - void _triggerHandler([int? totalFrameTarget]) { - final nextCaptureIndex = _handlers.length; - if (_activeHandlers < - (session.settings.asMotion?.simultaneousCaptureHandlers ?? 1) && - nextCaptureIndex < _unhandledCaptures.length) { - _handlers.add(_handleCapture(nextCaptureIndex, totalFrameTarget)); - } } /// Captures associated task of this frame @@ -238,8 +198,7 @@ class RenderCapturer { ); } // * initiate handler - _unhandledCaptures.add(image); - _triggerHandler(totalFrameTarget); + _handlers.add(_handleCapture(image, frameNumber, totalFrameTarget)); _recordActivity(RenderState.capturing, frameNumber, totalFrameTarget, "Captured frame $frameNumber"); } @@ -346,4 +305,21 @@ class RenderCapturer { session.recordActivity(state, null, message: message); } } + + /// Opens the pipe to the ffmpeg process + void openPipe() { + var f = File(session.inputPipe); + _frameWriter = f.openWrite(); + } + + /// Closes the pipe to the ffmpeg process + Future closePipe() async { + await _frameWriter?.close(); + await FFmpegKitConfig.closeFFmpegPipe(session.inputPipe); + } + + /// Writes data to the pipe to the ffmpeg process + void _writeToPipe(List data) { + return _frameWriter?.add(data); + } } diff --git a/lib/src/core.dart b/lib/src/core.dart index b1798ad..a1bc358 100644 --- a/lib/src/core.dart +++ b/lib/src/core.dart @@ -220,8 +220,12 @@ class RenderController { .then((detachedSession) async { final session = _createRenderSessionFrom(detachedSession, notifier); final capturer = RenderCapturer(session); - final realSession = await capturer.single(); - final processor = ImageProcessor(realSession); + capturer.openPipe(); + capturer.single().then((value) => capturer.closePipe()); + final processor = ImageProcessor( + session, + capturer.firstFrameSize!.width.toInt(), + capturer.firstFrameSize!.height.toInt()); await processor.process(); await session.dispose(); }); @@ -251,9 +255,16 @@ class RenderController { 
.then((detachedSession) async { final session = _createRenderSessionFrom(detachedSession, notifier); final capturer = RenderCapturer(session); - final realSession = await capturer.run(duration); - final processor = MotionProcessor(realSession); - await processor.process(); + capturer.openPipe(); + await capturer + .single(); // wait for first frame to be captured to get the size of the frame + capturer.run(duration).then( + (value) => capturer.closePipe()); // run the capturer for the duration + final processor = MotionProcessor( + session, + capturer.firstFrameSize!.width.toInt(), + capturer.firstFrameSize!.height.toInt()); + await processor.process(duration: duration); await session.dispose(); }); if (logInConsole) { @@ -291,8 +302,12 @@ class RenderController { widgetTask, ); final capturer = RenderCapturer(session, context); - final realSession = await capturer.single(); - final processor = ImageProcessor(realSession); + capturer.openPipe(); + capturer.single().then((value) => capturer.closePipe()); + final processor = ImageProcessor( + session, + capturer.firstFrameSize!.width.toInt(), + capturer.firstFrameSize!.height.toInt()); await processor.process(); await session.dispose(); }); @@ -337,9 +352,14 @@ class RenderController { widgetTask, ); final capturer = RenderCapturer(session, context); - final realSession = await capturer.run(duration); - final processor = MotionProcessor(realSession); - await processor.process(); + capturer.openPipe(); + await capturer.single(); + capturer.run(duration).then((value) => capturer.closePipe()); + final processor = MotionProcessor( + session, + capturer.firstFrameSize!.width.toInt(), + capturer.firstFrameSize!.height.toInt()); + await processor.process(duration: duration); await session.dispose(); }); if (logInConsole) { @@ -440,6 +460,7 @@ class MotionRecorder { ); _capturer = RenderCapturer(_session, context); _capturer.start(); + _capturer.openPipe(); }); if (logInConsole) { _controller._debugPrintOnStream( @@ -454,8 
+475,12 @@ class MotionRecorder { /// Stops the recording and returns the result of the recording. Future stop() async { - final realSession = await _capturer.finish(); - final processor = MotionProcessor(realSession); + await _capturer.finish(); + await _capturer.closePipe(); + final processor = MotionProcessor( + _session, + _capturer.firstFrameSize!.width.toInt(), + _capturer.firstFrameSize!.height.toInt()); processor.process(); // wait for result instead of process final out = await stream .firstWhere((event) => event.isResult || event.isFatalError); diff --git a/lib/src/formats/abstract.dart b/lib/src/formats/abstract.dart index 61e2052..8704d27 100644 --- a/lib/src/formats/abstract.dart +++ b/lib/src/formats/abstract.dart @@ -74,6 +74,8 @@ abstract class RenderFormat { required String inputPath, required String outputPath, required double frameRate, + required int width, + required int height, }); /// Scaling ffmpeg filter with appropriate interpolation integration @@ -118,10 +120,13 @@ abstract class MotionFormat extends RenderFormat { /// Default motion processor. This can be override, if more/other settings are /// needed. @override - FFmpegRenderOperation processor( - {required String inputPath, - required String outputPath, - required double frameRate}) { + FFmpegRenderOperation processor({ + required String inputPath, + required String outputPath, + required double frameRate, + required int width, + required int height, + }) { final audioInput = audio != null && audio!.isNotEmpty ? 
audio!.map((e) => "-i??${e.path}").join('??') : null; @@ -138,6 +143,10 @@ abstract class MotionFormat extends RenderFormat { "aac??-shortest??-pix_fmt??yuv420p??-vsync??2" : "-map??[v]??-pix_fmt??yuv420p"; return FFmpegRenderOperation([ + "-f", "rawvideo", // input format + "-pixel_format", "rgba", // input pixel format + "-s", "${width}x${height}", // input size + "-r", "$frameRate", // input frame rate "-i", inputPath, // retrieve captures audioInput, "-filter_complex", @@ -176,12 +185,19 @@ abstract class ImageFormat extends RenderFormat { /// Default image processor. This can be override, if more settings are /// needed. @override - FFmpegRenderOperation processor( - {required String inputPath, - required String outputPath, - required double frameRate}) { + FFmpegRenderOperation processor({ + required String inputPath, + required String outputPath, + required double frameRate, + required int width, + required int height, + }) { return FFmpegRenderOperation([ "-y", + "-f", "rawvideo", + "-pixel_format", "rgba", + "-s", "${width}x${height}", + "-r", "$frameRate", "-i", inputPath, // input image scalingFilter != null ? 
"-vf??$scalingFilter" : null, "-vframes", "1", // indicate that there is only one frame diff --git a/lib/src/formats/image.dart b/lib/src/formats/image.dart index 3d37851..f733caa 100644 --- a/lib/src/formats/image.dart +++ b/lib/src/formats/image.dart @@ -94,10 +94,13 @@ class BmpFormat extends ImageFormat { } @override - FFmpegRenderOperation processor( - {required String inputPath, - required String outputPath, - required double frameRate}) { + FFmpegRenderOperation processor({ + required String inputPath, + required String outputPath, + required double frameRate, + required int width, + required int height, + }) { return FFmpegRenderOperation([ "-y", "-i", inputPath, // input image diff --git a/lib/src/formats/motion.dart b/lib/src/formats/motion.dart index 5eb061a..02400de 100644 --- a/lib/src/formats/motion.dart +++ b/lib/src/formats/motion.dart @@ -91,9 +91,15 @@ class GifFormat extends MotionFormat { required String inputPath, required String outputPath, required double frameRate, + required int width, + required int height, }) { return FFmpegRenderOperation([ "-y", + "-f", "rawvideo", // input format + "-pixel_format", "rgba", // input pixel format + "-s", "${width}x${height}", // input size + "-r", "$frameRate", // input frame rate "-i", inputPath, // retrieve captures transparency ? "-filter_complex??[0:v] setpts=N/($frameRate*TB)," diff --git a/lib/src/process.dart b/lib/src/process.dart index a64e8b4..a72669f 100644 --- a/lib/src/process.dart +++ b/lib/src/process.dart @@ -11,24 +11,33 @@ import 'package:render/src/service/settings.dart'; import 'service/exception.dart'; abstract class RenderProcessor { - final RenderSession session; + final RenderSession session; - RenderProcessor(this.session); + RenderProcessor(this.session, this.width, this.height); bool _processing = false; String get inputPath; - ///Converts saved frames from temporary directory to output file - Future process() async { + int width; + + int height; + + int? 
totalFrameTarget; + + Duration? duration; + + /// Converts the captures into a video file. + Future process({Duration? duration}) async { if (_processing) { throw const RenderException( "Cannot start new process, during an active one."); } + totalFrameTarget = session.settings.asMotion?.frameRate ?? 1; + this.duration = duration; _processing = true; try { - final output = - await _processTask(session.format.processShare); + final output = await _processTask(session.format.processShare); session.recordResult(output); _processing = false; } on RenderException catch (error) { @@ -41,12 +50,14 @@ abstract class RenderProcessor { Future _processTask(double progressShare) async { final mainOutputFile = session.createOutputFile("output_main.${session.format.extension}"); + double frameRate = session.settings.asMotion?.frameRate.toDouble() ?? 1; // Receive main operation processing instructions final operation = session.format.processor( - inputPath: inputPath, - outputPath: mainOutputFile.path, - frameRate: session.settings.realFrameRate, - ); + inputPath: inputPath, + outputPath: mainOutputFile.path, + frameRate: frameRate, + width: width, + height: height); await _executeCommand( operation.arguments, progressShare: progressShare, @@ -86,15 +97,19 @@ abstract class RenderProcessor { } }, (Statistics statistics) { - final progression = ((statistics.getTime() * 100) ~/ - session.settings.capturingDuration.inMilliseconds) - .clamp(0, 100) / - 100; - session.recordActivity( - RenderState.processing, - progression.toDouble(), - message: "Converting captures", - ); + if (totalFrameTarget != null && duration != null) { + final progression = (statistics.getVideoFrameNumber() / + (totalFrameTarget! 
* duration!.inSeconds)) + .clamp(0.0, 1.0); + session.recordActivity(RenderState.processing, progression, + message: "Converting captures"); + } else { + session.recordActivity( + RenderState.processing, + null, + message: "Converting captures", + ); + } }, ); await FFmpegKitConfig.ffmpegExecute(ffmpegSession).timeout( @@ -113,15 +128,15 @@ abstract class RenderProcessor { } class ImageProcessor extends RenderProcessor { - ImageProcessor(super.session); + ImageProcessor(super.session, super.width, super.height); @override - String get inputPath => "${session.inputDirectory}/frame0.png"; + String get inputPath => session.inputPipe; } class MotionProcessor extends RenderProcessor { - MotionProcessor(super.session); + MotionProcessor(super.session, super.width, super.height); @override - String get inputPath => "${session.inputDirectory}/frame%d.png"; + String get inputPath => session.inputPipe; } diff --git a/lib/src/service/notifier.dart b/lib/src/service/notifier.dart index feb907f..401ccab 100644 --- a/lib/src/service/notifier.dart +++ b/lib/src/service/notifier.dart @@ -152,7 +152,7 @@ class RenderResult extends RenderActivity { final File output; ///The settings used to create the output file. - final RealRenderSettings usedSettings; + final RenderSettings usedSettings; final RenderFormat format; diff --git a/lib/src/service/session.dart b/lib/src/service/session.dart index 0f1460e..0c272e0 100644 --- a/lib/src/service/session.dart +++ b/lib/src/service/session.dart @@ -1,5 +1,6 @@ import 'dart:async'; import 'dart:io'; +import 'package:ffmpeg_kit_flutter_https_gpl/ffmpeg_kit_config.dart'; import 'package:flutter/scheduler.dart'; import 'package:path_provider/path_provider.dart'; import 'package:render/src/service/settings.dart'; @@ -14,21 +15,13 @@ class DetachedRenderSession { /// Pointer to session files and operation. final String sessionId; - /// Directory of a temporary storage, where files can be used for processing. 
- /// This should be somewhere in a RAM location for fast processing. - final String temporaryDirectory; - - /// Where internal files are being written (frames, layers, palettes, etc.) - /// Note that there will be additional sub-directories that separate different - /// internal actions and sessions. Directories will be deleted after a session. - final String inputDirectory; + /// Where image buffers are being piped without writing to files + /// Pipe was created by FFmpegKitConfig.registerNewFFmpegPipe() + final String inputPipe; /// Where result files are being written final String outputDirectory; - /// A directory where files are being written that are used for processing. - final String processDirectory; - /// All render related settings final K settings; @@ -46,12 +39,10 @@ class DetachedRenderSession { required this.logLevel, required this.binding, required this.outputDirectory, - required this.inputDirectory, required this.sessionId, - required this.temporaryDirectory, - required this.processDirectory, required this.settings, required this.format, + required this.inputPipe, }); /// Creates a detached render session from default values (paths & session syntax) @@ -61,15 +52,14 @@ class DetachedRenderSession { create( T format, K settings, LogLevel logLevel) async { final tempDir = await getTemporaryDirectory(); + final inputPipe = await FFmpegKitConfig.registerNewFFmpegPipe(); final sessionId = const Uuid().v4(); return DetachedRenderSession( logLevel: logLevel, binding: SchedulerBinding.instance, + inputPipe: inputPipe!, outputDirectory: "${tempDir.path}/render/$sessionId/output", - inputDirectory: "${tempDir.path}/render/$sessionId/input", - processDirectory: "${tempDir.path}/render/$sessionId/process", sessionId: sessionId, - temporaryDirectory: tempDir.path, settings: settings, format: format, ); @@ -82,18 +72,10 @@ class DetachedRenderSession { return outputFile; } - /// Creating a file in the input directory. 
- File createInputFile(String subPath) => - _createFile("$inputDirectory/$subPath"); - /// Creating a file in the output directory. File createOutputFile(String subPath) => _createFile("$outputDirectory/$subPath"); - /// Creating a file in the process directory. - File createProcessFile(String subPath) => - _createFile("$processDirectory/$subPath"); - /// The expected processing state share each part holds. This is relevant for /// calculating the expected time remain and progress percentage of rendering. /// Values are based on experimentation. @@ -132,13 +114,11 @@ class RenderSession RenderSession({ required super.logLevel, required super.settings, - required super.inputDirectory, required super.outputDirectory, - required super.processDirectory, required super.sessionId, - required super.temporaryDirectory, required super.format, required super.binding, + required super.inputPipe, required this.task, required this.onDispose, required StreamController notifier, @@ -159,15 +139,13 @@ class RenderSession }) : _notifier = notifier, startTime = DateTime.now(), super( - logLevel: detachedSession.logLevel, + logLevel: detachedSession.logLevel, binding: detachedSession.binding, format: detachedSession.format, settings: detachedSession.settings, - processDirectory: detachedSession.processDirectory, - inputDirectory: detachedSession.inputDirectory, outputDirectory: detachedSession.outputDirectory, sessionId: detachedSession.sessionId, - temporaryDirectory: detachedSession.temporaryDirectory, + inputPipe: detachedSession.inputPipe, ); /// Upgrade the current renderSession to a real session @@ -183,15 +161,13 @@ class RenderSession onDispose: onDispose, startTime: startTime, logLevel: logLevel, - inputDirectory: inputDirectory, outputDirectory: outputDirectory, - processDirectory: processDirectory, sessionId: sessionId, - temporaryDirectory: temporaryDirectory, format: format, binding: binding, task: task, notifier: _notifier, + inputPipe: inputPipe, ); } @@ -254,7 +230,7 
@@ class RenderSession session: this, format: format, timestamp: currentTimeStamp, - usedSettings: settings as RealRenderSettings, + usedSettings: settings, output: output, message: message, details: details, @@ -266,12 +242,6 @@ class RenderSession /// Disposing the current render session. Future dispose() async { onDispose(); - if (Directory(inputDirectory).existsSync()) { - Directory(inputDirectory).deleteSync(recursive: true); - } - if (Directory(processDirectory).existsSync()) { - Directory(processDirectory).deleteSync(recursive: true); - } await _notifier.close(); } } diff --git a/lib/src/service/settings.dart b/lib/src/service/settings.dart index 02a9541..97de04f 100644 --- a/lib/src/service/settings.dart +++ b/lib/src/service/settings.dart @@ -51,27 +51,9 @@ class MotionSettings extends RenderSettings { /// ! This frame rate therefore does not necessary equal to output file frame rate final int frameRate; - /// The max amount of capture handlers that should process captures at once. - /// - /// Handlers process and write frames from the RAM to a local directory. - /// Having multiple handlers at the same time heavily influences the - /// performance of the application during rendering. - /// - /// The more handlers are running simultaneously the worse gets the framerate - /// and might result in a "laggy" behavior. Less simultaneously handlers result - /// in longer loading phases. - /// - /// Note, that if there a lot of unhandled frames it might still result in - /// laggy behavior, as the application's RAM gets filled with UI images, - /// instead of many handler operations. - /// - /// To get a good sweet spot you can follow the following introduction for - /// your specific situation: - /// - /// Low pixelRatio - high frameRate - many handlers - /// high pixelRatio - low frameRate - many handlers - /// high pixelRatio - high frameRate - few handlers - final int simultaneousCaptureHandlers; + @Deprecated("Render has changed internal processing." 
+ " This value is not used anymore.") + final int? simultaneousCaptureHandlers; //TODO: remove by 0.2.0 /// Data class for storing render related settings. /// Setting the optimal settings is critical for a successfully capturing. @@ -80,7 +62,9 @@ class MotionSettings extends RenderSettings { /// it is important find leveled values and optionally computational scaling /// of the output format. const MotionSettings({ - this.simultaneousCaptureHandlers = 10, + @Deprecated("Render has changed internal processing." + " This value is not used anymore.") + this.simultaneousCaptureHandlers, this.frameRate = 20, super.pixelRatio, super.processTimeout,