Piping frames to ffmpeg #15

Draft · wants to merge 4 commits into master
Changes from 2 commits
2 changes: 1 addition & 1 deletion example/lib/animated_example_controller.dart
@@ -37,7 +37,7 @@ class ExampleAnimationController extends ChangeNotifier {
VideoPlayerController? videoController;
if (!Platform.isMacOS) {
videoController = VideoPlayerController.network(
'https://www.fluttercampus.com/video.mp4',
'https://flutter.github.io/assets-for-api-docs/assets/videos/bee.mp4',
// 1 min: https://storage.googleapis.com/gtv-videos-bucket/sample/ForBiggerFun.mp4
// 4 sec: 'https://www.fluttercampus.com/video.mp4'
);
1 change: 1 addition & 0 deletions example/lib/main.dart
@@ -131,6 +131,7 @@ class _MyHomePageState extends State<MyHomePage>

Future<void> displayResult(RenderResult result,
[bool saveToGallery = false]) async {
print("file path: ${result.output.path}");
print("file exits: ${await result.output.exists()}");
if (mounted) {
showDialog(
53 changes: 29 additions & 24 deletions lib/src/capturer.dart
@@ -1,8 +1,9 @@
import 'dart:async';
import 'dart:io';
import 'dart:typed_data';
import 'dart:ui' as ui;

import 'package:ffmpeg_kit_flutter_https_gpl/ffmpeg_kit.dart';
import 'package:ffmpeg_kit_flutter_https_gpl/ffmpeg_kit_config.dart';
import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
import 'package:render/src/service/notifier.dart';
@@ -47,6 +48,9 @@ class RenderCapturer<K extends RenderFormat> {
/// will be seen as first frame.
Size? firstFrameSize;

/// The writer to write the captured images to.
IOSink? _frameWriter;

/// Runs a capturing process for a defined time. Returns capturing time duration.
Future<RenderSession<K, RealRenderSettings>> run(Duration duration) async {
start(duration);
@@ -76,6 +80,7 @@ class RenderCapturer<K extends RenderFormat> {
assert(!_rendering, "Cannot start new process, during an active one.");
_rendering = true;
startTime = DateTime.now();

session.binding.addPostFrameCallback((binderTimeStamp) {
startingDuration = session.binding.currentFrameTimeStamp;
_postFrameCallback(
@@ -161,29 +166,12 @@ class RenderCapturer<K extends RenderFormat> {
final ByteData? byteData =
await capture.toByteData(format: ui.ImageByteFormat.rawRgba);
final rawIntList = byteData!.buffer.asInt8List();
// * write raw file for processing
final rawFile = session
.createProcessFile("frameHandling/frame_raw$captureNumber.bmp");
await rawFile.writeAsBytes(rawIntList);
// * write & convert file (to save storage)
final file = session.createInputFile("frame$captureNumber.png");
final saveSize = Size(
// adjust frame size, so that it can be divided by 2
(capture.width / 2).ceil() * 2,
(capture.height / 2).ceil() * 2,
);
await FFmpegKit.executeWithArguments([
"-y",
"-f", "rawvideo", // specify input format
"-pixel_format", "rgba", // maintain transparency
"-video_size", "${capture.width}x${capture.height}", // set capture size
"-i", rawFile.path, // input the raw frame
"-vf", "scale=${saveSize.width}:${saveSize.height}", // scale to save
file.path, //out put png
]);

// * write image to pipe
_writeToPipe(rawIntList);

// * finish
capture.dispose();
rawFile.deleteSync();
if (!_rendering) {
//only record next state, when rendering is done not to mix up notification
_recordActivity(RenderState.handleCaptures, captureNumber,
@@ -335,8 +323,8 @@ class RenderCapturer<K extends RenderFormat> {
}

/// Recording the activity of the current session specifically for capturing
void _recordActivity(
RenderState state, int frame, int? totalFrameTarget, String message) {
void _recordActivity(RenderState state, int frame, int? totalFrameTarget,
String message) {
if (totalFrameTarget != null) {
session.recordActivity(
state, ((1 / totalFrameTarget) * frame).clamp(0.0, 1.0),
@@ -346,4 +334,21 @@ class RenderCapturer<K extends RenderFormat> {
session.recordActivity(state, null, message: message);
}
}

/// Opens the pipe to the ffmpeg process
void openPipe() {
var f = File(session.inputPipe);
_frameWriter = f.openWrite();
}

/// Closes the pipe to the ffmpeg process
Future<void> closePipe() async {
await _frameWriter?.close();
await FFmpegKitConfig.closeFFmpegPipe(session.inputPipe);
}

/// Writes data to the pipe to the ffmpeg process
void _writeToPipe(List<int> data) {
return _frameWriter?.add(data);
}
}
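
Taken together, the capture side now streams raw RGBA bytes into a named pipe instead of encoding a PNG per frame with a separate FFmpegKit call. A minimal sketch of that flow in isolation, assuming a pipe path like the session's inputPipe and a stream of captured ui.Image frames (both created elsewhere in the package):

import 'dart:io';
import 'dart:ui' as ui;

import 'package:ffmpeg_kit_flutter_https_gpl/ffmpeg_kit_config.dart';

/// Sketch only: write captured frames into an ffmpeg named pipe.
Future<void> streamFrames(String pipePath, Stream<ui.Image> frames) async {
  final IOSink sink = File(pipePath).openWrite(); // what openPipe() does
  await for (final ui.Image capture in frames) {
    final byteData =
        await capture.toByteData(format: ui.ImageByteFormat.rawRgba);
    sink.add(byteData!.buffer.asUint8List()); // raw bytes, no per-frame file
    capture.dispose();
  }
  await sink.close(); // flush the writer first ...
  await FFmpegKitConfig.closeFFmpegPipe(pipePath); // ... then signal EOF to ffmpeg
}

Closing the sink before calling closeFFmpegPipe mirrors the order used in closePipe() above.
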
45 changes: 35 additions & 10 deletions lib/src/core.dart
@@ -220,8 +220,12 @@ class RenderController {
.then((detachedSession) async {
final session = _createRenderSessionFrom(detachedSession, notifier);
final capturer = RenderCapturer(session);
final realSession = await capturer.single();
final processor = ImageProcessor(realSession);
capturer.openPipe();
capturer.single().then((value) => capturer.closePipe());
final processor = ImageProcessor(
session,
capturer.firstFrameSize!.width.toInt(),
capturer.firstFrameSize!.height.toInt());
await processor.process();
await session.dispose();
});
@@ -251,8 +255,15 @@ class RenderController {
.then((detachedSession) async {
final session = _createRenderSessionFrom(detachedSession, notifier);
final capturer = RenderCapturer(session);
final realSession = await capturer.run(duration);
final processor = MotionProcessor(realSession);
capturer.openPipe();
await capturer
.single(); // wait for first frame to be captured to get the size of the frame
capturer.run(duration).then(
(value) => capturer.closePipe()); // run the capturer for the duration
final processor = MotionProcessor(
session,
capturer.firstFrameSize!.width.toInt(),
capturer.firstFrameSize!.height.toInt());
await processor.process();
await session.dispose();
});
@@ -291,8 +302,12 @@ class RenderController {
widgetTask,
);
final capturer = RenderCapturer(session, context);
final realSession = await capturer.single();
final processor = ImageProcessor(realSession);
capturer.openPipe();
capturer.single().then((value) => capturer.closePipe());
final processor = ImageProcessor(
session,
capturer.firstFrameSize!.width.toInt(),
capturer.firstFrameSize!.height.toInt());
await processor.process();
await session.dispose();
});
@@ -337,8 +352,13 @@ class RenderController {
widgetTask,
);
final capturer = RenderCapturer(session, context);
final realSession = await capturer.run(duration);
final processor = MotionProcessor(realSession);
capturer.openPipe();
await capturer.single();
capturer.run(duration).then((value) => capturer.closePipe());
final processor = MotionProcessor(
session,
capturer.firstFrameSize!.width.toInt(),
capturer.firstFrameSize!.height.toInt());
await processor.process();
await session.dispose();
});
@@ -440,6 +460,7 @@ class MotionRecorder<T extends MotionFormat> {
);
_capturer = RenderCapturer(_session, context);
_capturer.start();
_capturer.openPipe();
});
if (logInConsole) {
_controller._debugPrintOnStream(
@@ -454,8 +475,12 @@

/// Stops the recording and returns the result of the recording.
Future<RenderResult> stop() async {
final realSession = await _capturer.finish();
final processor = MotionProcessor(realSession);
await _capturer.finish();
await _capturer.closePipe();
final processor = MotionProcessor(
_session,
_capturer.firstFrameSize!.width.toInt(),
_capturer.firstFrameSize!.height.toInt());
processor.process(); // wait for result instead of process
final out = await stream
.firstWhere((event) => event.isResult || event.isFatalError);
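
The ordering in these controller methods is the important part: ffmpeg must already be reading from the pipe while frames are still being written, otherwise the frame writer would eventually block on a full pipe. Condensed from the duration-based capture paths in this file (session and notifier plumbing omitted):

// Inside the async capture callback:
final capturer = RenderCapturer(session);
capturer.openPipe(); // open an IOSink on session.inputPipe
await capturer.single(); // capture one frame so firstFrameSize is known
capturer
    .run(duration) // keep capturing for the full duration ...
    .then((_) => capturer.closePipe()); // ... then close the pipe (not awaited)
final processor = MotionProcessor(
    session,
    capturer.firstFrameSize!.width.toInt(),
    capturer.firstFrameSize!.height.toInt());
await processor.process(); // ffmpeg drains the pipe; completes once it is closed
await session.dispose();
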
32 changes: 24 additions & 8 deletions lib/src/formats/abstract.dart
@@ -74,6 +74,8 @@ abstract class RenderFormat {
required String inputPath,
required String outputPath,
required double frameRate,
required int width,
required int height,
});

/// Scaling ffmpeg filter with appropriate interpolation integration
@@ -118,10 +120,13 @@ abstract class MotionFormat extends RenderFormat {
/// Default motion processor. This can be overridden if more/other settings are
/// needed.
@override
FFmpegRenderOperation processor(
{required String inputPath,
required String outputPath,
required double frameRate}) {
FFmpegRenderOperation processor({
required String inputPath,
required String outputPath,
required double frameRate,
required int width,
required int height,
}) {
final audioInput = audio != null && audio!.isNotEmpty
? audio!.map((e) => "-i??${e.path}").join('??')
: null;
@@ -138,6 +143,10 @@
"aac??-shortest??-pix_fmt??yuv420p??-vsync??2"
: "-map??[v]??-pix_fmt??yuv420p";
return FFmpegRenderOperation([
"-f", "rawvideo", // input format
"-pixel_format", "rgba", // input pixel format
"-s", "${width}x${height}", // input size
"-r", "$frameRate", // input frame rate
"-i", inputPath, // retrieve captures
audioInput,
"-filter_complex",
@@ -176,12 +185,19 @@ abstract class ImageFormat extends RenderFormat {
/// Default image processor. This can be overridden if more settings are
/// needed.
@override
FFmpegRenderOperation processor(
{required String inputPath,
required String outputPath,
required double frameRate}) {
FFmpegRenderOperation processor({
required String inputPath,
required String outputPath,
required double frameRate,
required int width,
required int height,
}) {
return FFmpegRenderOperation([
"-y",
"-f", "rawvideo",
"-pixel_format", "rgba",
"-s", "${width}x${height}",
"-r", "$frameRate",
"-i", inputPath, // input image
scalingFilter != null ? "-vf??$scalingFilter" : null,
"-vframes", "1", // indicate that there is only one frame
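
Because a rawvideo stream has no per-frame headers, ffmpeg derives frame boundaries purely from the declared geometry: every frame must be exactly width × height × 4 bytes of RGBA data, which is why the processors now need the capture size up front. A small, hypothetical sanity check on the writer side (not part of this diff):

/// Sketch: verify a raw RGBA buffer matches the size declared to ffmpeg.
void checkFrameLength(List<int> rgbaBytes, int width, int height) {
  final expected = width * height * 4; // 4 bytes per pixel for rgba
  if (rgbaBytes.length != expected) {
    throw StateError('Frame is ${rgbaBytes.length} bytes, expected $expected '
        '(${width}x$height RGBA); ffmpeg would lose frame alignment.');
  }
}
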
11 changes: 7 additions & 4 deletions lib/src/formats/image.dart
@@ -94,10 +94,13 @@ class BmpFormat extends ImageFormat {
}

@override
FFmpegRenderOperation processor(
{required String inputPath,
required String outputPath,
required double frameRate}) {
FFmpegRenderOperation processor({
required String inputPath,
required String outputPath,
required double frameRate,
required int width,
required int height,
}) {
return FFmpegRenderOperation([
"-y",
"-i", inputPath, // input image
6 changes: 6 additions & 0 deletions lib/src/formats/motion.dart
@@ -91,9 +91,15 @@ class GifFormat extends MotionFormat {
required String inputPath,
required String outputPath,
required double frameRate,
required int width,
required int height,
}) {
return FFmpegRenderOperation([
"-y",
"-f", "rawvideo", // input format
"-pixel_format", "rgba", // input pixel format
"-s", "${width}x${height}", // input size
"-r", "$frameRate", // input frame rate
"-i", inputPath, // retrieve captures
transparency
? "-filter_complex??[0:v] setpts=N/($frameRate*TB),"
38 changes: 21 additions & 17 deletions lib/src/process.dart
@@ -11,14 +11,18 @@ import 'package:render/src/service/settings.dart';
import 'service/exception.dart';

abstract class RenderProcessor<T extends RenderFormat> {
final RenderSession<T, RealRenderSettings> session;
final RenderSession<T, RenderSettings> session;

RenderProcessor(this.session);
RenderProcessor(this.session, this.width, this.height);

bool _processing = false;

String get inputPath;

int width;

int height;

///Converts saved frames from temporary directory to output file
Future<void> process() async {
if (_processing) {
@@ -27,8 +31,7 @@ abstract class RenderProcessor<T extends RenderFormat> {
}
_processing = true;
try {
final output =
await _processTask(session.format.processShare);
final output = await _processTask(session.format.processShare);
session.recordResult(output);
_processing = false;
} on RenderException catch (error) {
@@ -41,12 +44,17 @@
Future<File> _processTask(double progressShare) async {
final mainOutputFile =
session.createOutputFile("output_main.${session.format.extension}");
double frameRate = 1;
if (session.settings.isMotion) {
frameRate = session.settings.asMotion!.frameRate.toDouble();
}
// Receive main operation processing instructions
final operation = session.format.processor(
inputPath: inputPath,
outputPath: mainOutputFile.path,
frameRate: session.settings.realFrameRate,
);
inputPath: inputPath,
outputPath: mainOutputFile.path,
frameRate: frameRate,
width: width,
height: height);
await _executeCommand(
operation.arguments,
progressShare: progressShare,
@@ -86,13 +94,9 @@ abstract class RenderProcessor<T extends RenderFormat> {
}
},
(Statistics statistics) {
final progression = ((statistics.getTime() * 100) ~/
session.settings.capturingDuration.inMilliseconds)
.clamp(0, 100) /
100;
session.recordActivity(
RenderState.processing,
progression.toDouble(),
null,
message: "Converting captures",
);
},
@@ -113,15 +117,15 @@
}

class ImageProcessor extends RenderProcessor<ImageFormat> {
ImageProcessor(super.session);
ImageProcessor(super.session, super.width, super.height);

@override
String get inputPath => "${session.inputDirectory}/frame0.png";
String get inputPath => session.inputPipe;
}

class MotionProcessor extends RenderProcessor<MotionFormat> {
MotionProcessor(super.session);
MotionProcessor(super.session, super.width, super.height);

@override
String get inputPath => "${session.inputDirectory}/frame%d.png";
String get inputPath => session.inputPipe;
}
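
Both processors now point inputPath at session.inputPipe instead of numbered PNG frames. The pipe itself is created outside this diff; one plausible way to obtain it with ffmpeg_kit, shown here purely as an assumption about the session setup:

import 'package:ffmpeg_kit_flutter_https_gpl/ffmpeg_kit_config.dart';

/// Sketch (assumed setup): register a named pipe that ffmpeg reads via "-i <pipe>".
Future<String> createInputPipe() async {
  final pipe = await FFmpegKitConfig.registerNewFFmpegPipe();
  if (pipe == null) {
    throw StateError('Could not register an ffmpeg pipe');
  }
  return pipe; // would be stored on the session as inputPipe
}
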
2 changes: 1 addition & 1 deletion lib/src/service/notifier.dart
@@ -152,7 +152,7 @@ class RenderResult extends RenderActivity {
final File output;

///The settings used to create the output file.
final RealRenderSettings usedSettings;
final RenderSettings usedSettings;

final RenderFormat format;
