adding missing ffmpeg help links

John Andrews
2022-04-24 14:13:59 +12:00
parent f1c3145260
commit 67aa8d549e
3 changed files with 261 additions and 261 deletions


@@ -1,65 +1,66 @@
using FileFlows.VideoNodes.FfmpegBuilderNodes.Models;
using System.Diagnostics.CodeAnalysis;

namespace FileFlows.VideoNodes.FfmpegBuilderNodes;

public class FfmpegBuilderAudioAdjustVolume : FfmpegBuilderNode
{
    public override string HelpUrl => "https://github.com/revenz/FileFlows/wiki/FFMPEG-Builder:-Audio-Adjust-Volume";
    public override string Icon => "fas fa-volume-up";

    [NumberInt(1)]
    [Range(0, 1000)]
    public int VolumePercent { get; set; }

    [Boolean(2)]
    public bool AllAudio { get; set; }

    [TextVariable(3)]
    public string Pattern { get; set; }

    [Boolean(4)]
    public bool NotMatching { get; set; }

    public override int Execute(NodeParameters args)
    {
        base.Init(args);

        string ffmpegExe = GetFFMpegExe(args);
        if (string.IsNullOrEmpty(ffmpegExe))
            return -1;

        if (Model.AudioStreams?.Any() != true)
        {
            args.Logger?.ILog("No audio streams detected");
            return 2;
        }

        if (VolumePercent == 100)
        {
            args.Logger?.ILog("Volume percent set to 100, no adjustment necessary");
            return 2;
        }

        float volume = this.VolumePercent / 100f;
        bool working = false;
        foreach (var item in Model.AudioStreams.Select((x, index) => (stream: x, index)))
        {
            var audio = item.stream.Stream;
            if (item.stream.Deleted)
                continue;

            if (string.IsNullOrEmpty(Pattern) == false)
            {
                if (PatternMatches2(this.Pattern, item.index, item.stream, this.NotMatching) == false)
                    continue;
            }
            else if (AllAudio == false && item.index > 0)
                continue;

            item.stream.Filter.Add($"volume={volume.ToString(".0######")}");
            working = true;
        }
        return working ? 1 : 2;
    }
}
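
For clarity (not part of the commit): the node above simply maps the configured percentage onto ffmpeg's volume audio filter. A minimal, self-contained sketch of the string it attaches to a stream, using a hypothetical VolumePercent of 150:

using System;

class VolumeFilterExample
{
    static void Main()
    {
        int volumePercent = 150;              // hypothetical user setting
        float volume = volumePercent / 100f;  // 1.5
        // Same format string the node uses; prints "volume=1.5"
        string filter = $"volume={volume.ToString(".0######")}";
        Console.WriteLine(filter);
    }
}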


@@ -1,78 +1,78 @@
using FileFlows.VideoNodes.FfmpegBuilderNodes.Models;
using System.Diagnostics.CodeAnalysis;

namespace FileFlows.VideoNodes.FfmpegBuilderNodes;

public class FfmpegBuilderAudioNormalization : FfmpegBuilderNode
{
    public override string HelpUrl => "https://github.com/revenz/FileFlows/wiki/FFMPEG-Builder:-Audio-Normalization";
    public override string Icon => "fas fa-volume-up";
    public override int Outputs => 2;

    [Boolean(1)]
    public bool AllAudio { get; set; }

    [Boolean(2)]
    public bool TwoPass { get; set; }

    [TextVariable(3)]
    public string Pattern { get; set; }

    [Boolean(4)]
    public bool NotMatching { get; set; }

    [RequiresUnreferencedCode("")]
    public override int Execute(NodeParameters args)
    {
        base.Init(args);

        string ffmpegExe = GetFFMpegExe(args);
        if (string.IsNullOrEmpty(ffmpegExe))
            return -1;

        if (Model.AudioStreams?.Any() != true)
        {
            args.Logger?.ILog("No audio streams detected");
            return 2;
        }

        // store them in case we are creating duplicate tracks from the same source,
        // so we don't need to calculate the normalization each time
        Dictionary<int, string> normalizedTracks = new Dictionary<int, string>();

        bool normalizing = false;
        foreach (var item in Model.AudioStreams.Select((x, index) => (stream: x, index)))
        {
            var audio = item.stream.Stream;
            if (item.stream.Deleted)
                continue;

            if (string.IsNullOrEmpty(Pattern) == false)
            {
                if (PatternMatches2(this.Pattern, item.index, item.stream, this.NotMatching) == false)
                    continue;
            }
            else if (AllAudio == false && item.index > 0)
                continue;

            if (TwoPass)
            {
                if (normalizedTracks.ContainsKey(audio.TypeIndex))
                    item.stream.Filter.Add(normalizedTracks[audio.TypeIndex]);
                else
                {
                    string twoPass = AudioNormalization.DoTwoPass(this, args, ffmpegExe, audio.TypeIndex);
                    item.stream.Filter.Add(twoPass);
                    normalizedTracks.Add(audio.TypeIndex, twoPass);
                }
            }
            else
            {
                item.stream.Filter.Add($"loudnorm={AudioNormalization.LOUDNORM_TARGET}");
            }
            normalizing = true;
        }
        return normalizing ? 1 : 2;
    }
}
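
Not part of the commit, but worth spelling out: in single-pass mode each selected stream simply gets the filter loudnorm=I=-24:LRA=7:TP=-2.0, while in two-pass mode the measured filter is cached per source TypeIndex so duplicate tracks built from the same stream are only measured once. A small runnable sketch of that caching pattern (the filter string below is a stand-in for the real DoTwoPass result):

using System;
using System.Collections.Generic;

class LoudnormCacheExample
{
    static void Main()
    {
        // Hypothetical output tracks: the first two are copies of the same source stream (TypeIndex 0).
        int[] trackSourceIndexes = { 0, 0, 1 };

        var normalizedTracks = new Dictionary<int, string>();
        foreach (int typeIndex in trackSourceIndexes)
        {
            if (!normalizedTracks.TryGetValue(typeIndex, out string filter))
            {
                // Stand-in for AudioNormalization.DoTwoPass(...), which runs the measuring pass.
                filter = $"two-pass loudnorm filter for source {typeIndex}";
                normalizedTracks[typeIndex] = filter;
            }
            Console.WriteLine($"source {typeIndex} -> {filter}");
        }
    }
}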


@@ -1,166 +1,166 @@
namespace FileFlows.VideoNodes;

using FileFlows.Plugin;
using FileFlows.Plugin.Attributes;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;

public class AudioNormalization : EncodingNode
{
    public override int Outputs => 2;
    public override string Icon => "fas fa-volume-up";

    [Boolean(1)]
    public bool AllAudio { get; set; }

    [Boolean(2)]
    public bool TwoPass { get; set; }

    [TextVariable(3)]
    public string Pattern { get; set; }

    [Boolean(4)]
    public bool NotMatching { get; set; }

    internal const string LOUDNORM_TARGET = "I=-24:LRA=7:TP=-2.0";

    public override int Execute(NodeParameters args)
    {
        try
        {
            VideoInfo videoInfo = GetVideoInfo(args);
            if (videoInfo == null)
                return -1;

            string ffmpegExe = GetFFMpegExe(args);
            if (string.IsNullOrEmpty(ffmpegExe))
                return -1;

            if (videoInfo.AudioStreams?.Any() != true)
            {
                args.Logger?.ILog("No audio streams detected");
                return 2;
            }

            List<string> ffArgs = new List<string>();
            ffArgs.AddRange(new[] { "-strict", "-2" }); // allow experimental stuff
            ffArgs.AddRange(new[] { "-c", "copy" });
            if (videoInfo.VideoStreams?.Any() == true)
                ffArgs.AddRange(new[] { "-map", "0:v" });

            List<int> tracksToNormalize = new();
            for (int j = 0; j < videoInfo.AudioStreams.Count; j++)
            {
                var audio = videoInfo.AudioStreams[j];
                if (string.IsNullOrEmpty(Pattern) == false)
                {
                    string audioString = audio.Title + ":" + audio.Language + ":" + audio.Codec;
                    args.Logger?.ILog($"Audio Track [{j}] test string: {audioString}");
                    bool match = new Regex(Pattern, RegexOptions.IgnoreCase).IsMatch(audioString);
                    if (NotMatching)
                        match = !match;
                    if (match == false)
                    {
                        ffArgs.AddRange(new[] { "-map", $"0:a:{j}" });
                        continue;
                    }
                }

                if (AllAudio || j == 0)
                {
                    if (TwoPass)
                    {
                        string twoPass = DoTwoPass(this, args, ffmpegExe, j);
                        ffArgs.AddRange(new[] { "-map", $"0:a:{j}", "-c:a:" + j, audio.Codec, "-filter:a:" + j, twoPass });
                    }
                    else
                    {
                        ffArgs.AddRange(new[] { "-map", $"0:a:{j}", "-c:a:" + j, audio.Codec, "-filter:a:" + j, $"loudnorm={LOUDNORM_TARGET}" });
                    }
                    tracksToNormalize.Add(j);
                }
                else
                {
                    ffArgs.AddRange(new[] { "-map", $"0:a:{j}" });
                }
            }

            if (tracksToNormalize.Any() == false)
            {
                args.Logger?.ILog("No audio streams to normalize");
                return 2;
            }

            foreach (int i in tracksToNormalize)
                args.Logger?.ILog($"Normalizing track [{i}]: {videoInfo.AudioStreams[i].Title};{videoInfo.AudioStreams[i].Language};{videoInfo.AudioStreams[i].Codec};");

            if (videoInfo.SubtitleStreams?.Any() == true)
                ffArgs.AddRange(new[] { "-map", "0:s" });

            string extension = new FileInfo(args.WorkingFile).Extension;
            if (extension.StartsWith("."))
                extension = extension.Substring(1);

            if (Encode(args, ffmpegExe, ffArgs, extension) == false)
                return -1;

            return 1;
        }
        catch (Exception ex)
        {
            args.Logger?.ELog("Failed processing VideoFile: " + ex.Message);
            return -1;
        }
    }

    [RequiresUnreferencedCode("Calls System.Text.Json.JsonSerializer.Deserialize<FileFlows.VideoNodes.AudioNormalization.LoudNormStats>(string, System.Text.Json.JsonSerializerOptions?)")]
    public static string DoTwoPass(EncodingNode node, NodeParameters args, string ffmpegExe, int audioIndex)
    {
        // -af loudnorm=I=-24:LRA=7:TP=-2.0
        string output;
        var result = node.Encode(args, ffmpegExe, new List<string>
        {
            "-hide_banner",
            "-i", args.WorkingFile,
            "-strict", "-2", // allow experimental stuff
            "-map", "0:a:" + audioIndex,
            "-af", "loudnorm=" + LOUDNORM_TARGET + ":print_format=json",
            "-f", "null",
            "-"
        }, out output, updateWorkingFile: false, dontAddInputFile: true);

        if (result == false)
            throw new Exception("Failed to process audio track");

        int index = output.LastIndexOf("{");
        if (index == -1)
            throw new Exception("Failed to detect json in output");
        string json = output.Substring(index);
        json = json.Substring(0, json.IndexOf("}") + 1);
        if (string.IsNullOrEmpty(json))
            throw new Exception("Failed to parse TwoPass json");

#pragma warning disable CS8600 // Converting null literal or possible null value to non-nullable type.
        LoudNormStats stats = JsonSerializer.Deserialize<LoudNormStats>(json);
#pragma warning restore CS8600 // Converting null literal or possible null value to non-nullable type.

        string ar = $"loudnorm=print_format=summary:linear=true:{LOUDNORM_TARGET}:measured_I={stats.input_i}:measured_LRA={stats.input_lra}:measured_tp={stats.input_tp}:measured_thresh={stats.input_thresh}:offset={stats.target_offset}";
        return ar;
    }

    private class LoudNormStats
    {
        /*
        {
            "input_i" : "-7.47",
            "input_tp" : "12.33",

@@ -173,12 +173,11 @@

            "normalization_type" : "dynamic",
            "target_offset" : "0.25"
        }
        */
        public string input_i { get; set; }
        public string input_tp { get; set; }
        public string input_lra { get; set; }
        public string input_thresh { get; set; }
        public string target_offset { get; set; }
    }
}
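
A usage note, not part of the commit: DoTwoPass runs ffmpeg once with loudnorm=...:print_format=json to measure the track, deserializes the printed stats, and then returns the linear second-pass filter string. A minimal, self-contained sketch of that last step, using the input_i, input_tp and target_offset values from the sample JSON above and invented values for the fields the sample omits:

using System;
using System.Text.Json;

// Mirrors the fields the private LoudNormStats class reads from ffmpeg's loudnorm JSON output.
class MeasuredStats
{
    public string input_i { get; set; }
    public string input_tp { get; set; }
    public string input_lra { get; set; }
    public string input_thresh { get; set; }
    public string target_offset { get; set; }
}

class TwoPassFilterExample
{
    const string LOUDNORM_TARGET = "I=-24:LRA=7:TP=-2.0";

    static void Main()
    {
        // First-pass measurement as printed by ffmpeg; input_lra and input_thresh values here are invented.
        string json = "{ \"input_i\" : \"-7.47\", \"input_tp\" : \"12.33\", \"input_lra\" : \"9.00\", \"input_thresh\" : \"-18.06\", \"target_offset\" : \"0.25\" }";
        var stats = JsonSerializer.Deserialize<MeasuredStats>(json);

        // Second-pass filter string, built the same way DoTwoPass builds it.
        string filter = $"loudnorm=print_format=summary:linear=true:{LOUDNORM_TARGET}" +
                        $":measured_I={stats.input_i}:measured_LRA={stats.input_lra}" +
                        $":measured_tp={stats.input_tp}:measured_thresh={stats.input_thresh}" +
                        $":offset={stats.target_offset}";
        Console.WriteLine(filter);
    }
}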