✨ Implement MODS to AVI
pleonex committed Nov 8, 2023
1 parent 5913ec0 commit ef84408
Showing 8 changed files with 216 additions and 32 deletions.
1 change: 1 addition & 0 deletions src/Directory.Packages.props
@@ -2,6 +2,7 @@
<!-- Centralize dependency management -->
<ItemGroup>
<PackageVersion Include="BenchmarkDotNet" Version="0.13.9" />
<PackageVersion Include="SharpAvi" Version="3.0.1" />
<PackageVersion Include="Texim" Version="0.1.0-preview.195" />
<PackageVersion Include="Yarhl" Version="4.0.0-preview.221" />
<PackageVersion Include="System.CommandLine" Version="2.0.0-beta4.22272.1" />
36 changes: 34 additions & 2 deletions src/PlayMobic.Tool/Program.cs
Expand Up @@ -14,6 +14,7 @@
return await new RootCommand("Tool for MODS videos") {
SetupInfoCommand(),
SetupExtraFramesCommand(),
SetupMods2AviCommand(),
SetupDemuxCommand(),
}.InvokeAsync(args);

@@ -41,11 +42,24 @@ Command SetupExtraFramesCommand()
return command;
}

Command SetupMods2AviCommand()
{
var inputArg = new Option<FileInfo>("--input", "Path to the .mods file") { IsRequired = true };
var outputArg = new Option<string>("--output", "Path to the output AVI file") { IsRequired = true };
var command = new Command("mods2avi", "Convert a MODS video into an AVI file") {
inputArg,
outputArg,
};
command.SetHandler(Mods2Avi, inputArg, outputArg);

return command;
}

Command SetupDemuxCommand()
{
var inputArg = new Option<FileInfo>("--input", "Path to the .mods file") { IsRequired = true };
var outputArg = new Option<string>("--output", "Path to the folder to write the streams") { IsRequired = true };
var command = new Command("demux", "Extract each video and audio streams") {
var command = new Command("demux", "Extract and decode each video and audio streams") {
inputArg,
outputArg,
};
@@ -97,6 +111,7 @@ void ExtractFrames(FileInfo videoFile, string outputPath)

var demuxer = new ModsDemuxer(video);
var videoDecoder = new MobiclipDecoder(info.Width, info.Height);
byte[] rgbFrame = new byte[info.Width * info.Height * 4];
var image2BinaryBitmap = new FullImage2Bitmap();

// This works because video is always the first stream in the packets
@@ -108,7 +123,7 @@ void ExtractFrames(FileInfo videoFile, string outputPath)
throw new NotSupportedException("Unsupported colorspace");
}

byte[] rgbFrame = ColorSpaceConverter.YCoCg2Rgb32(frame);
ColorSpaceConverter.YCoCg2Rgb32(frame, rgbFrame);
var frameImage = new FullImage(frame.Width, frame.Height) {
Pixels = Rgb32.Instance.Decode(rgbFrame),
};
@@ -122,6 +137,23 @@ void ExtractFrames(FileInfo videoFile, string outputPath)
Console.WriteLine("Done");
}

void Mods2Avi(FileInfo videoFile, string outputPath)
{
Console.WriteLine("Input: {0}", videoFile.FullName);
Console.WriteLine("Output: {0}", outputPath);

Console.WriteLine("Decoding MODS video into an AVI file...");
var watch = Stopwatch.StartNew();

using DataStream outputStream = DataStreamFactory.FromFile(outputPath, FileOpenMode.Write);
using Node videoNode = NodeFactory.FromFile(videoFile.FullName, FileOpenMode.Read)
.TransformWith<Binary2Mods>()
.TransformWith(new Mods2BinaryAvi(outputStream));

watch.Stop();
Console.WriteLine("Done in {0}", watch.Elapsed);
}

void Demux(FileInfo videoFile, string outputPath)
{
string videoPath = Path.GetFullPath(Path.Combine(outputPath, videoFile.Name + ".rawvideo"));
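The new mods2avi subcommand registered above converts a MODS container straight into an AVI file. Assuming the published executable is called PlayMobic.Tool (the binary name does not appear in this diff), an invocation would look roughly like: PlayMobic.Tool mods2avi --input movie.mods --output movie.avi, where both paths are placeholders.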
20 changes: 0 additions & 20 deletions src/PlayMobic/Audio/DataStreamExtensions.cs

This file was deleted.

45 changes: 45 additions & 0 deletions src/PlayMobic/Audio/IOAudioExtensions.cs
@@ -0,0 +1,45 @@
namespace PlayMobic.Audio;
using System;
using Yarhl.IO;

public static class IOAudioExtensions
{
public static void WriteInterleavedPCM16(this DataStream stream, Stream audioData, int channels)
{
int samplesPerChannel = (int)(audioData.Length / channels / 2);

Span<byte> tempBuffer = stackalloc byte[2];
for (int i = 0; i < samplesPerChannel; i++) {
for (int c = 0; c < channels; c++) {
audioData.Position = (i * 2) + (c * samplesPerChannel * 2);
audioData.Read(tempBuffer);
stream.Write(tempBuffer);
}
}
}

public static void ReadInterleavedPCM16(this DataStream stream, int audioDataLength, byte[] output, int channels)
{
if (output.Length < stream.Length) {
throw new ArgumentException("Output is too small");
}

const int SamplesPerBlock = 256;
int channelBlockSize = SamplesPerBlock * 2;
int blockSize = channelBlockSize * channels;
int blocksCount = audioDataLength / blockSize;

int outputPos = 0;
for (int b = 0; b < blocksCount; b++) {
int blockOffset = b * blockSize;

for (int i = 0; i < SamplesPerBlock; i++) {
for (int c = 0; c < channels; c++) {
stream.Position = blockOffset + (c * channelBlockSize) + (i * 2);
stream.Read(output, outputPos, 2);
outputPos += 2;
}
}
}
}
}
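
A minimal usage sketch for the new extension methods (variable names and buffer contents invented for illustration), based on the Yarhl DataStream API used above. WriteInterleavedPCM16 expects the input laid out planar (all of channel 0 followed by all of channel 1) and writes sample-interleaved PCM16 to the target stream:

using PlayMobic.Audio;
using Yarhl.IO;

byte[] leftPcm = new byte[256 * 2];   // 256 16-bit samples for channel 0 (placeholder data)
byte[] rightPcm = new byte[256 * 2];  // 256 16-bit samples for channel 1 (placeholder data)

using var planar = new DataStream();
planar.Write(leftPcm);   // channel 0 first...
planar.Write(rightPcm);  // ...then channel 1, back to back

using var interleaved = new DataStream();

// Produces L0 R0 L1 R1... sample ordering, as consumed by WAV/AVI audio streams.
interleaved.WriteInterleavedPCM16(planar, channels: 2);

ReadInterleavedPCM16 performs the inverse, but in fixed blocks of 256 samples per channel (the SamplesPerBlock constant above).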
97 changes: 97 additions & 0 deletions src/PlayMobic/Containers/Mods/Mods2BinaryAvi.cs
@@ -0,0 +1,97 @@
namespace PlayMobic.Containers.Mods;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using PlayMobic.Audio;
using PlayMobic.Video.Mobiclip;
using PlayMobic.Video;
using SharpAvi.Codecs;
using SharpAvi.Output;
using Yarhl.FileFormat;
using Yarhl.IO;

public class Mods2BinaryAvi : IConverter<ModsVideo, BinaryFormat>
{
private readonly Stream output;

public Mods2BinaryAvi(Stream output)
{
this.output = output;
}

public event EventHandler<int>? ProgressUpdate;

public BinaryFormat Convert(ModsVideo source)
{
ArgumentNullException.ThrowIfNull(source);

using var writer = new AviWriter(output, true) {
FramesPerSecond = (decimal)source.Info.FramesPerSecond,
EmitIndex1 = true,
};

IAviVideoStream videoStream = writer.AddUncompressedVideoStream(source.Info.Width, source.Info.Height);
IAviAudioStream audioStream = writer.AddAudioStream(source.Info.AudioChannelsCount, source.Info.AudioFrequency, 16);
Decode(source, videoStream, audioStream);

writer.Close();
return new BinaryFormat(output);
}

private void Decode(ModsVideo video, IAviVideoStream videoStream, IAviAudioStream audioStream)
{
ModsInfo info = video.Info;
var videoDecoder = new MobiclipDecoder(info.Width, info.Height, isStereo: false);
var audioDecoders = new IAudioDecoder[info.AudioChannelsCount];
for (int i = 0; i < audioDecoders.Length; i++) {
audioDecoders[i] = CreateAudioDecoder(info.AudioCodec);
}

byte[] rgbFrame = new byte[info.Width * info.Height * 4];
using var audioBlocksBuffer = new DataStream();
byte[] audioInterleaveBuffer = new byte[info.AudioChannelsCount * 6000];
int audioBlockLength = 0;

var demuxer = new ModsDemuxer(video);
foreach (MediaPacket framePacket in demuxer.ReadFrames()) {
if (framePacket is VideoPacket) {
// Flush previous audio data block
if (audioBlocksBuffer.Length > 0) {
audioBlocksBuffer.ReadInterleavedPCM16(audioBlockLength, audioInterleaveBuffer, info.AudioChannelsCount);
audioStream.WriteBlock(audioInterleaveBuffer, 0, audioBlockLength);
audioBlocksBuffer.Position = 0;
audioBlockLength = 0;
}

FrameYuv420 frame = videoDecoder.DecodeFrame(framePacket.Data);
if (frame.ColorSpace is not YuvColorSpace.YCoCg) {
throw new NotSupportedException("Unsupported colorspace");
}

ColorSpaceConverter.YCoCg2Bgr32(frame, rgbFrame);
videoStream.WriteFrame(framePacket.IsKeyFrame, rgbFrame);
ProgressUpdate?.Invoke(this, framePacket.FrameCount);
} else if (framePacket is AudioPacket audioPacket) {
byte[] channelData = audioDecoders[audioPacket.TrackIndex].Decode(audioPacket.Data, audioPacket.IsKeyFrame);
audioBlocksBuffer.Write(channelData);
audioBlockLength += channelData.Length;
}
}

// Flush last block
if (audioBlocksBuffer.Length > 0) {
audioBlocksBuffer.ReadInterleavedPCM16(audioBlockLength, audioInterleaveBuffer, info.AudioChannelsCount);
audioStream.WriteBlock(audioInterleaveBuffer, 0, audioBlockLength);
}
}

private static IAudioDecoder CreateAudioDecoder(AudioCodecKind codecKind)
{
return codecKind switch {
AudioCodecKind.ImaAdPcm => new ImaAdpcmDecoder(),
_ => throw new NotImplementedException("Unsupported audio codec"),
};
}
}
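
A sketch of driving the new converter directly and subscribing to its ProgressUpdate event, reusing the same Yarhl node pipeline as the mods2avi command handler; the file paths are placeholders:

using System;
using PlayMobic.Containers.Mods;
using Yarhl.FileSystem;
using Yarhl.IO;

using DataStream aviStream = DataStreamFactory.FromFile("movie.avi", FileOpenMode.Write);

var converter = new Mods2BinaryAvi(aviStream);
converter.ProgressUpdate += (_, frameCount) => Console.WriteLine($"Decoded frame {frameCount}");

// Same pipeline as in Program.cs: binary file -> ModsVideo -> AVI written to aviStream.
using Node video = NodeFactory.FromFile("movie.mods", FileOpenMode.Read)
    .TransformWith<Binary2Mods>()
    .TransformWith(converter);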
8 changes: 4 additions & 4 deletions src/PlayMobic/Containers/Mods/Mods2RawContainer.cs
@@ -49,7 +49,7 @@ private void Decode(ModsVideo video, DataStream videoStream, DataStream audioStr
}

var colorConvertedFrame = new FrameYuv420(info.Width, info.Height);
using var audioInterleaveBuffer = new DataStream();
using var audioBlockBuffer = new DataStream();
var demuxer = new ModsDemuxer(video);
foreach (MediaPacket framePacket in demuxer.ReadFrames()) {
if (framePacket is VideoPacket) {
@@ -65,11 +65,11 @@ private void Decode(ModsVideo video, DataStream videoStream, DataStream audioStr
ProgressUpdate?.Invoke(this, framePacket.FrameCount);
} else if (framePacket is AudioPacket audioPacket) {
byte[] channelData = audioDecoders[audioPacket.TrackIndex].Decode(audioPacket.Data, audioPacket.IsKeyFrame);
audioInterleaveBuffer.Write(channelData);
audioBlockBuffer.Write(channelData);

if (audioPacket.TrackIndex + 1 == info.AudioChannelsCount) {
audioStream.WriteInterleavedPCM16(audioInterleaveBuffer, info.AudioChannelsCount);
audioInterleaveBuffer.Position = 0;
audioStream.WriteInterleavedPCM16(audioBlockBuffer, info.AudioChannelsCount);
audioBlockBuffer.Position = 0;
}
}
}
1 change: 1 addition & 0 deletions src/PlayMobic/PlayMobic.csproj
@@ -22,6 +22,7 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="SharpAvi" />
<PackageReference Include="Yarhl" />
</ItemGroup>

40 changes: 34 additions & 6 deletions src/PlayMobic/Video/ColorSpaceConverter.cs
@@ -4,9 +4,11 @@

public static class ColorSpaceConverter
{
public static byte[] YCoCg2Rgb32(FrameYuv420 source)
public static void YCoCg2Rgb32(FrameYuv420 source, Span<byte> output)
{
byte[] rgb = new byte[source.Width * source.Height * 4];
if (output.Length != source.Width * source.Height * 4) {
throw new ArgumentException("Invalid output size");
}

for (int y = 0; y < source.Height; y++) {
for (int x = 0; x < source.Width; x++) {
@@ -21,13 +23,39 @@ public static byte[] YCoCg2Rgb32(FrameYuv420 source)
int r = tmp + co;

int index = ((y * source.Width) + x) * 4;
rgb[index + 0] = ClampByte(r);
rgb[index + 1] = ClampByte(g);
rgb[index + 2] = ClampByte(b);
output[index + 0] = ClampByte(r);
output[index + 1] = ClampByte(g);
output[index + 2] = ClampByte(b);
output[index + 3] = 0; // not used
}
}
}

public static void YCoCg2Bgr32(FrameYuv420 source, Span<byte> output)
{
if (output.Length != source.Width * source.Height * 4) {
throw new ArgumentException("Invalid output size");
}

for (int y = 0; y < source.Height; y++) {
for (int x = 0; x < source.Width; x++) {
// luma is in range 0-255 but chroma is centered at 128, center at 0
byte luma = source.Luma[x, y];
int co = source.ChromaU[x / 2, y / 2] - 128;
int cg = source.ChromaV[x / 2, y / 2] - 128;

return rgb;
int tmp = luma - cg;
int g = luma + cg;
int b = tmp - co;
int r = tmp + co;

int index = ((y * source.Width) + x) * 4;
output[index + 0] = ClampByte(b);
output[index + 1] = ClampByte(g);
output[index + 2] = ClampByte(r);
output[index + 3] = 0; // not used
}
}
}

public static void YCoCg2YCbCr(FrameYuv420 source, FrameYuv420 output)
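A worked example of the shared YCoCg math above (values chosen for illustration): for a pixel with luma 120, ChromaU 148 and ChromaV 118, the centered chroma components are co = 20 and cg = -10, so tmp = 120 - (-10) = 130, g = 120 + (-10) = 110, b = 130 - 20 = 110 and r = 130 + 20 = 150. YCoCg2Rgb32 then stores the bytes 150, 110, 110, 0 for that pixel, while YCoCg2Bgr32 writes the same values with red and blue swapped (110, 110, 150, 0), the byte order the new Mods2BinaryAvi path feeds to the uncompressed AVI video stream.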
