Implement IMA ADPCM WAV file support

Closes #4628
James Slater
2014-06-04 22:19:03 +01:00
committed by reaperrr
parent 780fd0df29
commit a5978b5c55
3 changed files with 192 additions and 7 deletions


@@ -1,6 +1,6 @@
#region Copyright & License Information
/*
* Copyright 2007-2013 The OpenRA Developers (see AUTHORS)
* Copyright 2007-2014 The OpenRA Developers (see AUTHORS)
* This file is part of OpenRA, which is free software. It is made
* available to you under the terms of the GNU General Public License
* as published by the Free Software Foundation. For more information,
@@ -26,9 +26,13 @@ namespace OpenRA.FileFormats
public readonly int BlockAlign;
public readonly int BitsPerSample;
public readonly int UncompressedSize;
public readonly int DataSize;
public readonly byte[] RawOutput;
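// wFormatTag values from the fmt chunk: 0x0001 = uncompressed PCM, 0x0011 = IMA (DVI) ADPCM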
public enum WaveType { Pcm = 0x1, Adpcm = 0x11 };
public static WaveType Type { get; private set; }
public WavLoader(Stream s)
{
while (s.Position < s.Length)
@@ -47,16 +51,24 @@ namespace OpenRA.FileFormats
break;
case "fmt ":
FmtChunkSize = s.ReadInt32();
if (FmtChunkSize != 16)
throw new NotSupportedException("{0} fmt chunk size is not a supported encoding scheme.".F(FmtChunkSize));
AudioFormat = s.ReadInt16();
if (AudioFormat != 1)
throw new NotSupportedException("Non-PCM compression is not supported.");
Type = (WaveType)AudioFormat;
if (Type != WaveType.Pcm && Type != WaveType.Adpcm)
throw new NotSupportedException("Compression type is not supported.");
Channels = s.ReadInt16();
SampleRate = s.ReadInt32();
ByteRate = s.ReadInt32();
BlockAlign = s.ReadInt16();
BitsPerSample = s.ReadInt16();
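// Skip any fmt extension bytes beyond the 16 standard bytes read above (ADPCM files typically carry extra fields here)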
s.ReadBytes(FmtChunkSize - 16);
break;
case "fact":
{
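// For compressed formats, the "fact" chunk records the uncompressed length in sample frames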
var chunkSize = s.ReadInt32();
UncompressedSize = s.ReadInt32();
s.ReadBytes(chunkSize - 4);
}
break;
case "data":
DataSize = s.ReadInt32();
@@ -64,11 +76,84 @@ namespace OpenRA.FileFormats
break;
default:
// Ignore unknown chunks
var chunkSize = s.ReadInt32();
s.ReadBytes(chunkSize);
{
var chunkSize = s.ReadInt32();
s.ReadBytes(chunkSize);
}
break;
}
}
if (Type == WaveType.Adpcm)
{
RawOutput = DecodeAdpcmData();
BitsPerSample = 16;
}
}
public byte[] DecodeAdpcmData()
{
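// RawOutput is expected to still hold the raw ADPCM "data" chunk bytes here; the constructor replaces it with the decoded PCM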
var s = new MemoryStream(RawOutput);
var numBlocks = DataSize / BlockAlign;
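// Each BlockAlign-sized block starts with a 4-byte state header per channel, followed by the packed 4-bit samples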
var blockDataSize = BlockAlign - (Channels * 4);
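// Decoded output is 16-bit PCM: two bytes per sample, per channel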
var outputSize = UncompressedSize * Channels * 2;
var outOffset = 0;
var output = new byte[outputSize];
var predictor = new int[Channels];
var index = new int[Channels];
// Decode each block of ADPCM data in RawOutput
for (var block = 0; block < numBlocks; block++)
{
// Each block starts with an initial state per-channel
for (var c = 0; c < Channels; c++)
{
predictor[c] = s.ReadInt16();
index[c] = s.ReadUInt8();
/* unknown/reserved */ s.ReadUInt8();
// Output first sample from input
output[outOffset++] = (byte)predictor[c];
output[outOffset++] = (byte)(predictor[c] >> 8);
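// Stop once the advertised uncompressed length has been filled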
if (outOffset >= outputSize)
return output;
}
// Decode and output remaining data in this block
var blockOffset = 0;
while (blockOffset < blockDataSize)
{
for (var c = 0; c < Channels; c++)
{
// Decode 4 bytes (to 16 bytes of output) per channel
var chunk = s.ReadBytes(4);
var decoded = AdpcmLoader.LoadAdpcmSound(chunk, ref index[c], ref predictor[c]);
// Interleave output, one sample per channel
var outOffsetChannel = outOffset + (2 * c);
for (var i = 0; i < decoded.Length; i += 2)
{
var outOffsetSample = outOffsetChannel + i;
if (outOffsetSample >= outputSize)
return output;
output[outOffsetSample] = decoded[i];
output[outOffsetSample + 1] = decoded[i + 1];
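// Skip past the other channels' slots so this channel's next sample lands 2 * Channels bytes later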
outOffsetChannel += 2 * (Channels - 1);
}
blockOffset += 4;
}
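// The inner loop consumed 4 bytes per channel and produced 8 samples (16 bytes) per channel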
outOffset += 16 * Channels;
}
}
return output;
}
}
}
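For reference, the per-chunk decode delegated to AdpcmLoader.LoadAdpcmSound presumably applies the standard IMA ADPCM scheme: each 4-bit nibble adjusts a step-size index and reconstructs the next 16-bit sample from a running predictor. The sketch below is not OpenRA's AdpcmLoader; it is a minimal stand-alone illustration (the ImaAdpcmSketch type and its Decode method are hypothetical) using the canonical index and step tables, decoding one packed chunk for a single channel into little-endian 16-bit PCM.

using System;

// Illustrative sketch only; not OpenRA's AdpcmLoader implementation
static class ImaAdpcmSketch
{
    // Canonical IMA ADPCM index-adjustment and step-size tables
    static readonly int[] IndexAdjust = { -1, -1, -1, -1, 2, 4, 6, 8 };
    static readonly int[] StepTable =
    {
        7, 8, 9, 10, 11, 12, 13, 14, 16, 17, 19, 21, 23, 25, 28, 31,
        34, 37, 41, 45, 50, 55, 60, 66, 73, 80, 88, 97, 107, 118, 130, 143,
        157, 173, 190, 209, 230, 253, 279, 307, 337, 371, 408, 449, 494, 544, 598, 658,
        724, 796, 876, 963, 1060, 1166, 1282, 1411, 1552, 1707, 1878, 2066, 2272, 2499, 2749, 3024,
        3327, 3660, 4026, 4428, 4871, 5358, 5894, 6484, 7132, 7845, 8630, 9493, 10442, 11487, 12635, 13899,
        15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794, 32767
    };

    // Decodes one packed chunk for one channel; index and predictor carry state across chunks,
    // mirroring the ref parameters used by the WavLoader code above.
    public static byte[] Decode(byte[] chunk, ref int index, ref int predictor)
    {
        var output = new byte[chunk.Length * 4]; // 2 samples per input byte, 2 bytes per sample
        var o = 0;

        foreach (var b in chunk)
        {
            // The low nibble is decoded before the high nibble
            foreach (var nibble in new[] { b & 0x0F, b >> 4 })
            {
                var step = StepTable[index];

                // diff ~= (magnitude + 0.5) * step / 4, built from the three magnitude bits
                var diff = step >> 3;
                if ((nibble & 1) != 0) diff += step >> 2;
                if ((nibble & 2) != 0) diff += step >> 1;
                if ((nibble & 4) != 0) diff += step;
                if ((nibble & 8) != 0) diff = -diff; // sign bit

                predictor = Math.Max(-32768, Math.Min(32767, predictor + diff));
                index = Math.Max(0, Math.Min(StepTable.Length - 1, index + IndexAdjust[nibble & 7]));

                // Emit the reconstructed sample as 16-bit little-endian
                output[o++] = (byte)predictor;
                output[o++] = (byte)(predictor >> 8);
            }
        }

        return output;
    }
}

Fed the same 4-byte chunks that WavLoader reads per channel, this produces the 16 output bytes per channel that the interleaving loop above distributes across the final buffer.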