Providing streaming WavFormat data.
WavFormat.GetPCMInputStream now returns data that is streamed on demand, rather than a fully decoded MemoryStream.
Committed by: Paul Chote
Parent: 4ae92a5c22
Commit: 7ed769421e
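Note on the caller-facing effect (not part of the commit itself): PCM data is no longer decoded into one in-memory buffer up front, it is produced as the returned stream is read. A minimal caller sketch, where everything except WavFormat and GetPCMInputStream is invented for illustration:

// Illustrative only; WavFormat comes from OpenRA.Mods.Common.AudioLoaders,
// the helper class, method name and buffer size are made up for this sketch.
using System.IO;
using OpenRA.Mods.Common.AudioLoaders;

static class PcmCopyExample
{
	public static void CopyPcm(Stream wavFile, Stream destination)
	{
		var format = new WavFormat(wavFile);

		// Before this commit the returned stream was a MemoryStream holding the
		// fully decoded sound; now the PCM bytes are generated as they are read.
		var pcm = format.GetPCMInputStream();

		var buffer = new byte[4096];
		int read;
		while ((read = pcm.Read(buffer, 0, buffer.Length)) > 0)
			destination.Write(buffer, 0, read);

		format.Dispose();
	}
}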
@@ -23,6 +23,7 @@ namespace OpenRA.Primitives
 	{
 		readonly Queue<byte> data = new Queue<byte>(1024);
 		readonly Stream baseStream;
+		bool baseStreamEmpty;

 		protected ReadOnlyAdapterStream(Stream stream)
 		{
@@ -55,10 +56,9 @@ namespace OpenRA.Primitives
 			var copied = 0;
 			ConsumeData(buffer, offset, count, ref copied);

-			var finished = false;
-			while (copied < count && !finished)
+			while (copied < count && !baseStreamEmpty)
 			{
-				finished = BufferData(baseStream, data);
+				baseStreamEmpty = BufferData(baseStream, data);
 				ConsumeData(buffer, offset, count, ref copied);
 			}

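For context, the Read() path above depends on ConsumeData and BufferData, which are not shown in this diff. A simplified sketch of the whole adapter pattern, reconstructed from the hunk above rather than copied from OpenRA, so the Read signature and the ConsumeData body are assumptions:

// Simplified sketch of the adapter-stream control flow; not the real
// ReadOnlyAdapterStream class, just the idea it implements.
using System.Collections.Generic;
using System.IO;

abstract class AdapterStreamSketch
{
	readonly Queue<byte> data = new Queue<byte>(1024);
	readonly Stream baseStream;
	bool baseStreamEmpty;

	protected AdapterStreamSketch(Stream stream) { baseStream = stream; }

	public int Read(byte[] buffer, int offset, int count)
	{
		var copied = 0;
		ConsumeData(buffer, offset, count, ref copied);

		// Keep asking the subclass for more decoded bytes until the request is
		// satisfied or the source reports (and remembers) that it is exhausted.
		while (copied < count && !baseStreamEmpty)
		{
			baseStreamEmpty = BufferData(baseStream, data);
			ConsumeData(buffer, offset, count, ref copied);
		}

		return copied;
	}

	// Returns true once the base stream has been fully consumed.
	protected abstract bool BufferData(Stream baseStream, Queue<byte> data);

	void ConsumeData(byte[] buffer, int offset, int count, ref int copied)
	{
		while (copied < count && data.Count > 0)
			buffer[offset + copied++] = data.Dequeue();
	}
}

Persisting the flag in a field means BufferData is not invoked again once the source has reported exhaustion, which matters now that decoders such as WavStream read directly from the base stream.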
@@ -50,37 +50,25 @@ namespace OpenRA.Mods.Common.AudioLoaders

 	public sealed class WavFormat : ISoundFormat
 	{
-		public int Channels { get { return reader.Value.Channels; } }
-		public int SampleBits { get { return reader.Value.BitsPerSample; } }
-		public int SampleRate { get { return reader.Value.SampleRate; } }
-		public float LengthInSeconds { get { return WavReader.WaveLength(stream); } }
-		public Stream GetPCMInputStream() { return new MemoryStream(reader.Value.RawOutput); }
-		public void Dispose() { stream.Dispose(); }
+		public int Channels { get { return channels; } }
+		public int SampleBits { get { return sampleBits; } }
+		public int SampleRate { get { return sampleRate; } }
+		public float LengthInSeconds { get { return WavReader.WaveLength(sourceStream); } }
+		public Stream GetPCMInputStream() { return wavStreamFactory(); }
+		public void Dispose() { sourceStream.Dispose(); }

-		Lazy<WavReader> reader;
-		readonly Stream stream;
+		readonly Stream sourceStream;
+		readonly Func<Stream> wavStreamFactory;
+		readonly short channels;
+		readonly int sampleBits;
+		readonly int sampleRate;

 		public WavFormat(Stream stream)
 		{
-			this.stream = stream;
+			sourceStream = stream;

-			var position = stream.Position;
-			reader = Exts.Lazy(() =>
-			{
-				var wavReader = new WavReader();
-				try
-				{
-					if (!wavReader.LoadSound(stream))
-						throw new InvalidDataException();
-				}
-				finally
-				{
-					stream.Position = position;
-				}
-
-				return wavReader;
-			});
+			if (!WavReader.LoadSound(stream, out wavStreamFactory, out channels, out sampleBits, out sampleRate))
+				throw new InvalidDataException();
 		}
 	}
 }
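A small usage note: the constructor above now parses the format metadata eagerly and throws if the source is not a valid WAV, while the actual PCM decoding is deferred to the factory returned by WavReader.LoadSound. A hedged example (file handling and output are invented, only the WavFormat members shown in the diff are assumed):

using System;
using System.IO;
using OpenRA.Mods.Common.AudioLoaders;

static class WavInfoExample
{
	public static void PrintInfo(string path)
	{
		var source = File.OpenRead(path); // e.g. some local .wav file (made up)
		try
		{
			// Throws InvalidDataException if WavReader.LoadSound rejects the stream,
			// exactly as in the constructor shown above.
			var format = new WavFormat(source);
			Console.WriteLine("{0} Hz, {1} channel(s), {2}-bit, {3:F1} s",
				format.SampleRate, format.Channels, format.SampleBits, format.LengthInSeconds);
			format.Dispose(); // per the diff, this also disposes the source stream
		}
		catch (InvalidDataException)
		{
			source.Dispose();
			throw;
		}
	}
}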
@@ -10,72 +10,70 @@
 #endregion

 using System;
+using System.Collections.Generic;
 using System.IO;
+using OpenRA.Primitives;

 namespace OpenRA.Mods.Common.FileFormats
 {
-	public class WavReader
+	public static class WavReader
 	{
-		public int FileSize;
-		public string Format;
-
-		public int FmtChunkSize;
-		public int AudioFormat;
-		public int Channels;
-		public int SampleRate;
-		public int ByteRate;
-		public int BlockAlign;
-		public int BitsPerSample;
-
-		public int UncompressedSize;
-		public int DataSize;
-		public byte[] RawOutput;
-
-		public enum WaveType { Pcm = 0x1, ImaAdpcm = 0x11 }
-		public static WaveType Type { get; private set; }
+		enum WaveType { Pcm = 0x1, ImaAdpcm = 0x11 }

-		public bool LoadSound(Stream s)
+		public static bool LoadSound(Stream s, out Func<Stream> result, out short channels, out int sampleBits, out int sampleRate)
 		{
+			result = null;
+			channels = -1;
+			sampleBits = -1;
+			sampleRate = -1;
+
 			var type = s.ReadASCII(4);
 			if (type != "RIFF")
 				return false;

-			FileSize = s.ReadInt32();
-			Format = s.ReadASCII(4);
-			if (Format != "WAVE")
+			s.ReadInt32(); // File-size
+			var format = s.ReadASCII(4);
+			if (format != "WAVE")
 				return false;

+			WaveType audioType = 0;
+			var dataOffset = -1L;
+			var dataSize = -1;
+			short blockAlign = -1;
+			int uncompressedSize = -1;
 			while (s.Position < s.Length)
 			{
 				if ((s.Position & 1) == 1)
 					s.ReadByte(); // Alignment

-				type = s.ReadASCII(4);
-				switch (type)
+				var blockType = s.ReadASCII(4);
+				switch (blockType)
 				{
 					case "fmt ":
-						FmtChunkSize = s.ReadInt32();
-						AudioFormat = s.ReadInt16();
-						Type = (WaveType)AudioFormat;
+						var fmtChunkSize = s.ReadInt32();
+						var audioFormat = s.ReadInt16();
+						audioType = (WaveType)audioFormat;

-						if (!Enum.IsDefined(typeof(WaveType), Type))
-							throw new NotSupportedException("Compression type {0} is not supported.".F(AudioFormat));
+						if (!Enum.IsDefined(typeof(WaveType), audioType))
+							throw new NotSupportedException("Compression type {0} is not supported.".F(audioFormat));

-						Channels = s.ReadInt16();
-						SampleRate = s.ReadInt32();
-						ByteRate = s.ReadInt32();
-						BlockAlign = s.ReadInt16();
-						BitsPerSample = s.ReadInt16();
+						channels = s.ReadInt16();
+						sampleRate = s.ReadInt32();
+						s.ReadInt32(); // Byte Rate
+						blockAlign = s.ReadInt16();
+						sampleBits = s.ReadInt16();

-						s.ReadBytes(FmtChunkSize - 16);
+						s.ReadBytes(fmtChunkSize - 16);
 						break;
 					case "fact":
 						var chunkSize = s.ReadInt32();
-						UncompressedSize = s.ReadInt32();
+						uncompressedSize = s.ReadInt32();
 						s.ReadBytes(chunkSize - 4);
 						break;
 					case "data":
-						DataSize = s.ReadInt32();
-						RawOutput = s.ReadBytes(DataSize);
+						dataSize = s.ReadInt32();
+						dataOffset = s.Position;
+						s.Position += dataSize;
 						break;
 					default:
 						var unknownChunkSize = s.ReadInt32();
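The loop above is a standard RIFF chunk walk: a four-character chunk id, a 32-bit little-endian size, then the payload, with chunks starting on even offsets. A generic, self-contained sketch of that pattern (class, method and console output are illustrative, not OpenRA API):

using System;
using System.IO;
using System.Text;

static class RiffWalkerExample
{
	public static void WalkChunks(Stream s)
	{
		var reader = new BinaryReader(s);

		// Skip the 12-byte RIFF header: "RIFF", <file size>, "WAVE".
		s.Position = 12;

		while (s.Position < s.Length)
		{
			// Chunks start on even offsets; odd-sized chunks are padded by one byte.
			if ((s.Position & 1) == 1)
				s.ReadByte();

			var id = Encoding.ASCII.GetString(reader.ReadBytes(4));
			var size = reader.ReadInt32();

			Console.WriteLine("{0}: {1} bytes at offset {2}", id, size, s.Position);

			// Like the "data" case above, remember or skip the payload instead of reading it.
			s.Position += size;
		}
	}
}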
@@ -84,11 +82,18 @@ namespace OpenRA.Mods.Common.FileFormats
 				}
 			}

-			if (Type == WaveType.ImaAdpcm)
+			if (audioType == WaveType.ImaAdpcm)
+				sampleBits = 16;
+
+			var chan = channels;
+			result = () =>
 			{
-				RawOutput = DecodeImaAdpcmData();
-				BitsPerSample = 16;
-			}
+				var audioStream = SegmentStream.CreateWithoutOwningStream(s, dataOffset, dataSize);
+				if (audioType == WaveType.ImaAdpcm)
+					return new WavStream(audioStream, dataSize, blockAlign, chan, uncompressedSize);
+
+				return audioStream; // Data is already PCM format.
+			};

 			return true;
 		}
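SegmentStream.CreateWithoutOwningStream hands the factory a read-only window over just the data chunk, so nothing is copied and the source stream stays open. Roughly, a windowing stream of that kind behaves like the following toy (an illustration of the idea only, not OpenRA's SegmentStream, and it assumes a seekable source):

using System;
using System.IO;

sealed class WindowStreamSketch : Stream
{
	readonly Stream source;
	readonly long start;
	readonly long length;
	long position;

	public WindowStreamSketch(Stream source, long start, long length)
	{
		this.source = source;
		this.start = start;
		this.length = length;
	}

	public override int Read(byte[] buffer, int offset, int count)
	{
		var remaining = length - position;
		if (remaining <= 0)
			return 0;

		// Reads are translated into the window's region of the shared source stream.
		source.Position = start + position;
		var read = source.Read(buffer, offset, (int)Math.Min(count, remaining));
		position += read;
		return read;
	}

	public override bool CanRead { get { return true; } }
	public override bool CanSeek { get { return false; } }
	public override bool CanWrite { get { return false; } }
	public override long Length { get { return length; } }
	public override long Position { get { return position; } set { throw new NotSupportedException(); } }
	public override void Flush() { }
	public override long Seek(long offset, SeekOrigin origin) { throw new NotSupportedException(); }
	public override void SetLength(long value) { throw new NotSupportedException(); }
	public override void Write(byte[] buffer, int offset, int count) { throw new NotSupportedException(); }
}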
@@ -112,69 +117,86 @@ namespace OpenRA.Mods.Common.FileFormats
 			return length / (channels * sampleRate * bitsPerSample);
 		}

-		public byte[] DecodeImaAdpcmData()
+		sealed class WavStream : ReadOnlyAdapterStream
 		{
-			var s = new MemoryStream(RawOutput);
+			readonly short channels;
+			readonly int numBlocks;
+			readonly int blockDataSize;
+			readonly int outputSize;
+			readonly int[] predictor;
+			readonly int[] index;

-			var numBlocks = DataSize / BlockAlign;
-			var blockDataSize = BlockAlign - (Channels * 4);
-			var outputSize = UncompressedSize * Channels * 2;
+			readonly byte[] interleaveBuffer;
+			int outOffset;
+			int currentBlock;

-			var outOffset = 0;
-			var output = new byte[outputSize];
-
-			var predictor = new int[Channels];
-			var index = new int[Channels];
-
-			// Decode each block of IMA ADPCM data in RawOutput
-			for (var block = 0; block < numBlocks; block++)
+			public WavStream(Stream stream, int dataSize, short blockAlign, short channels, int uncompressedSize) : base(stream)
 			{
+				this.channels = channels;
+				numBlocks = dataSize / blockAlign;
+				blockDataSize = blockAlign - (channels * 4);
+				outputSize = uncompressedSize * channels * 2;
+				predictor = new int[channels];
+				index = new int[channels];
+
+				interleaveBuffer = new byte[channels * 16];
+			}
+
+			protected override bool BufferData(Stream baseStream, Queue<byte> data)
+			{
+				// Decode each block of IMA ADPCM data
 				// Each block starts with a initial state per-channel
-				for (var c = 0; c < Channels; c++)
+				for (var c = 0; c < channels; c++)
 				{
-					predictor[c] = s.ReadInt16();
-					index[c] = s.ReadUInt8();
-					/* unknown/reserved */ s.ReadUInt8();
+					predictor[c] = baseStream.ReadInt16();
+					index[c] = baseStream.ReadUInt8();
+					baseStream.ReadUInt8(); // Unknown/Reserved

 					// Output first sample from input
-					output[outOffset++] = (byte)predictor[c];
-					output[outOffset++] = (byte)(predictor[c] >> 8);
+					data.Enqueue((byte)predictor[c]);
+					data.Enqueue((byte)(predictor[c] >> 8));
+					outOffset += 2;

 					if (outOffset >= outputSize)
-						return output;
+						return true;
 				}

 				// Decode and output remaining data in this block
 				var blockOffset = 0;
 				while (blockOffset < blockDataSize)
 				{
-					for (var c = 0; c < Channels; c++)
+					for (var c = 0; c < channels; c++)
 					{
 						// Decode 4 bytes (to 16 bytes of output) per channel
-						var chunk = s.ReadBytes(4);
+						var chunk = baseStream.ReadBytes(4);
 						var decoded = ImaAdpcmReader.LoadImaAdpcmSound(chunk, ref index[c], ref predictor[c]);

 						// Interleave output, one sample per channel
-						var outOffsetChannel = outOffset + (2 * c);
+						var interleaveChannelOffset = 2 * c;
 						for (var i = 0; i < decoded.Length; i += 2)
 						{
-							var outOffsetSample = outOffsetChannel + i;
-							if (outOffsetSample >= outputSize)
-								return output;
-
-							output[outOffsetSample] = decoded[i];
-							output[outOffsetSample + 1] = decoded[i + 1];
-							outOffsetChannel += 2 * (Channels - 1);
+							var interleaveSampleOffset = interleaveChannelOffset + i;
+							interleaveBuffer[interleaveSampleOffset] = decoded[i];
+							interleaveBuffer[interleaveSampleOffset + 1] = decoded[i + 1];
+							interleaveChannelOffset += 2 * (channels - 1);
 						}

 						blockOffset += 4;
 					}

-					outOffset += 16 * Channels;
-				}
-			}
+					var outputRemaining = outputSize - outOffset;
+					var toCopy = Math.Min(outputRemaining, interleaveBuffer.Length);
+					for (var i = 0; i < toCopy; i++)
+						data.Enqueue(interleaveBuffer[i]);
+
+					outOffset += 16 * channels;
+
+					if (outOffset >= outputSize)
+						return true;
+				}

-			return output;
+				return ++currentBlock >= numBlocks;
+			}
 		}
 	}
 }
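The size bookkeeping in WavStream follows from the IMA ADPCM layout: each block begins with a 4-byte header per channel, every remaining 4 compressed bytes per channel decode to 8 samples (16 bytes) of 16-bit PCM, and decoding stops once outputSize bytes have been produced. A worked example with made-up but plausible numbers:

// Worked example of the arithmetic above; all input values are assumed,
// nothing here is taken from a real file or from OpenRA.
static class AdpcmSizeExample
{
	public static void Print()
	{
		short channels = 2;
		short blockAlign = 2048;
		var uncompressedSize = 129024;                      // samples per channel (assumed)
		var dataSize = 258048;                              // bytes in the "data" chunk (assumed)

		var numBlocks = dataSize / blockAlign;              // 126 compressed blocks
		var blockDataSize = blockAlign - channels * 4;      // 2040 nibble bytes after the per-channel headers
		var outputSize = uncompressedSize * channels * 2;   // 516096 bytes of 16-bit PCM

		// Each inner-loop pass reads 4 bytes per channel and decodes them to 16 bytes
		// of PCM per channel, which is why interleaveBuffer holds channels * 16 bytes
		// and outOffset advances by 16 * channels after each flush.
		System.Console.WriteLine("{0} blocks, {1} data bytes/block, {2} PCM bytes total, {3}-byte interleave buffer",
			numBlocks, blockDataSize, outputSize, channels * 16);
	}
}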