using System;
using System.Runtime.InteropServices;
#if IPHONE
using MonoTouch.UIKit;
using MonoTouch.Foundation;
using MonoTouch.CoreFoundation;
using MonoTouch.AudioToolbox;
using MonoTouch.AudioUnit;
using OpenTK.Audio.OpenAL;
#elif MONOMAC
using MonoMac.AppKit;
using MonoMac.Foundation;
using MonoMac.CoreFoundation;
using MonoMac.AudioToolbox;
using MonoMac.AudioUnit;
using MonoMac.OpenAL;
#endif
namespace Microsoft.Xna.Framework.Audio
{
    internal static class OpenALSupport
    {
        public static ExtAudioFile GetExtAudioFile (NSUrl url, out AudioStreamBasicDescription audioDescription)
        {
            // Note that we cannot pass an NSUrl directly here; ExtAudioFile.OpenUrl expects a CFUrl.
            //ExtAudioFile ext = ExtAudioFile.OpenUrl(url);

            // Basic descriptions
            AudioStreamBasicDescription fileFormat;
            AudioStreamBasicDescription outputFormat;

            // So we create a CFUrl from the NSUrl's path
            CFUrl curl = CFUrl.FromFile (url.Path);

            // Open the file
            ExtAudioFile ext = ExtAudioFile.OpenUrl (curl);

            // Get the audio format of the file
            fileFormat = ext.FileDataFormat;

            // We don't know how to handle sounds with more than 2 channels (i.e. beyond stereo).
            // Remember that OpenAL sound effects must be mono to be spatialized anyway.
            if (fileFormat.ChannelsPerFrame > 2) {
#if DEBUG
                Console.WriteLine ("Unsupported Format: Channel count {0} is greater than stereo.", fileFormat.ChannelsPerFrame);
#endif
                // The out parameter must still be assigned before we bail out.
                audioDescription = new AudioStreamBasicDescription ();
                return null;
            }

            // The output format must be linear PCM because that's the only type OpenAL knows how to deal with.
            // Set the client format to 16-bit signed integer (native-endian) data because that is the most
            // efficient format on iPhone/iPod touch hardware.
            // Maintain the channel count and sample rate of the original source format.
            outputFormat = new AudioStreamBasicDescription ();               // The client (output) format we will convert to
            outputFormat.SampleRate = fileFormat.SampleRate;                 // Preserve the original sample rate
            outputFormat.ChannelsPerFrame = fileFormat.ChannelsPerFrame;     // Preserve the original number of channels
            outputFormat.Format = AudioFormatType.LinearPCM;                 // We want linear PCM

            // IsBigEndian causes distorted sounds on Mac OS X, so we leave it out:
            // outputFormat.FormatFlags = AudioFormatFlags.IsBigEndian
            //                          | AudioFormatFlags.IsPacked
            //                          | AudioFormatFlags.IsSignedInteger;
            outputFormat.FormatFlags = AudioFormatFlags.IsPacked
                                     | AudioFormatFlags.IsSignedInteger;
            outputFormat.FramesPerPacket = 1;                                // For linear PCM, by definition there is 1 frame per packet
            outputFormat.BitsPerChannel = 16;                                // We want 16-bit samples
            outputFormat.BytesPerPacket = 2 * outputFormat.ChannelsPerFrame; // 16-bit samples, so 2 bytes per channel per frame
            outputFormat.BytesPerFrame = 2 * outputFormat.ChannelsPerFrame;  // For linear PCM, 1 frame is 1 packet, so this equals BytesPerPacket
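
            // For example, a 44,100 Hz stereo file gives ChannelsPerFrame = 2 and
            // BytesPerFrame = BytesPerPacket = 4, so one second of decoded audio
            // occupies 44100 * 4 = 176,400 bytes in the buffers filled below.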

            // Set the desired client (output) data format
            ext.ClientDataFormat = outputFormat;

            // Copy the output format to the audio description that was passed in so the
            // info will be returned to the user.
            audioDescription = outputFormat;
            return ext;
        }
        public static bool GetDataFromExtAudioFile (ExtAudioFile ext, AudioStreamBasicDescription outputFormat, int maxBufferSize,
                                                    byte[] dataBuffer, out int dataBufferSize, out ALFormat format, out double sampleRate)
        {
            int errorStatus = 0;
            int bufferSizeInFrames = 0;
            dataBufferSize = 0;
            format = ALFormat.Mono16;
            sampleRate = 0;

            /* Compute how many frames will fit into our max buffer size */
            bufferSizeInFrames = maxBufferSize / outputFormat.BytesPerFrame;

            if (dataBuffer != null) {
                MutableAudioBufferList audioBufferList = new MutableAudioBufferList (1, maxBufferSize);
                audioBufferList.Buffers [0].DataByteSize = maxBufferSize;
                audioBufferList.Buffers [0].NumberChannels = outputFormat.ChannelsPerFrame;

                // This is a hack, so if there is a problem speak to kjpou1. -Kenneth
                // The cleanest way is to copy the buffer to the pointer already allocated,
                // but what we are going to do is replace the pointer with our own and restore it later.
                GCHandle meBePinned = GCHandle.Alloc (dataBuffer, GCHandleType.Pinned);
                IntPtr meBePointer = meBePinned.AddrOfPinnedObject ();

                // Let's not use Marshal.Copy for now while we test this. For very large files the copy
                // might show some stutter in the sound loading.
                //Marshal.Copy(dataBuffer, 0, audioBufferList.Buffers[0].Data, maxBufferSize);
                IntPtr savedDataPtr = audioBufferList.Buffers [0].Data;
                audioBufferList.Buffers [0].Data = meBePointer;

                try {
                    // Read the data into an AudioBufferList;
                    // errorStatus reports back how much information was read.
                    errorStatus = ext.Read (bufferSizeInFrames, audioBufferList);
                    if (errorStatus >= 0) {
                        /* Success */
                        /* Note: reading 0 frames is a legitimate result meaning we are at EOF. */

                        /* ExtAudioFile.Read returns the number of frames actually read.
                         * Need to convert back to bytes.
                         */
                        dataBufferSize = bufferSizeInFrames * outputFormat.BytesPerFrame;

                        // Now we set our format
                        format = outputFormat.ChannelsPerFrame > 1 ? ALFormat.Stereo16 : ALFormat.Mono16;
                        sampleRate = outputFormat.SampleRate;
                    } else {
#if DEBUG
                        Console.WriteLine ("ExtAudioFile.Read failed, Error = " + errorStatus);
#endif
                        return false;
                    }
                } catch (Exception exc) {
#if DEBUG
                    Console.WriteLine ("ExtAudioFile.Read failed: " + exc.Message);
#endif
                    return false;
                } finally {
                    // Don't forget to free the dataBuffer handle that was pinned above
                    meBePinned.Free ();
                    // and restore the pointer that was allocated to begin with.
                    audioBufferList.Buffers [0].Data = savedDataPtr;
                }
            }
            return true;
        }
        /**
         * Returns a byte buffer containing all of the PCM data in the file.
         */
        public static byte[] GetOpenALAudioDataAll (NSUrl file_url, out int dataBufferSize, out ALFormat alFormat, out double sampleRate, out double duration)
        {
            long fileLengthInFrames = 0;
            AudioStreamBasicDescription outputFormat;
            int maxBufferSize;
            byte[] pcmData;

            dataBufferSize = 0;
            alFormat = 0;
            sampleRate = 0;
            duration = 0;

            ExtAudioFile extFile;
            try {
                extFile = GetExtAudioFile (file_url, out outputFormat);
            } catch (Exception extExc) {
#if DEBUG
                Console.WriteLine ("ExtAudioFile.OpenUrl failed, Error: " + extExc.Message);
#endif
                return null;
            }

            /* Get the total frame count */
            try {
                fileLengthInFrames = extFile.FileLengthFrames;
            } catch (Exception exc) {
#if DEBUG
                Console.WriteLine ("ExtAudioFile.FileLengthFrames failed, Error: " + exc.Message);
#endif
                return null;
            }

            /* Compute the number of bytes needed to hold all the data in the file. */
            maxBufferSize = (int)(fileLengthInFrames * outputFormat.BytesPerFrame);

            /* Allocate memory to hold all the decoded PCM data. */
            pcmData = new byte[maxBufferSize];

            bool gotData = GetDataFromExtAudioFile (extFile, outputFormat, maxBufferSize, pcmData,
                                                    out dataBufferSize, out alFormat, out sampleRate);
            if (!gotData) {
                pcmData = null;
            }

            // Duration in seconds = total bytes / (bytes per sample * channels) / sample rate.
            duration = (dataBufferSize / ((outputFormat.BitsPerChannel / 8) * outputFormat.ChannelsPerFrame)) / outputFormat.SampleRate;
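            // For example, 176,400 bytes of 16-bit stereo data at 44,100 Hz works out to
            // 176400 / (2 * 2) = 44,100 frames, i.e. a duration of 44100 / 44100 = 1.0 second.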

            // We probably should make sure the buffer sizes agree.
            // assert(maxBufferSize == dataBufferSize);

            // We no longer need the ExtAudioFile, so release our reference to it.
            extFile = null;

            return pcmData;
        }
        public static byte[] LoadFromFile (string filename, out int dataBufferSize, out ALFormat alFormat, out double sampleRate, out double duration)
        {
            return OpenALSupport.GetOpenALAudioDataAll (NSUrl.FromFilename (filename),
                                                        out dataBufferSize, out alFormat, out sampleRate, out duration);
        }
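
        // Example usage sketch: one way the PCM data returned by LoadFromFile could be uploaded
        // into an OpenAL buffer. The method name here is only illustrative, and the
        // AL.GenBuffer/AL.BufferData calls assume the OpenTK-style AL bindings imported above.
        // Returns 0 if the file could not be decoded.
        internal static int CreateALBufferFromFile (string filename)
        {
            int dataSize;
            ALFormat format;
            double rate;
            double length;

            byte[] pcm = LoadFromFile (filename, out dataSize, out format, out rate, out length);
            if (pcm == null)
                return 0;

            // Upload the decoded 16-bit PCM data; OpenAL expects the sample rate as an int.
            int bufferId = AL.GenBuffer ();
            AL.BufferData (bufferId, format, pcm, dataSize, (int)rate);
            return bufferId;
        }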
    }
}