diff --git a/Headphoned.xcworkspace/contents.xcworkspacedata b/Headphoned.xcworkspace/contents.xcworkspacedata
new file mode 100644
index 0000000..0b705bc
--- /dev/null
+++ b/Headphoned.xcworkspace/contents.xcworkspacedata
@@ -0,0 +1,10 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Workspace
+   version = "1.0">
+   <FileRef
+      location = "group:Headphoned.xcodeproj">
+   </FileRef>
+   <FileRef
+      location = "group:Pods/Pods.xcodeproj">
+   </FileRef>
+</Workspace>
diff --git a/Podfile b/Podfile
new file mode 100644
index 0000000..cc592d3
--- /dev/null
+++ b/Podfile
@@ -0,0 +1 @@
+pod 'EZAudio', '~> 1.0'
\ No newline at end of file
diff --git a/Podfile.lock b/Podfile.lock
new file mode 100644
index 0000000..9d63fd0
--- /dev/null
+++ b/Podfile.lock
@@ -0,0 +1,17 @@
+PODS:
+ - EZAudio (1.1.2):
+ - EZAudio/Full (= 1.1.2)
+ - EZAudio/Core (1.1.2)
+ - EZAudio/Full (1.1.2):
+ - EZAudio/Core
+ - TPCircularBuffer (~> 0.0)
+ - TPCircularBuffer (0.0.1)
+
+DEPENDENCIES:
+ - EZAudio (~> 1.0)
+
+SPEC CHECKSUMS:
+ EZAudio: 01373de2b1257b77be1bf13753fb1c4ea8a9777e
+ TPCircularBuffer: 949e9632b9fb99b7274d7b2296ee22bff5841e35
+
+COCOAPODS: 0.38.2
diff --git a/Pods/EZAudio/EZAudio/EZAudio.h b/Pods/EZAudio/EZAudio/EZAudio.h
new file mode 100644
index 0000000..ebb559c
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudio.h
@@ -0,0 +1,522 @@
+//
+// EZAudio.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 11/21/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+
+//------------------------------------------------------------------------------
+#pragma mark - Core Components
+//------------------------------------------------------------------------------
+
+#import "EZAudioDevice.h"
+#import "EZAudioFile.h"
+#import "EZMicrophone.h"
+#import "EZOutput.h"
+#import "EZRecorder.h"
+#import "EZAudioPlayer.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Interface Components
+//------------------------------------------------------------------------------
+
+#import "EZPlot.h"
+#import "EZAudioDisplayLink.h"
+#import "EZAudioPlot.h"
+#import "EZAudioPlotGL.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Utility Components
+//------------------------------------------------------------------------------
+
+#import "EZAudioFFT.h"
+#import "EZAudioFloatConverter.h"
+#import "EZAudioFloatData.h"
+#import "EZAudioUtilities.h"
+
+//------------------------------------------------------------------------------
+
+/**
+ EZAudio is a simple, intuitive framework for iOS and OSX. The goal of EZAudio was to provide a modular, cross-platform framework to simplify performing everyday audio operations like getting microphone input, creating audio waveforms, recording/playing audio files, etc. The visualization tools like the EZAudioPlot and EZAudioPlotGL were created to plug right into the framework's various components and provide highly optimized drawing routines that work in harmony with audio callback loops. All components retain the same namespace whether you're on an iOS device or a Mac computer so an EZAudioPlot understands it will subclass a UIView on an iOS device or an NSView on a Mac.
+
+ Class methods for EZAudio are provided as utility methods used throughout the other modules within the framework. For instance, these methods help make sense of error codes (checkResult:operation:), map values between coordinate systems (MAP:leftMin:leftMax:rightMin:rightMax:), calculate root mean squared values for buffers (RMS:length:), etc.
+
+ @warning As of 1.0 these methods have been moved over to `EZAudioUtilities` to allow using specific modules without requiring the whole library.
+ */
+@interface EZAudio : NSObject
+
+//------------------------------------------------------------------------------
+#pragma mark - Debugging
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Debugging EZAudio
+///-----------------------------------------------------------
+
+/**
+ Globally sets whether or not the program should exit if a `checkResult:operation:` operation fails. Currently the behavior on EZAudio is to quit if a `checkResult:operation:` fails, but this is not desirable in any production environment. Internally there are a lot of `checkResult:operation:` operations used on all the core classes. This should only ever be set to NO in production environments since a `checkResult:operation:` failing means something breaking has likely happened.
+ @param shouldExitOnCheckResultFail A BOOL indicating whether or not the running program should exit due to a `checkResult:operation:` failure.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)setShouldExitOnCheckResultFail:(BOOL)shouldExitOnCheckResultFail __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides a flag indicating whether or not the program will exit if a `checkResult:operation:` fails.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A BOOL indicating whether or not the program will exit if a `checkResult:operation:` fails.
+ */
++ (BOOL)shouldExitOnCheckResultFail __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+#pragma mark - AudioBufferList Utility
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name AudioBufferList Utility
+///-----------------------------------------------------------
+
+/**
+ Allocates an AudioBufferList structure. Make sure to call freeBufferList when done using AudioBufferList or it will leak.
+ @param frames The number of frames that will be stored within each audio buffer
+ @param channels The number of channels (e.g. 2 for stereo, 1 for mono, etc.)
+ @param interleaved Whether the samples will be interleaved (if not it will be assumed to be non-interleaved and each channel will have an AudioBuffer allocated)
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return An AudioBufferList struct that has been allocated in memory
+ */
++ (AudioBufferList *)audioBufferListWithNumberOfFrames:(UInt32)frames
+ numberOfChannels:(UInt32)channels
+ interleaved:(BOOL)interleaved __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Allocates an array of float arrays given the number of frames needed to store in each float array.
+ @param frames A UInt32 representing the number of frames to store in each float buffer
+ @param channels A UInt32 representing the number of channels (i.e. the number of float arrays to allocate)
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return An array of float arrays, each the length of the number of frames specified
+ */
++ (float **)floatBuffersWithNumberOfFrames:(UInt32)frames
+ numberOfChannels:(UInt32)channels __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Deallocates an AudioBufferList structure from memory.
+ @param bufferList A pointer to the buffer list you would like to free
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)freeBufferList:(AudioBufferList *)bufferList __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Deallocates an array of float buffers
+ @param buffers An array of float arrays
+ @param channels A UInt32 representing the number of channels (i.e. the number of float arrays to deallocate)
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)freeFloatBuffers:(float **)buffers numberOfChannels:(UInt32)channels __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+#pragma mark - AudioStreamBasicDescription Utilties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Creating An AudioStreamBasicDescription
+///-----------------------------------------------------------
+
+/**
+ Creates a signed-integer, interleaved AudioStreamBasicDescription for the number of channels specified for an AIFF format.
+ @param channels The desired number of channels
+ @param sampleRate A float representing the sample rate.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)AIFFFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an AudioStreamBasicDescription for the iLBC narrow band speech codec.
+ @param sampleRate A float representing the sample rate.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)iLBCFormatWithSampleRate:(float)sampleRate __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a float-based, non-interleaved AudioStreamBasicDescription for the number of channels specified.
+ @param channels A UInt32 representing the number of channels.
+ @param sampleRate A float representing the sample rate.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A float-based AudioStreamBasicDescription with the number of channels specified.
+ */
++ (AudioStreamBasicDescription)floatFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an AudioStreamBasicDescription for an M4A AAC format.
+ @param channels The desired number of channels
+ @param sampleRate A float representing the sample rate.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)M4AFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a single-channel, float-based AudioStreamBasicDescription.
+ @param sampleRate A float representing the sample rate.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)monoFloatFormatWithSampleRate:(float)sampleRate __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a single-channel, float-based AudioStreamBasicDescription (as of 0.0.6 this is the same as `monoFloatFormatWithSampleRate:`).
+ @param sampleRate A float representing the sample rate.
+ @return A new AudioStreamBasicDescription with the specified format.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (AudioStreamBasicDescription)monoCanonicalFormatWithSampleRate:(float)sampleRate __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a two-channel, non-interleaved, float-based AudioStreamBasicDescription (as of 0.0.6 this is the same as `stereoFloatNonInterleavedFormatWithSampleRate:`).
+ @param sampleRate A float representing the sample rate.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)stereoCanonicalNonInterleavedFormatWithSampleRate:(float)sampleRate __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a two-channel, interleaved, float-based AudioStreamBasicDescription.
+ @param sampleRate A float representing the sample rate.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)stereoFloatInterleavedFormatWithSampleRate:(float)sampleRate __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a two-channel, non-interleaved, float-based AudioStreamBasicDescription.
+ @param sampleRate A float representing the sample rate.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)stereoFloatNonInterleavedFormatWithSampleRate:(float)sampleRate __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+// @name AudioStreamBasicDescription Helper Functions
+//------------------------------------------------------------------------------
+
+/**
+ Checks an AudioStreamBasicDescription to see if it is a float-based format (as opposed to a signed integer based format).
+ @param asbd A valid AudioStreamBasicDescription
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A BOOL indicating whether or not the AudioStreamBasicDescription is a float format.
+ */
++ (BOOL)isFloatFormat:(AudioStreamBasicDescription)asbd __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Checks an AudioStreamBasicDescription for an interleaved flag (samples are
+ stored in one buffer one after another instead of two (or n channels) parallel buffers).
+ @param asbd A valid AudioStreamBasicDescription
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A BOOL indicating whether or not the AudioStreamBasicDescription is interleaved
+ */
++ (BOOL)isInterleaved:(AudioStreamBasicDescription)asbd __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Checks an AudioStreamBasicDescription to see if it is a linear PCM format (uncompressed,
+ 1 frame per packet)
+ @param asbd A valid AudioStreamBasicDescription
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return A BOOL indicating whether or not the AudioStreamBasicDescription is linear PCM.
+ */
++ (BOOL)isLinearPCM:(AudioStreamBasicDescription)asbd __attribute__((deprecated));
+
+///-----------------------------------------------------------
+/// @name AudioStreamBasicDescription Utilities
+///-----------------------------------------------------------
+
+/**
+ Nicely logs out the contents of an AudioStreamBasicDescription struct
+ @param asbd The AudioStreamBasicDescription struct with content to print out
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)printASBD:(AudioStreamBasicDescription)asbd __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Converts seconds into a string formatted as MM:SS
+ @param seconds An NSTimeInterval representing the number of seconds
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return An NSString instance formatted as MM:SS from the seconds provided.
+ */
++ (NSString *)displayTimeStringFromSeconds:(NSTimeInterval)seconds __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a string to use when logging out the contents of an AudioStreamBasicDescription
+ @param asbd A valid AudioStreamBasicDescription struct.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return An NSString representing the contents of the AudioStreamBasicDescription.
+ */
++ (NSString *)stringForAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Just a wrapper around the setCanonical function provided in the Core Audio Utility C++ class.
+ @param asbd The AudioStreamBasicDescription structure to modify
+ @param nChannels The number of expected channels on the description
+ @param interleaved A flag indicating whether the stereo samples should be interleaved in the buffer
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)setCanonicalAudioStreamBasicDescription:(AudioStreamBasicDescription*)asbd
+ numberOfChannels:(UInt32)nChannels
+ interleaved:(BOOL)interleaved __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+#pragma mark - Math Utilities
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Math Utilities
+///-----------------------------------------------------------
+
+/**
+ Appends an array of values to a history buffer and performs an internal shift to add the values to the tail and removes the same number of values from the head.
+ @param buffer A float array of values to append to the tail of the history buffer
+ @param bufferLength The length of the float array being appended to the history buffer
+ @param scrollHistory The target history buffer in which to append the values
+ @param scrollHistoryLength The length of the target history buffer
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)appendBufferAndShift:(float*)buffer
+ withBufferSize:(int)bufferLength
+ toScrollHistory:(float*)scrollHistory
+ withScrollHistorySize:(int)scrollHistoryLength __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Appends a value to a history buffer and performs an internal shift to add the value to the tail and remove the 0th value.
+ @param value The float value to append to the history array
+ @param scrollHistory The target history buffer in which to append the values
+ @param scrollHistoryLength The length of the target history buffer
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++(void) appendValue:(float)value
+ toScrollHistory:(float*)scrollHistory
+ withScrollHistorySize:(int)scrollHistoryLength __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Maps a value from one coordinate system into another one. Takes in the current value to map, the minimum and maximum values of the first coordinate system, and the minimum and maximum values of the second coordinate system and calculates the mapped value in the second coordinate system's constraints.
+ @param value The value expressed in the first coordinate system
+ @param leftMin The minimum of the first coordinate system
+ @param leftMax The maximum of the first coordinate system
+ @param rightMin The minimum of the second coordinate system
+ @param rightMax The maximum of the second coordinate system
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return The mapped value in terms of the second coordinate system
+ */
++ (float)MAP:(float)value
+ leftMin:(float)leftMin
+ leftMax:(float)leftMax
+ rightMin:(float)rightMin
+ rightMax:(float)rightMax __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Calculates the root mean squared for a buffer.
+ @param buffer A float buffer array of values whose root mean squared to calculate
+ @param bufferSize The size of the float buffer
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return The root mean squared of the buffer
+ */
++ (float)RMS:(float*)buffer length:(int)bufferSize __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Calculate the sign function sgn(x) =
+ { -1 , x < 0,
+ { 0 , x = 0,
+ { 1 , x > 0
+ @param value The float value for which to use as x
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return The float sign value
+ */
++ (float)SGN:(float)value __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+#pragma mark - OSStatus Utility
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name OSStatus Utility
+///-----------------------------------------------------------
+
+/**
+ Basic check result function useful for checking each step of the audio setup process
+ @param result The OSStatus representing the result of an operation
+ @param operation A string (const char, not NSString) describing the operation taking place (will print if fails)
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)checkResult:(OSStatus)result operation:(const char *)operation __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides a string representation of the often cryptic Core Audio error codes
+ @param code A UInt32 representing an error code
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ @return An NSString with a human readable version of the error code.
+ */
++ (NSString *)stringFromUInt32Code:(UInt32)code __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+#pragma mark - Plot Utility
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Plot Utility
+///-----------------------------------------------------------
+
+/**
+ Given a buffer representing a window of float history data, this appends the RMS of a buffer of incoming float data. This will likely be deprecated in a future version of EZAudio in favor of a circular buffer based approach.
+ @param scrollHistory An array of float arrays being used to hold the history values for each channel.
+ @param scrollHistoryLength An int representing the length of the history window.
+ @param index An int pointer to the index of the current read index of the history buffer.
+ @param buffer A float array representing the incoming audio data.
+ @param bufferSize An int representing the length of the incoming audio data.
+ @param isChanging A BOOL pointer representing whether the resolution (length of the history window) is currently changing.
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)updateScrollHistory:(float **)scrollHistory
+ withLength:(int)scrollHistoryLength
+ atIndex:(int *)index
+ withBuffer:(float *)buffer
+ withBufferSize:(int)bufferSize
+ isResolutionChanging:(BOOL *)isChanging __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+#pragma mark - TPCircularBuffer Utility
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name TPCircularBuffer Utility
+///-----------------------------------------------------------
+
+/**
+ Appends the data from the audio buffer list to the circular buffer
+ @param circularBuffer Pointer to the instance of the TPCircularBuffer to add the audio data to
+ @param audioBufferList Pointer to the instance of the AudioBufferList with the audio data
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)appendDataToCircularBuffer:(TPCircularBuffer*)circularBuffer
+ fromAudioBufferList:(AudioBufferList*)audioBufferList __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Initializes the circular buffer (just a wrapper around the C method)
+ @param circularBuffer Pointer to an instance of the TPCircularBuffer
+ @param size The length of the TPCircularBuffer (usually 1024)
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)circularBuffer:(TPCircularBuffer*)circularBuffer
+ withSize:(int)size __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Frees a circular buffer
+ @param circularBuffer Pointer to the circular buffer to clear
+ @deprecated This method is deprecated starting in version 0.1.0.
+ @note Please use same method in EZAudioUtilities class instead.
+ */
++ (void)freeCircularBuffer:(TPCircularBuffer*)circularBuffer __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudio.m b/Pods/EZAudio/EZAudio/EZAudio.m
new file mode 100644
index 0000000..d334e6a
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudio.m
@@ -0,0 +1,307 @@
+//
+// EZAudio.m
+// EZAudioCoreGraphicsWaveformExample
+//
+// Created by Syed Haris Ali on 5/13/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+
+#import "EZAudio.h"
+
+@implementation EZAudio
+
+//------------------------------------------------------------------------------
+#pragma mark - Debugging
+//------------------------------------------------------------------------------
+
++ (void)setShouldExitOnCheckResultFail:(BOOL)shouldExitOnCheckResultFail
+{
+ [EZAudioUtilities setShouldExitOnCheckResultFail:shouldExitOnCheckResultFail];
+}
+
+//------------------------------------------------------------------------------
+
++ (BOOL)shouldExitOnCheckResultFail
+{
+ return [EZAudioUtilities shouldExitOnCheckResultFail];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - AudioBufferList Utility
+//------------------------------------------------------------------------------
+
++ (AudioBufferList *)audioBufferListWithNumberOfFrames:(UInt32)frames
+ numberOfChannels:(UInt32)channels
+ interleaved:(BOOL)interleaved
+{
+ return [EZAudioUtilities audioBufferListWithNumberOfFrames:frames
+ numberOfChannels:channels
+ interleaved:interleaved];
+}
+
+//------------------------------------------------------------------------------
+
++ (float **)floatBuffersWithNumberOfFrames:(UInt32)frames
+ numberOfChannels:(UInt32)channels
+{
+ return [EZAudioUtilities floatBuffersWithNumberOfFrames:frames
+ numberOfChannels:channels];
+}
+
+//------------------------------------------------------------------------------
+
++ (void)freeBufferList:(AudioBufferList *)bufferList
+{
+ [EZAudioUtilities freeBufferList:bufferList];
+}
+
+//------------------------------------------------------------------------------
+
++ (void)freeFloatBuffers:(float **)buffers numberOfChannels:(UInt32)channels
+{
+ [EZAudioUtilities freeFloatBuffers:buffers numberOfChannels:channels];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - AudioStreamBasicDescription Utility
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)AIFFFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate
+{
+ return [EZAudioUtilities AIFFFormatWithNumberOfChannels:channels
+ sampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)iLBCFormatWithSampleRate:(float)sampleRate
+{
+ return [EZAudioUtilities iLBCFormatWithSampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)floatFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate
+{
+ return [EZAudioUtilities floatFormatWithNumberOfChannels:channels
+ sampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)M4AFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate
+{
+ return [EZAudioUtilities M4AFormatWithNumberOfChannels:channels
+ sampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)monoFloatFormatWithSampleRate:(float)sampleRate
+{
+ return [EZAudioUtilities monoFloatFormatWithSampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)monoCanonicalFormatWithSampleRate:(float)sampleRate
+{
+ return [EZAudioUtilities monoCanonicalFormatWithSampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)stereoCanonicalNonInterleavedFormatWithSampleRate:(float)sampleRate
+{
+ return [EZAudioUtilities stereoCanonicalNonInterleavedFormatWithSampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)stereoFloatInterleavedFormatWithSampleRate:(float)sampleRate
+{
+ return [EZAudioUtilities stereoFloatInterleavedFormatWithSampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)stereoFloatNonInterleavedFormatWithSampleRate:(float)sampleRate
+{
+ return [EZAudioUtilities stereoFloatNonInterleavedFormatWithSampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (BOOL)isFloatFormat:(AudioStreamBasicDescription)asbd
+{
+ return [EZAudioUtilities isFloatFormat:asbd];
+}
+
+//------------------------------------------------------------------------------
+
++ (BOOL)isInterleaved:(AudioStreamBasicDescription)asbd
+{
+ return [EZAudioUtilities isInterleaved:asbd];
+}
+
+//------------------------------------------------------------------------------
+
++ (BOOL)isLinearPCM:(AudioStreamBasicDescription)asbd
+{
+ return [EZAudioUtilities isLinearPCM:asbd];
+}
+
+//------------------------------------------------------------------------------
+
++ (void)printASBD:(AudioStreamBasicDescription)asbd
+{
+ [EZAudioUtilities printASBD:asbd];
+}
+
+//------------------------------------------------------------------------------
+
++ (NSString *)displayTimeStringFromSeconds:(NSTimeInterval)seconds
+{
+ return [EZAudioUtilities displayTimeStringFromSeconds:seconds];
+}
+
+//------------------------------------------------------------------------------
+
++ (NSString *)stringForAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd
+{
+ return [EZAudioUtilities stringForAudioStreamBasicDescription:asbd];
+}
+
+//------------------------------------------------------------------------------
+
++ (void)setCanonicalAudioStreamBasicDescription:(AudioStreamBasicDescription*)asbd
+ numberOfChannels:(UInt32)nChannels
+ interleaved:(BOOL)interleaved
+{
+ [EZAudioUtilities setCanonicalAudioStreamBasicDescription:asbd
+ numberOfChannels:nChannels
+ interleaved:interleaved];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Math Utilities
+//------------------------------------------------------------------------------
+
++ (void)appendBufferAndShift:(float*)buffer
+ withBufferSize:(int)bufferLength
+ toScrollHistory:(float*)scrollHistory
+ withScrollHistorySize:(int)scrollHistoryLength
+{
+ [EZAudioUtilities appendBufferAndShift:buffer
+ withBufferSize:bufferLength
+ toScrollHistory:scrollHistory
+ withScrollHistorySize:scrollHistoryLength];
+}
+
+//------------------------------------------------------------------------------
+
++ (void) appendValue:(float)value
+ toScrollHistory:(float*)scrollHistory
+ withScrollHistorySize:(int)scrollHistoryLength
+{
+ [EZAudioUtilities appendValue:value
+ toScrollHistory:scrollHistory
+ withScrollHistorySize:scrollHistoryLength];
+}
+
+//------------------------------------------------------------------------------
+
++ (float)MAP:(float)value
+ leftMin:(float)leftMin
+ leftMax:(float)leftMax
+ rightMin:(float)rightMin
+ rightMax:(float)rightMax
+{
+ return [EZAudioUtilities MAP:value
+ leftMin:leftMin
+ leftMax:leftMax
+ rightMin:rightMin
+ rightMax:rightMax];
+}
+
+//------------------------------------------------------------------------------
+
++ (float)RMS:(float *)buffer length:(int)bufferSize
+{
+ return [EZAudioUtilities RMS:buffer length:bufferSize];
+}
+
+//------------------------------------------------------------------------------
+
++ (float)SGN:(float)value
+{
+ return [EZAudioUtilities SGN:value];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - OSStatus Utility
+//------------------------------------------------------------------------------
+
++ (void)checkResult:(OSStatus)result operation:(const char *)operation
+{
+ [EZAudioUtilities checkResult:result
+ operation:operation];
+}
+
+//------------------------------------------------------------------------------
+
++ (NSString *)stringFromUInt32Code:(UInt32)code
+{
+ return [EZAudioUtilities stringFromUInt32Code:code];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Plot Utility
+//------------------------------------------------------------------------------
+
++ (void)updateScrollHistory:(float **)scrollHistory
+ withLength:(int)scrollHistoryLength
+ atIndex:(int *)index
+ withBuffer:(float *)buffer
+ withBufferSize:(int)bufferSize
+ isResolutionChanging:(BOOL *)isChanging
+{
+ [EZAudioUtilities updateScrollHistory:scrollHistory
+ withLength:scrollHistoryLength
+ atIndex:index
+ withBuffer:buffer
+ withBufferSize:bufferSize
+ isResolutionChanging:isChanging];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - TPCircularBuffer Utility
+//------------------------------------------------------------------------------
+
++ (void)appendDataToCircularBuffer:(TPCircularBuffer *)circularBuffer
+ fromAudioBufferList:(AudioBufferList *)audioBufferList
+{
+ [EZAudioUtilities appendDataToCircularBuffer:circularBuffer
+ fromAudioBufferList:audioBufferList];
+}
+
+//------------------------------------------------------------------------------
+
++ (void)circularBuffer:(TPCircularBuffer *)circularBuffer withSize:(int)size
+{
+ [EZAudioUtilities circularBuffer:circularBuffer withSize:size];
+}
+
+//------------------------------------------------------------------------------
+
++ (void)freeCircularBuffer:(TPCircularBuffer *)circularBuffer
+{
+ [EZAudioUtilities freeCircularBuffer:circularBuffer];
+}
+
+//------------------------------------------------------------------------------
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioDevice.h b/Pods/EZAudio/EZAudio/EZAudioDevice.h
new file mode 100644
index 0000000..45d58be
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioDevice.h
@@ -0,0 +1,187 @@
+//
+// EZAudioDevice.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 6/25/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import <AudioToolbox/AudioToolbox.h>
+
+#if TARGET_OS_IPHONE
+#import <AVFoundation/AVFoundation.h>
+#elif TARGET_OS_MAC
+#endif
+
+/**
+ The EZAudioDevice provides an interface for getting the available input and output hardware devices on iOS and OSX. On iOS the EZAudioDevice uses the available devices found from the AVAudioSession, while on OSX the EZAudioDevice wraps the AudioHardware API to find any devices that are connected including the built-in devices (for instance, Built-In Microphone, Display Audio). Since the AVAudioSession and AudioHardware APIs are quite different the EZAudioDevice has different properties available on each platform. The EZMicrophone now supports setting any specific EZAudioDevice from the `inputDevices` function.
+ */
+@interface EZAudioDevice : NSObject
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Methods
+//------------------------------------------------------------------------------
+
+//------------------------------------------------------------------------------
+// @name Getting The Devices
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current EZAudioDevice that is being used to pull input.
+ @return An EZAudioDevice instance representing the currently selected input device.
+ */
++ (EZAudioDevice *)currentInputDevice;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current EZAudioDevice that is being used to output audio.
+ @return An EZAudioDevice instance representing the currently selected output device.
+ */
++ (EZAudioDevice *)currentOutputDevice;
+
+//------------------------------------------------------------------------------
+
+/**
+ Enumerates all the available input devices and returns the result in an NSArray of EZAudioDevice instances.
+ @return An NSArray containing EZAudioDevice instances, one for each available input device.
+ */
++ (NSArray *)inputDevices;
+
+//------------------------------------------------------------------------------
+
+/**
+ Enumerates all the available output devices and returns the result in an NSArray of EZAudioDevice instances.
+ @return An NSArray of output EZAudioDevice instances.
+ */
++ (NSArray *)outputDevices;
+
+#if TARGET_OS_IPHONE
+
+//------------------------------------------------------------------------------
+
+/**
+ Enumerates all the available input devices.
+ - iOS only
+ @param block When enumerating this block executes repeatedly for each EZAudioDevice found. It contains two arguments - first, the EZAudioDevice found, then a pointer to a stop BOOL to allow breaking out of the enumeration.
+ */
++ (void)enumerateInputDevicesUsingBlock:(void(^)(EZAudioDevice *device,
+ BOOL *stop))block;
+
+//------------------------------------------------------------------------------
+
+/**
+ Enumerates all the available output devices.
+ - iOS only
+ @param block When enumerating this block executes repeatedly for each EZAudioDevice found. It contains two arguments - first, the EZAudioDevice found, then a pointer to a stop BOOL to allow breaking out of the enumeration.
+ */
++ (void)enumerateOutputDevicesUsingBlock:(void (^)(EZAudioDevice *device,
+ BOOL *stop))block;
+
+#elif TARGET_OS_MAC
+
+/**
+ Enumerates all the available devices and returns the result in an NSArray of EZAudioDevice instances.
+ - OSX only
+ @return An NSArray of input and output EZAudioDevice instances.
+ */
++ (NSArray *)devices;
+
+//------------------------------------------------------------------------------
+
+/**
+ Enumerates all the available devices.
+ - OSX only
+ @param block When enumerating this block executes repeatedly for each EZAudioDevice found. It contains two arguments - first, the EZAudioDevice found, then a pointer to a stop BOOL to allow breaking out of the enumeration.
+ */
++ (void)enumerateDevicesUsingBlock:(void(^)(EZAudioDevice *device,
+ BOOL *stop))block;
+
+#endif
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+/**
+ An NSString representing a human-readable version of the device.
+ */
+@property (nonatomic, copy, readonly) NSString *name;
+
+#if TARGET_OS_IPHONE
+
+/**
+ An AVAudioSessionPortDescription describing an input or output hardware port.
+ - iOS only
+ */
+@property (nonatomic, strong, readonly) AVAudioSessionPortDescription *port;
+
+//------------------------------------------------------------------------------
+
+/**
+ An AVAudioSessionDataSourceDescription describing a specific data source for the `port` provided.
+ - iOS only
+ */
+@property (nonatomic, strong, readonly) AVAudioSessionDataSourceDescription *dataSource;
+
+#elif TARGET_OS_MAC
+
+/**
+ An AudioDeviceID representing the device in the AudioHardware API.
+ - OSX only
+ */
+@property (nonatomic, assign, readonly) AudioDeviceID deviceID;
+
+//------------------------------------------------------------------------------
+
+/**
+ An NSString representing the name of the manufacturer of the device.
+ - OSX only
+ */
+@property (nonatomic, copy, readonly) NSString *manufacturer;
+
+//------------------------------------------------------------------------------
+
+/**
+ An NSInteger representing the number of input channels available.
+ - OSX only
+ */
+@property (nonatomic, assign, readonly) NSInteger inputChannelCount;
+
+//------------------------------------------------------------------------------
+
+/**
+ An NSInteger representing the number of output channels available.
+ - OSX only
+ */
+@property (nonatomic, assign, readonly) NSInteger outputChannelCount;
+
+//------------------------------------------------------------------------------
+
+/**
+ An NSString representing the persistent identifier for the AudioDevice.
+ - OSX only
+ */
+@property (nonatomic, copy, readonly) NSString *UID;
+
+#endif
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZAudioDevice.m b/Pods/EZAudio/EZAudio/EZAudioDevice.m
new file mode 100644
index 0000000..6eacc2f
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioDevice.m
@@ -0,0 +1,475 @@
+//
+// EZAudioDevice.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 6/25/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZAudioDevice.h"
+#import "EZAudioUtilities.h"
+
+@interface EZAudioDevice ()
+
+@property (nonatomic, copy, readwrite) NSString *name;
+
+#if TARGET_OS_IPHONE
+
+@property (nonatomic, strong, readwrite) AVAudioSessionPortDescription *port;
+@property (nonatomic, strong, readwrite) AVAudioSessionDataSourceDescription *dataSource;
+
+#elif TARGET_OS_MAC
+
+@property (nonatomic, assign, readwrite) AudioDeviceID deviceID;
+@property (nonatomic, copy, readwrite) NSString *manufacturer;
+@property (nonatomic, assign, readwrite) NSInteger inputChannelCount;
+@property (nonatomic, assign, readwrite) NSInteger outputChannelCount;
+@property (nonatomic, copy, readwrite) NSString *UID;
+
+#endif
+
+@end
+
+@implementation EZAudioDevice
+
+#if TARGET_OS_IPHONE
+
+//------------------------------------------------------------------------------
+
++ (EZAudioDevice *)currentInputDevice
+{
+ AVAudioSession *session = [AVAudioSession sharedInstance];
+ AVAudioSessionPortDescription *port = [[[session currentRoute] inputs] firstObject];
+ AVAudioSessionDataSourceDescription *dataSource = [session inputDataSource];
+ EZAudioDevice *device = [[EZAudioDevice alloc] init];
+ device.port = port;
+ device.dataSource = dataSource;
+ return device;
+}
+
+//------------------------------------------------------------------------------
+
++ (EZAudioDevice *)currentOutputDevice
+{
+ AVAudioSession *session = [AVAudioSession sharedInstance];
+ AVAudioSessionPortDescription *port = [[[session currentRoute] outputs] firstObject];
+ AVAudioSessionDataSourceDescription *dataSource = [session outputDataSource];
+ EZAudioDevice *device = [[EZAudioDevice alloc] init];
+ device.port = port;
+ device.dataSource = dataSource;
+ return device;
+}
+
+//------------------------------------------------------------------------------
+
++ (NSArray *)inputDevices
+{
+ __block NSMutableArray *devices = [NSMutableArray array];
+ [self enumerateInputDevicesUsingBlock:^(EZAudioDevice *device, BOOL *stop)
+ {
+ [devices addObject:device];
+ }];
+ return devices;
+}
+
+//------------------------------------------------------------------------------
+
++ (NSArray *)outputDevices
+{
+ __block NSMutableArray *devices = [NSMutableArray array];
+ [self enumerateOutputDevicesUsingBlock:^(EZAudioDevice *device, BOOL *stop)
+ {
+ [devices addObject:device];
+ }];
+ return devices;
+}
+
+//------------------------------------------------------------------------------
+
++ (void)enumerateInputDevicesUsingBlock:(void (^)(EZAudioDevice *, BOOL *))block
+{
+ if (!block)
+ {
+ return;
+ }
+
+ NSArray *inputs = [[AVAudioSession sharedInstance] availableInputs];
+ if (inputs == nil)
+ {
+ NSLog(@"Audio session is not active! In order to enumerate the audio devices you must set the category and set active the audio session for your iOS app before calling this function.");
+ return;
+ }
+
+ BOOL stop = NO;
+ for (AVAudioSessionPortDescription *inputDevicePortDescription in inputs)
+ {
+ // add any additional sub-devices
+ NSArray *dataSources = [inputDevicePortDescription dataSources];
+ if (dataSources.count)
+ {
+ for (AVAudioSessionDataSourceDescription *inputDeviceDataSourceDescription in dataSources)
+ {
+ EZAudioDevice *device = [[EZAudioDevice alloc] init];
+ device.port = inputDevicePortDescription;
+ device.dataSource = inputDeviceDataSourceDescription;
+ block(device, &stop);
+ }
+ }
+ else
+ {
+ EZAudioDevice *device = [[EZAudioDevice alloc] init];
+ device.port = inputDevicePortDescription;
+ block(device, &stop);
+ }
+ }
+}
+
+//------------------------------------------------------------------------------
+
++ (void)enumerateOutputDevicesUsingBlock:(void (^)(EZAudioDevice *, BOOL *))block
+{
+ if (!block)
+ {
+ return;
+ }
+
+ AVAudioSessionRouteDescription *currentRoute = [[AVAudioSession sharedInstance] currentRoute];
+ NSArray *portDescriptions = [currentRoute outputs];
+
+ BOOL stop = NO;
+ for (AVAudioSessionPortDescription *outputDevicePortDescription in portDescriptions)
+ {
+ // add any additional sub-devices
+ NSArray *dataSources = [outputDevicePortDescription dataSources];
+ if (dataSources.count)
+ {
+ for (AVAudioSessionDataSourceDescription *outputDeviceDataSourceDescription in dataSources)
+ {
+ EZAudioDevice *device = [[EZAudioDevice alloc] init];
+ device.port = outputDevicePortDescription;
+ device.dataSource = outputDeviceDataSourceDescription;
+ block(device, &stop);
+ }
+ }
+ else
+ {
+ EZAudioDevice *device = [[EZAudioDevice alloc] init];
+ device.port = outputDevicePortDescription;
+ block(device, &stop);
+ }
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (NSString *)name
+{
+ NSMutableString *name = [NSMutableString string];
+ if (self.port)
+ {
+ [name appendString:self.port.portName];
+ }
+ if (self.dataSource)
+ {
+ [name appendFormat:@": %@", self.dataSource.dataSourceName];
+ }
+ return name;
+}
+
+//------------------------------------------------------------------------------
+
+- (NSString *)description
+{
+ return [NSString stringWithFormat:@"%@ { port: %@, data source: %@ }",
+ [super description],
+ self.port,
+ self.dataSource];
+}
+
+//------------------------------------------------------------------------------
+
+- (BOOL)isEqual:(id)object
+{
+ if ([object isKindOfClass:self.class])
+ {
+ EZAudioDevice *device = (EZAudioDevice *)object;
+ BOOL isPortUIDEqual = [device.port.UID isEqualToString:self.port.UID];
+ BOOL isDataSourceIDEqual = device.dataSource.dataSourceID.longValue == self.dataSource.dataSourceID.longValue;
+ return isPortUIDEqual && isDataSourceIDEqual;
+ }
+ else
+ {
+ return [super isEqual:object];
+ }
+}
+
+#elif TARGET_OS_MAC
+
++ (void)enumerateDevicesUsingBlock:(void(^)(EZAudioDevice *device,
+ BOOL *stop))block
+{
+ if (!block)
+ {
+ return;
+ }
+
+ // get the present system devices
+ AudioObjectPropertyAddress address = [self addressForPropertySelector:kAudioHardwarePropertyDevices];
+ UInt32 devicesDataSize;
+ [EZAudioUtilities checkResult:AudioObjectGetPropertyDataSize(kAudioObjectSystemObject,
+ &address,
+ 0,
+ NULL,
+ &devicesDataSize)
+ operation:"Failed to get data size"];
+
+ // enumerate devices
+ NSInteger count = devicesDataSize / sizeof(AudioDeviceID);
+ AudioDeviceID *deviceIDs = (AudioDeviceID *)malloc(devicesDataSize);
+
+ // fill in the devices
+ [EZAudioUtilities checkResult:AudioObjectGetPropertyData(kAudioObjectSystemObject,
+ &address,
+ 0,
+ NULL,
+ &devicesDataSize,
+ deviceIDs)
+ operation:"Failed to get device IDs for available devices on OSX"];
+
+ BOOL stop = NO;
+ for (UInt32 i = 0; i < count; i++)
+ {
+ AudioDeviceID deviceID = deviceIDs[i];
+ EZAudioDevice *device = [[EZAudioDevice alloc] init];
+ device.deviceID = deviceID;
+ device.manufacturer = [self manufacturerForDeviceID:deviceID];
+ device.name = [self namePropertyForDeviceID:deviceID];
+ device.UID = [self UIDPropertyForDeviceID:deviceID];
+ device.inputChannelCount = [self channelCountForScope:kAudioObjectPropertyScopeInput forDeviceID:deviceID];
+ device.outputChannelCount = [self channelCountForScope:kAudioObjectPropertyScopeOutput forDeviceID:deviceID];
+ block(device, &stop);
+ if (stop)
+ {
+ break;
+ }
+ }
+
+ free(deviceIDs);
+}
+
+//------------------------------------------------------------------------------
+
++ (NSArray *)devices
+{
+ __block NSMutableArray *devices = [NSMutableArray array];
+ [self enumerateDevicesUsingBlock:^(EZAudioDevice *device, BOOL *stop)
+ {
+ [devices addObject:device];
+ }];
+ return devices;
+}
+
+//------------------------------------------------------------------------------
+
++ (EZAudioDevice *)deviceWithPropertySelector:(AudioObjectPropertySelector)propertySelector
+{
+ AudioDeviceID deviceID;
+ UInt32 propSize = sizeof(AudioDeviceID);
+ AudioObjectPropertyAddress address = [self addressForPropertySelector:propertySelector];
+ [EZAudioUtilities checkResult:AudioObjectGetPropertyData(kAudioObjectSystemObject,
+ &address,
+ 0,
+ NULL,
+ &propSize,
+ &deviceID)
+ operation:"Failed to get device on OSX"];
+ EZAudioDevice *device = [[EZAudioDevice alloc] init];
+ device.deviceID = deviceID;
+ device.manufacturer = [self manufacturerForDeviceID:deviceID];
+ device.name = [self namePropertyForDeviceID:deviceID];
+ device.UID = [self UIDPropertyForDeviceID:deviceID];
+ device.inputChannelCount = [self channelCountForScope:kAudioObjectPropertyScopeInput forDeviceID:deviceID];
+ device.outputChannelCount = [self channelCountForScope:kAudioObjectPropertyScopeOutput forDeviceID:deviceID];
+ return device;
+}
+
+//------------------------------------------------------------------------------
+
++ (EZAudioDevice *)currentInputDevice
+{
+ return [self deviceWithPropertySelector:kAudioHardwarePropertyDefaultInputDevice];
+}
+
+//------------------------------------------------------------------------------
+
++ (EZAudioDevice *)currentOutputDevice
+{
+ return [self deviceWithPropertySelector:kAudioHardwarePropertyDefaultOutputDevice];
+}
+
+//------------------------------------------------------------------------------
+
++ (NSArray *)inputDevices
+{
+ __block NSMutableArray *devices = [NSMutableArray array];
+ [self enumerateDevicesUsingBlock:^(EZAudioDevice *device, BOOL *stop)
+ {
+ if (device.inputChannelCount > 0)
+ {
+ [devices addObject:device];
+ }
+ }];
+ return devices;
+}
+
+//------------------------------------------------------------------------------
+
++ (NSArray *)outputDevices
+{
+ __block NSMutableArray *devices = [NSMutableArray array];
+ [self enumerateDevicesUsingBlock:^(EZAudioDevice *device, BOOL *stop)
+ {
+ if (device.outputChannelCount > 0)
+ {
+ [devices addObject:device];
+ }
+ }];
+ return devices;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Utility
+//------------------------------------------------------------------------------
+
++ (AudioObjectPropertyAddress)addressForPropertySelector:(AudioObjectPropertySelector)selector
+{
+ AudioObjectPropertyAddress address;
+ address.mScope = kAudioObjectPropertyScopeGlobal;
+ address.mElement = kAudioObjectPropertyElementMaster;
+ address.mSelector = selector;
+ return address;
+}
+
+//------------------------------------------------------------------------------
+
++ (NSString *)stringPropertyForSelector:(AudioObjectPropertySelector)selector
+ withDeviceID:(AudioDeviceID)deviceID
+{
+ AudioObjectPropertyAddress address = [self addressForPropertySelector:selector];
+ CFStringRef string;
+ UInt32 propSize = sizeof(CFStringRef);
+ NSString *errorString = [NSString stringWithFormat:@"Failed to get device property (%u)",(unsigned int)selector];
+ [EZAudioUtilities checkResult:AudioObjectGetPropertyData(deviceID,
+ &address,
+ 0,
+ NULL,
+ &propSize,
+ &string)
+ operation:errorString.UTF8String];
+ return (__bridge_transfer NSString *)string;
+}
+
+//------------------------------------------------------------------------------
+
++ (NSInteger)channelCountForScope:(AudioObjectPropertyScope)scope
+ forDeviceID:(AudioDeviceID)deviceID
+{
+ AudioObjectPropertyAddress address;
+ address.mScope = scope;
+ address.mElement = kAudioObjectPropertyElementMaster;
+ address.mSelector = kAudioDevicePropertyStreamConfiguration;
+
+ AudioBufferList streamConfiguration;
+ UInt32 propSize = sizeof(streamConfiguration);
+ [EZAudioUtilities checkResult:AudioObjectGetPropertyData(deviceID,
+ &address,
+ 0,
+ NULL,
+ &propSize,
+ &streamConfiguration)
+ operation:"Failed to get frame size"];
+
+ NSInteger channelCount = 0;
+ for (NSInteger i = 0; i < streamConfiguration.mNumberBuffers; i++)
+ {
+ channelCount += streamConfiguration.mBuffers[i].mNumberChannels;
+ }
+
+ return channelCount;
+}
+
+//------------------------------------------------------------------------------
+
++ (NSString *)manufacturerForDeviceID:(AudioDeviceID)deviceID
+{
+ return [self stringPropertyForSelector:kAudioDevicePropertyDeviceManufacturerCFString
+ withDeviceID:deviceID];
+}
+
+//------------------------------------------------------------------------------
+
++ (NSString *)namePropertyForDeviceID:(AudioDeviceID)deviceID
+{
+ return [self stringPropertyForSelector:kAudioDevicePropertyDeviceNameCFString
+ withDeviceID:deviceID];
+}
+
+//------------------------------------------------------------------------------
+
++ (NSString *)UIDPropertyForDeviceID:(AudioDeviceID)deviceID
+{
+ return [self stringPropertyForSelector:kAudioDevicePropertyDeviceUID
+ withDeviceID:deviceID];
+}
+
+//------------------------------------------------------------------------------
+
+- (NSString *)description
+{
+ return [NSString stringWithFormat:@"%@ { deviceID: %i, manufacturer: %@, name: %@, UID: %@, inputChannelCount: %ld, outputChannelCount: %ld }",
+ [super description],
+ self.deviceID,
+ self.manufacturer,
+ self.name,
+ self.UID,
+ self.inputChannelCount,
+ self.outputChannelCount];
+}
+
+//------------------------------------------------------------------------------
+
+- (BOOL)isEqual:(id)object
+{
+ if ([object isKindOfClass:self.class])
+ {
+ EZAudioDevice *device = (EZAudioDevice *)object;
+ return [self.UID isEqualToString:device.UID];
+ }
+ else
+ {
+ return [super isEqual:object];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+#endif
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZAudioDisplayLink.h b/Pods/EZAudio/EZAudio/EZAudioDisplayLink.h
new file mode 100644
index 0000000..d3c6c94
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioDisplayLink.h
@@ -0,0 +1,94 @@
+//
+// EZAudioDisplayLink.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 6/25/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import <QuartzCore/QuartzCore.h>
+
+@class EZAudioDisplayLink;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioDisplayLinkDelegate
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioDisplayLinkDelegate provides a means for an EZAudioDisplayLink instance to notify a receiver when it should redraw itself.
+ */
+@protocol EZAudioDisplayLinkDelegate <NSObject>
+
+@required
+/**
+ Required method for an EZAudioDisplayLinkDelegate to implement. This fires at the screen's display rate (typically 60 fps).
+ @param displayLink An EZAudioDisplayLink instance used by a receiver to draw itself at the screen's refresh rate.
+ */
+- (void)displayLinkNeedsDisplay:(EZAudioDisplayLink *)displayLink;
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioDisplayLink
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioDisplayLink provides a cross-platform (iOS and Mac) abstraction over the CADisplayLink for iOS and CVDisplayLink for Mac. The purpose of this class is to provide an accurate timer for views that need to redraw themselves at 60 fps. This class is used by the EZAudioPlot and, eventually, the EZAudioPlotGL to provide a timer mechanism to draw real-time plots.
+ */
+@interface EZAudioDisplayLink : NSObject
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Methods
+//------------------------------------------------------------------------------
+
+/**
+ Class method to create an EZAudioDisplayLink. The caller should implement the EZAudioDisplayLinkDelegate protocol to receive the `displayLinkNeedsDisplay:` delegate method to know when to redraw itself.
+ @param delegate An instance that implements the EZAudioDisplayLinkDelegate protocol.
+ @return An instance of the EZAudioDisplayLink.
+ */
++ (instancetype)displayLinkWithDelegate:(id<EZAudioDisplayLinkDelegate>)delegate;
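+
+// Example (an illustrative sketch, not part of the original header): a view
+// that adopts EZAudioDisplayLinkDelegate and redraws whenever the link fires.
+// The view class name and its NSView-style redraw call are assumptions; an
+// iOS view would call setNeedsDisplay instead.
+//
+//    @interface MyPlotView : NSView <EZAudioDisplayLinkDelegate>
+//    @property (nonatomic, strong) EZAudioDisplayLink *displayLink;
+//    @end
+//
+//    @implementation MyPlotView
+//
+//    - (void)awakeFromNib
+//    {
+//        self.displayLink = [EZAudioDisplayLink displayLinkWithDelegate:self];
+//        [self.displayLink start];
+//    }
+//
+//    - (void)displayLinkNeedsDisplay:(EZAudioDisplayLink *)displayLink
+//    {
+//        [self setNeedsDisplay:YES]; // redraw on every display link tick
+//    }
+//
+//    @end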
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioDisplayLinkDelegate for which to receive the redraw calls.
+ */
+@property (nonatomic, weak) id<EZAudioDisplayLinkDelegate> delegate;
+
+//------------------------------------------------------------------------------
+#pragma mark - Instance Methods
+//------------------------------------------------------------------------------
+
+/**
+ Method to start the display link and provide the `displayLinkNeedsDisplay:` calls to the `delegate`
+ */
+- (void)start;
+
+/**
+ Method to stop the display link from providing the `displayLinkNeedsDisplay:` calls to the `delegate`
+ */
+- (void)stop;
+
+//------------------------------------------------------------------------------
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioDisplayLink.m b/Pods/EZAudio/EZAudio/EZAudioDisplayLink.m
new file mode 100644
index 0000000..d649cf5
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioDisplayLink.m
@@ -0,0 +1,180 @@
+//
+// EZAudioDisplayLink.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 6/25/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZAudioDisplayLink.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - CVDisplayLink Callback (Declaration)
+//------------------------------------------------------------------------------
+
+#if TARGET_OS_IPHONE
+#elif TARGET_OS_MAC
+static CVReturn EZAudioDisplayLinkCallback(CVDisplayLinkRef displayLinkRef,
+ const CVTimeStamp *now,
+ const CVTimeStamp *outputTime,
+ CVOptionFlags flagsIn,
+ CVOptionFlags *flagsOut,
+ void *displayLinkContext);
+#endif
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioDisplayLink (Interface Extension)
+//------------------------------------------------------------------------------
+
+@interface EZAudioDisplayLink ()
+#if TARGET_OS_IPHONE
+@property (nonatomic, strong) CADisplayLink *displayLink;
+#elif TARGET_OS_MAC
+@property (nonatomic, assign) CVDisplayLinkRef displayLink;
+#endif
+@property (nonatomic, assign) BOOL stopped;
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioDisplayLink (Implementation)
+//------------------------------------------------------------------------------
+
+@implementation EZAudioDisplayLink
+
+//------------------------------------------------------------------------------
+#pragma mark - Dealloc
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+#if TARGET_OS_IPHONE
+ [self.displayLink invalidate];
+#elif TARGET_OS_MAC
+ CVDisplayLinkStop(self.displayLink);
+ CVDisplayLinkRelease(self.displayLink);
+ self.displayLink = nil;
+#endif
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initialization
+//------------------------------------------------------------------------------
+
++ (instancetype)displayLinkWithDelegate:(id<EZAudioDisplayLinkDelegate>)delegate
+{
+ EZAudioDisplayLink *displayLink = [[self alloc] init];
+ displayLink.delegate = delegate;
+ return displayLink;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initialization
+//------------------------------------------------------------------------------
+
+- (instancetype) init
+{
+ self = [super init];
+ if (self)
+ {
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setup
+//------------------------------------------------------------------------------
+
+- (void)setup
+{
+ self.stopped = YES;
+#if TARGET_OS_IPHONE
+ self.displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(update)];
+ [self.displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSDefaultRunLoopMode];
+#elif TARGET_OS_MAC
+ CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink);
+ CVDisplayLinkSetOutputCallback(self.displayLink,
+ EZAudioDisplayLinkCallback,
+ (__bridge void *)(self));
+ CVDisplayLinkStart(self.displayLink);
+#endif
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Actions
+//------------------------------------------------------------------------------
+
+- (void)start
+{
+#if TARGET_OS_IPHONE
+ self.displayLink.paused = NO;
+#elif TARGET_OS_MAC
+ CVDisplayLinkStart(self.displayLink);
+#endif
+ self.stopped = NO;
+}
+
+//------------------------------------------------------------------------------
+
+- (void)stop
+{
+#if TARGET_OS_IPHONE
+ self.displayLink.paused = YES;
+#elif TARGET_OS_MAC
+ CVDisplayLinkStop(self.displayLink);
+#endif
+ self.stopped = YES;
+}
+
+//------------------------------------------------------------------------------
+
+- (void)update
+{
+ if (!self.stopped)
+ {
+ if ([self.delegate respondsToSelector:@selector(displayLinkNeedsDisplay:)])
+ {
+ [self.delegate displayLinkNeedsDisplay:self];
+ }
+ }
+}
+
+//------------------------------------------------------------------------------
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - CVDisplayLink Callback (Implementation)
+//------------------------------------------------------------------------------
+
+#if TARGET_OS_IPHONE
+#elif TARGET_OS_MAC
+static CVReturn EZAudioDisplayLinkCallback(CVDisplayLinkRef displayLinkRef,
+ const CVTimeStamp *now,
+ const CVTimeStamp *outputTime,
+ CVOptionFlags flagsIn,
+ CVOptionFlags *flagsOut,
+ void *displayLinkContext)
+{
+ EZAudioDisplayLink *displayLink = (__bridge EZAudioDisplayLink*)displayLinkContext;
+ [displayLink update];
+ return kCVReturnSuccess;
+}
+#endif
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioFFT.h b/Pods/EZAudio/EZAudio/EZAudioFFT.h
new file mode 100644
index 0000000..6085497
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioFFT.h
@@ -0,0 +1,392 @@
+//
+// EZAudioFFT.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 7/10/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import <Accelerate/Accelerate.h>
+
+@class EZAudioFFT;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFFTDelegate
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioFFTDelegate provides event callbacks for the EZAudioFFT (and subclasses such as the EZAudioFFTRolling) whenever the FFT is computed.
+ */
+@protocol EZAudioFFTDelegate <NSObject>
+
+@optional
+
+///-----------------------------------------------------------
+/// @name Getting FFT Output Data
+///-----------------------------------------------------------
+
+/**
+ Triggered when the EZAudioFFT computes an FFT from a buffer of input data. Provides an array of float data representing the computed FFT.
+ @param fft The EZAudioFFT instance that triggered the event.
+ @param fftData A float pointer representing the float array of FFT data.
+ @param bufferSize A vDSP_Length (unsigned long) representing the length of the float array.
+ */
+- (void) fft:(EZAudioFFT *)fft
+ updatedWithFFTData:(float *)fftData
+ bufferSize:(vDSP_Length)bufferSize;
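+
+// Example (an illustrative sketch, not part of the original header): a minimal
+// delegate implementation that logs the strongest frequency component after
+// each FFT pass. Assumes the receiver conforms to EZAudioFFTDelegate.
+//
+//    - (void)        fft:(EZAudioFFT *)fft
+//     updatedWithFFTData:(float *)fftData
+//             bufferSize:(vDSP_Length)bufferSize
+//    {
+//        NSLog(@"Strongest component: %f Hz", fft.maxFrequency);
+//    }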
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFFT
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioFFT provides a base class to quickly calculate the FFT of incoming audio data using the Accelerate framework. In addition, the EZAudioFFT contains an EZAudioFFTDelegate to receive an event anytime an FFT is computed.
+ */
+@interface EZAudioFFT : NSObject
+
+//------------------------------------------------------------------------------
+#pragma mark - Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Initializers
+///-----------------------------------------------------------
+
+/**
+ Initializes an EZAudioFFT (or subclass) instance with a maximum buffer size and sample rate. The sample rate is used specifically to calculate the `maxFrequency` property. If you don't care about the `maxFrequency` property then you can set the sample rate to 0.
+ @param maximumBufferSize A vDSP_Length (unsigned long) representing the maximum length of the incoming audio data.
+ @param sampleRate A float representing the sample rate of the incoming audio data.
+
+ @return A newly created EZAudioFFT (or subclass) instance.
+ */
+- (instancetype)initWithMaximumBufferSize:(vDSP_Length)maximumBufferSize
+ sampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Initializes an EZAudioFFT (or subclass) instance with a maximum buffer size, sample rate, and EZAudioFFTDelegate. The sample rate is used specifically to calculate the `maxFrequency` property. If you don't care about the `maxFrequency` property then you can set the sample rate to 0. The EZAudioFFTDelegate will act as a receiver to get an event whenever the FFT is calculated.
+ @param maximumBufferSize A vDSP_Length (unsigned long) representing the maximum length of the incoming audio data.
+ @param sampleRate A float representing the sample rate of the incoming audio data.
+ @param delegate An EZAudioFFTDelegate to receive an event whenever the FFT is calculated.
+ @return A newly created EZAudioFFT (or subclass) instance.
+ */
+- (instancetype)initWithMaximumBufferSize:(vDSP_Length)maximumBufferSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Class Initializers
+///-----------------------------------------------------------
+
+/**
+ Class method to initialize an EZAudioFFT (or subclass) instance with a maximum buffer size and sample rate. The sample rate is used specifically to calculate the `maxFrequency` property. If you don't care about the `maxFrequency` property then you can set the sample rate to 0.
+ @param maximumBufferSize A vDSP_Length (unsigned long) representing the maximum length of the incoming audio data.
+ @param sampleRate A float representing the sample rate of the incoming audio data.
+ @return A newly created EZAudioFFT (or subclass) instance.
+ */
++ (instancetype)fftWithMaximumBufferSize:(vDSP_Length)maximumBufferSize
+ sampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class method to initialize an EZAudioFFT (or subclass) instance with a maximum buffer size, sample rate, and EZAudioFFTDelegate. The sample rate is used specifically to calculate the `maxFrequency` property. If you don't care about the `maxFrequency` property then you can set the sample rate to 0. The EZAudioFFTDelegate will act as a receiver to get an event whenever the FFT is calculated.
+ @param maximumBufferSize A vDSP_Length (unsigned long) representing the maximum length of the incoming audio data.
+ @param sampleRate A float representing the sample rate of the incoming audio data.
+ @param delegate An EZAudioFFTDelegate to receive an event whenever the FFT is calculated.
+ @return A newly created EZAudioFFT (or subclass) instance.
+ */
++ (instancetype)fftWithMaximumBufferSize:(vDSP_Length)maximumBufferSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Properties
+///-----------------------------------------------------------
+
+/**
+ An EZAudioFFTDelegate to receive an event whenever the FFT is calculated.
+ */
+@property (weak, nonatomic) id<EZAudioFFTDelegate> delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ A COMPLEX_SPLIT data structure used to hold the FFT's imaginary and real components.
+ */
+@property (readonly, nonatomic) COMPLEX_SPLIT complexSplit;
+
+//------------------------------------------------------------------------------
+
+/**
+ A float array containing the last calculated FFT data.
+ */
+@property (readonly, nonatomic) float *fftData;
+
+//------------------------------------------------------------------------------
+
+/**
+ An FFTSetup data structure used to internally calculate the FFT using Accelerate.
+ */
+@property (readonly, nonatomic) FFTSetup fftSetup;
+
+//------------------------------------------------------------------------------
+
+/**
+ A float array containing the last calculated inverse FFT data (the time domain signal).
+ */
+@property (readonly, nonatomic) float *inversedFFTData;
+
+//------------------------------------------------------------------------------
+
+/**
+ A float representing the frequency with the highest energy in the last FFT calculation.
+ */
+@property (readonly, nonatomic) float maxFrequency;
+
+//------------------------------------------------------------------------------
+
+/**
+ A vDSP_Length (unsigned long) representing the index of the frequency with the highest energy in the last FFT calculation.
+ */
+@property (readonly, nonatomic) vDSP_Length maxFrequencyIndex;
+
+//------------------------------------------------------------------------------
+
+/**
+ A float representing the magnitude of the frequency with the highest energy in the last FFT calculation.
+ */
+@property (readonly, nonatomic) float maxFrequencyMagnitude;
+
+//------------------------------------------------------------------------------
+
+/**
+ A vDSP_Length (unsigned long) representing the maximum buffer size. This is the maximum length of the incoming audio data passed to the `computeFFTWithBuffer:withBufferSize:` method.
+ */
+@property (readonly, nonatomic) vDSP_Length maximumBufferSize;
+
+//------------------------------------------------------------------------------
+
+/**
+ A float representing the sample rate of the incoming audio data.
+ */
+@property (readwrite, nonatomic) float sampleRate;
+
+//------------------------------------------------------------------------------
+#pragma mark - Actions
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Computing The FFT
+///-----------------------------------------------------------
+
+/**
+ Computes the FFT for a float array representing an incoming audio signal. This will trigger the EZAudioFFTDelegate method `fft:updatedWithFFTData:bufferSize:`.
+ @param buffer A float array representing the audio data.
+ @param bufferSize The length of the float array of audio data.
+ @return A float array containing the computed FFT data. The length of the output will be half the incoming buffer (half the `bufferSize` argument).
+ */
+- (float *)computeFFTWithBuffer:(float *)buffer withBufferSize:(UInt32)bufferSize;
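+
+// Example (an illustrative sketch, not part of the original header): computing
+// an FFT for a 1024-sample float buffer, e.g. one handed to you by an audio
+// callback. The `buffer` variable is assumed to be supplied by the caller.
+//
+//    EZAudioFFT *fft = [EZAudioFFT fftWithMaximumBufferSize:1024
+//                                                sampleRate:44100.0f];
+//    float *fftData = [fft computeFFTWithBuffer:buffer withBufferSize:1024];
+//    // fftData now holds 512 magnitude values (half the buffer size)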
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the frequency corresponding to an index in the last computed FFT data.
+ @param index A vDSP_Length (unsigned integer) representing the index of the frequency bin value you'd like to get
+ @return A float representing the frequency value at that index.
+ */
+- (float)frequencyAtIndex:(vDSP_Length)index;
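+
+// Worked example (assuming a 44.1 kHz sample rate and a 1024-sample buffer,
+// which yields 512 FFT bins): each bin spans 22050 / 512, about 43.07 Hz, so
+// [fft frequencyAtIndex:10] returns roughly 430.7 Hz.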
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the magnitude of the frequency corresponding to an index in the last computed FFT data.
+ @param index A vDSP_Length (unsigned integer) representing the index of the frequency bin value you'd like to get
+ @return A float representing the frequency magnitude value at that index.
+ */
+- (float)frequencyMagnitudeAtIndex:(vDSP_Length)index;
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFFTRolling
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioFFTRolling, a subclass of EZAudioFFT, provides a class to calculate an FFT for an incoming audio signal while maintaining a history of audio data to allow much higher resolution FFTs. For instance, the EZMicrophone typically provides 512 frames at a time, but you would probably want to provide 2048 or 4096 frames for a decent looking FFT if you're trying to extract precise frequency components. You will typically be using this class for variable length FFTs instead of the EZAudioFFT base class.
+ */
+@interface EZAudioFFTRolling : EZAudioFFT
+
+//------------------------------------------------------------------------------
+#pragma mark - Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Initializers
+///-----------------------------------------------------------
+
+/**
+ Initializes an EZAudioFFTRolling instance with a window size and a sample rate. The EZAudioFFTRolling has an internal EZPlotHistoryInfo data structure that writes audio data to a circular buffer and manages sliding windows of audio data to support efficient, large FFT calculations. Here you provide a window size that represents how many audio samples will be used to calculate the FFT and a float representing the sample rate of the incoming audio (can be 0 if you don't care about the `maxFrequency` property). The history buffer size in this case is the `windowSize` * 8, which is pretty good for most cases.
+ @param windowSize A vDSP_Length (unsigned long) representing the size of the window (i.e. the resolution) of data that should be used to calculate the FFT. A typical value for this would be something like 1024 - 4096 (or higher for an even higher resolution FFT).
+ @param sampleRate A float representing the sample rate of the incoming audio signal.
+ @return A newly created EZAudioFFTRolling instance.
+ */
+- (instancetype)initWithWindowSize:(vDSP_Length)windowSize
+ sampleRate:(float)sampleRate;
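+
+// Example (an illustrative sketch, not part of the original header): a rolling
+// FFT that accumulates small incoming buffers (say 512 frames at a time) into
+// a 4096-sample window for a higher resolution spectrum. `self` is assumed to
+// conform to EZAudioFFTDelegate.
+//
+//    EZAudioFFTRolling *rollingFFT = [[EZAudioFFTRolling alloc] initWithWindowSize:4096
+//                                                                       sampleRate:44100.0f];
+//    rollingFFT.delegate = self;
+//    // feed audio as it arrives:
+//    // [rollingFFT computeFFTWithBuffer:buffer withBufferSize:512];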
+
+//------------------------------------------------------------------------------
+
+/**
+ Initializes an EZAudioFFTRolling instance with a window size, a sample rate, and an EZAudioFFTDelegate. The EZAudioFFTRolling has an internal EZPlotHistoryInfo data structure that writes audio data to a circular buffer and manages sliding windows of audio data to support efficient, large FFT calculations. Here you provide a window size that represents how many audio samples will be used to calculate the FFT, a float representing the sample rate of the incoming audio (can be 0 if you don't care about the `maxFrequency` property), and an EZAudioFFTDelegate to receive a callback anytime the FFT is calculated. The history buffer size in this case is the `windowSize` * 8, which is pretty good for most cases.
+ @param windowSize A vDSP_Length (unsigned long) representing the size of the window (i.e. the resolution) of data that should be used to calculate the FFT. A typical value for this would be something like 1024 - 4096 (or higher for an even higher resolution FFT).
+ @param sampleRate A float representing the sample rate of the incoming audio signal.
+ @param delegate An EZAudioFFTDelegate to receive an event whenever the FFT is calculated.
+ @return A newly created EZAudioFFTRolling instance.
+ */
+- (instancetype)initWithWindowSize:(vDSP_Length)windowSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Initializes an EZAudioFFTRolling instance with a window size, a history buffer size, and a sample rate. The EZAudioFFTRolling has an internal EZPlotHistoryInfo data structure that writes audio data to a circular buffer and manages sliding windows of audio data to support efficient, large FFT calculations. Here you provide a window size that represents how many audio samples will be used to calculate the FFT, a history buffer size representing the maximum length of the sliding window's underlying circular buffer, and a float representing the sample rate of the incoming audio (can be 0 if you don't care about the `maxFrequency` property).
+ @param windowSize A vDSP_Length (unsigned long) representing the size of the window (i.e. the resolution) of data that should be used to calculate the FFT. A typical value for this would be something like 1024 - 4096 (or higher for an even higher resolution FFT).
+ @param historyBufferSize A vDSP_Length (unsigned long) representing the length of the history buffer. This should be AT LEAST the size of the window. A recommended value for this would be at least 8x greater than the `windowSize` argument.
+ @param sampleRate A float representing the sample rate of the incoming audio signal.
+ @return A newly created EZAudioFFTRolling instance.
+ */
+- (instancetype)initWithWindowSize:(vDSP_Length)windowSize
+ historyBufferSize:(vDSP_Length)historyBufferSize
+ sampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Initializes an EZAudioFFTRolling instance with a window size, a history buffer size, a sample rate, and an EZAudioFFTDelegate. The EZAudioFFTRolling has an internal EZPlotHistoryInfo data structure that writes audio data to a circular buffer and manages sliding windows of audio data to support efficient, large FFT calculations. Here you provide a window size that represents how many audio samples will be used to calculate the FFT, a history buffer size representing the maximum length of the sliding window's underlying circular buffer, a float representing the sample rate of the incoming audio (can be 0 if you don't care about the `maxFrequency` property), and an EZAudioFFTDelegate to receive a callback anytime the FFT is calculated.
+ @param windowSize A vDSP_Length (unsigned long) representing the size of the window (i.e. the resolution) of data that should be used to calculate the FFT. A typical value for this would be something like 1024 - 4096 (or higher for an even higher resolution FFT).
+ @param historyBufferSize A vDSP_Length (unsigned long) representing the length of the history buffer. This should be AT LEAST the size of the window. A recommended value for this would be at least 8x greater than the `windowSize` argument.
+ @param sampleRate A float representing the sample rate of the incoming audio signal.
+ @param delegate An EZAudioFFTDelegate to receive an event whenever the FFT is calculated.
+ @return A newly created EZAudioFFTRolling instance.
+ */
+- (instancetype)initWithWindowSize:(vDSP_Length)windowSize
+ historyBufferSize:(vDSP_Length)historyBufferSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Class Initializers
+///-----------------------------------------------------------
+
+/**
+ Class method to initialize an EZAudioFFTRolling instance with a window size and a sample rate. The EZAudioFFTRolling has an internal EZPlotHistoryInfo data structure that writes audio data to a circular buffer and manages sliding windows of audio data to support efficient, large FFT calculations. Here you provide a window size that represents how many audio samples will be used to calculate the FFT and a float representing the sample rate of the incoming audio (can be 0 if you don't care about the `maxFrequency` property). The history buffer size in this case is the `windowSize` * 8, which is pretty good for most cases.
+ @param windowSize A vDSP_Length (unsigned long) representing the size of the window (i.e. the resolution) of data that should be used to calculate the FFT. A typical value for this would be something like 1024 - 4096 (or higher for an even higher resolution FFT).
+ @param sampleRate A float representing the sample rate of the incoming audio signal.
+ @return A newly created EZAudioFFTRolling instance.
+ */
++ (instancetype)fftWithWindowSize:(vDSP_Length)windowSize
+ sampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class method to initialize an EZAudioFFTRolling instance with a window size, a sample rate, and an EZAudioFFTDelegate. The EZAudioFFTRolling has an internal EZPlotHistoryInfo data structure that writes audio data to a circular buffer and manages sliding windows of audio data to support efficient, large FFT calculations. Here you provide a window size that represents how many audio samples will be used to calculate the FFT, a float representing the sample rate of the incoming audio (can be 0 if you don't care about the `maxFrequency` property), and an EZAudioFFTDelegate to receive a callback anytime the FFT is calculated. The history buffer size in this case is the `windowSize` * 8, which is pretty good for most cases.
+ @param windowSize A vDSP_Length (unsigned long) representing the size of the window (i.e. the resolution) of data that should be used to calculate the FFT. A typical value for this would be something like 1024 - 4096 (or higher for an even higher resolution FFT).
+ @param sampleRate A float representing the sample rate of the incoming audio signal.
+ @param delegate An EZAudioFFTDelegate to receive an event whenever the FFT is calculated.
+ @return A newly created EZAudioFFTRolling instance.
+ */
++ (instancetype)fftWithWindowSize:(vDSP_Length)windowSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class method to initialize an EZAudioFFTRolling instance with a window size, a history buffer size, and a sample rate. The EZAudioFFTRolling has an internal EZPlotHistoryInfo data structure that writes audio data to a circular buffer and manages sliding windows of audio data to support efficient, large FFT calculations. Here you provide a window size that represents how many audio samples will be used to calculate the FFT, a history buffer size representing the maximum length of the sliding window's underlying circular buffer, and a float representing the sample rate of the incoming audio (can be 0 if you don't care about the `maxFrequency` property).
+ @param windowSize A vDSP_Length (unsigned long) representing the size of the window (i.e. the resolution) of data that should be used to calculate the FFT. A typical value for this would be something like 1024 - 4096 (or higher for an even higher resolution FFT).
+ @param historyBufferSize A vDSP_Length (unsigned long) representing the length of the history buffer. This should be AT LEAST the size of the window. A recommended value for this would be at least 8x greater than the `windowSize` argument.
+ @param sampleRate A float representing the sample rate of the incoming audio signal.
+ @return A newly created EZAudioFFTRolling instance.
+ */
++ (instancetype)fftWithWindowSize:(vDSP_Length)windowSize
+ historyBufferSize:(vDSP_Length)historyBufferSize
+ sampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class method to initialize an EZAudioFFTRolling instance with a window size, a history buffer size, a sample rate, and an EZAudioFFTDelegate. The EZAudioFFTRolling has an internal EZPlotHistoryInfo data structure that writes audio data to a circular buffer and manages sliding windows of audio data to support efficient, large FFT calculations. Here you provide a window size that represents how many audio samples will be used to calculate the FFT, a history buffer size representing the maximum length of the sliding window's underlying circular buffer, a float representing the sample rate of the incoming audio (can be 0 if you don't care about the `maxFrequency` property), and an EZAudioFFTDelegate to receive a callback anytime the FFT is calculated.
+ @param windowSize A vDSP_Length (unsigned long) representing the size of the window (i.e. the resolution) of data that should be used to calculate the FFT. A typical value for this would be something like 1024 - 4096 (or higher for an even higher resolution FFT).
+ @param historyBufferSize A vDSP_Length (unsigned long) representing the length of the history buffer. This should be AT LEAST the size of the window. A recommended value for this would be at least 8x greater than the `windowSize` argument.
+ @param sampleRate A float representing the sample rate of the incoming audio signal.
+ @param delegate An EZAudioFFTDelegate to receive an event whenever the FFT is calculated.
+ @return A newly created EZAudioFFTRolling instance.
+ */
++ (instancetype)fftWithWindowSize:(vDSP_Length)windowSize
+ historyBufferSize:(vDSP_Length)historyBufferSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Properties
+///-----------------------------------------------------------
+
+/**
+ A vDSP_Length (unsigned long) representing the length of the FFT window.
+ */
+@property (readonly, nonatomic) vDSP_Length windowSize;
+
+//------------------------------------------------------------------------------
+
+/**
+ A float array representing the audio data in the internal circular buffer used to perform the FFT. This will increase as more data is appended to the internal circular buffer via the `computeFFTWithBuffer:withBufferSize:` method. The length of this array is the `timeDomainBufferSize` property.
+ */
+@property (readonly, nonatomic) float *timeDomainData;
+
+//------------------------------------------------------------------------------
+
+/**
+ A UInt32 representing the length of the audio data used to perform the FFT.
+ */
+@property (readonly, nonatomic) UInt32 timeDomainBufferSize;
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioFFT.m b/Pods/EZAudio/EZAudio/EZAudioFFT.m
new file mode 100644
index 0000000..02e7a5e
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioFFT.m
@@ -0,0 +1,444 @@
+//
+// EZAudioFFT.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 7/10/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZAudioFFT.h"
+#import "EZAudioUtilities.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Data Structures
+//------------------------------------------------------------------------------
+
+typedef struct EZAudioFFTInfo
+{
+ FFTSetup fftSetup;
+ COMPLEX_SPLIT complexA;
+ float *outFFTData;
+ vDSP_Length outFFTDataLength;
+ float *inversedFFTData;
+ vDSP_Length maxFrequencyIndex;
+ float maxFrequencyMagnitude;
+ float maxFrequency;
+} EZAudioFFTInfo;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFFT (Interface Extension)
+//------------------------------------------------------------------------------
+
+@interface EZAudioFFT ()
+@property (assign, nonatomic) EZAudioFFTInfo *info;
+@property (readwrite, nonatomic) vDSP_Length maximumBufferSize;
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFFT (Implementation)
+//------------------------------------------------------------------------------
+
+@implementation EZAudioFFT
+
+//------------------------------------------------------------------------------
+#pragma mark - Dealloc
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+ vDSP_destroy_fftsetup(self.info->fftSetup);
+ free(self.info->complexA.realp);
+ free(self.info->complexA.imagp);
+ free(self.info->outFFTData);
+ free(self.info->inversedFFTData);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initializers
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithMaximumBufferSize:(vDSP_Length)maximumBufferSize
+ sampleRate:(float)sampleRate
+{
+ return [self initWithMaximumBufferSize:maximumBufferSize
+ sampleRate:sampleRate
+ delegate:nil];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithMaximumBufferSize:(vDSP_Length)maximumBufferSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate
+{
+ self = [super init];
+ if (self)
+ {
+ self.maximumBufferSize = (vDSP_Length)maximumBufferSize;
+ self.sampleRate = sampleRate;
+ self.delegate = delegate;
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
++ (instancetype)fftWithMaximumBufferSize:(vDSP_Length)maximumBufferSize
+ sampleRate:(float)sampleRate
+{
+ return [[self alloc] initWithMaximumBufferSize:maximumBufferSize
+ sampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)fftWithMaximumBufferSize:(vDSP_Length)maximumBufferSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate
+{
+ return [[self alloc] initWithMaximumBufferSize:maximumBufferSize
+ sampleRate:sampleRate
+ delegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setup
+//------------------------------------------------------------------------------
+
+- (void)setup
+{
+ NSAssert(self.maximumBufferSize > 0, @"Expected FFT buffer size to be greater than 0!");
+
+ //
+ // Initialize FFT
+ //
+ float maximumBufferSizeBytes = self.maximumBufferSize * sizeof(float);
+ self.info = (EZAudioFFTInfo *)calloc(1, sizeof(EZAudioFFTInfo));
+ vDSP_Length log2n = log2f(self.maximumBufferSize);
+ self.info->fftSetup = vDSP_create_fftsetup(log2n, FFT_RADIX2);
+ long nOver2 = maximumBufferSizeBytes / 2;
+ size_t maximumSizePerComponentBytes = nOver2 * sizeof(float);
+ self.info->complexA.realp = (float *)malloc(maximumSizePerComponentBytes);
+ self.info->complexA.imagp = (float *)malloc(maximumSizePerComponentBytes);
+ self.info->outFFTData = (float *)malloc(maximumSizePerComponentBytes);
+ memset(self.info->outFFTData, 0, maximumSizePerComponentBytes);
+ self.info->inversedFFTData = (float *)malloc(maximumSizePerComponentBytes);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Actions
+//------------------------------------------------------------------------------
+
+- (float *)computeFFTWithBuffer:(float *)buffer withBufferSize:(UInt32)bufferSize
+{
+ if (buffer == NULL)
+ {
+ return NULL;
+ }
+
+ //
+ // Calculate real + imaginary components and normalize
+ //
+ vDSP_Length log2n = log2f(bufferSize);
+ long nOver2 = bufferSize / 2;
+ float mFFTNormFactor = 10.0 / (2 * bufferSize);
+ vDSP_ctoz((COMPLEX*)buffer, 2, &(self.info->complexA), 1, nOver2);
+ vDSP_fft_zrip(self.info->fftSetup, &(self.info->complexA), 1, log2n, FFT_FORWARD);
+ vDSP_vsmul(self.info->complexA.realp, 1, &mFFTNormFactor, self.info->complexA.realp, 1, nOver2);
+ vDSP_vsmul(self.info->complexA.imagp, 1, &mFFTNormFactor, self.info->complexA.imagp, 1, nOver2);
+ vDSP_zvmags(&(self.info->complexA), 1, self.info->outFFTData, 1, nOver2);
+ vDSP_fft_zrip(self.info->fftSetup, &(self.info->complexA), 1, log2n, FFT_INVERSE);
+ vDSP_ztoc(&(self.info->complexA), 1, (COMPLEX *) self.info->inversedFFTData , 2, nOver2);
+ self.info->outFFTDataLength = nOver2;
+
+ //
+ // Calculate max freq
+ //
+ if (self.sampleRate > 0.0f)
+ {
+ vDSP_maxvi(self.info->outFFTData, 1, &self.info->maxFrequencyMagnitude, &self.info->maxFrequencyIndex, nOver2);
+ self.info->maxFrequency = [self frequencyAtIndex:self.info->maxFrequencyIndex];
+ }
+
+ //
+ // Notify delegate
+ //
+ if ([self.delegate respondsToSelector:@selector(fft:updatedWithFFTData:bufferSize:)])
+ {
+ [self.delegate fft:self
+ updatedWithFFTData:self.info->outFFTData
+ bufferSize:nOver2];
+ }
+
+ //
+ // Return the FFT
+ //
+ return self.info->outFFTData;
+}
+
+//------------------------------------------------------------------------------
+
+- (float)frequencyAtIndex:(vDSP_Length)index
+{
+ if (!(self.info->outFFTData == NULL || self.sampleRate == 0.0f))
+ {
+ float nyquistMaxFreq = self.sampleRate / 2.0;
+ return ((float)index / (float)self.info->outFFTDataLength) * nyquistMaxFreq;
+ }
+ return NSNotFound;
+}
+
+//------------------------------------------------------------------------------
+
+- (float)frequencyMagnitudeAtIndex:(vDSP_Length)index
+{
+ if (self.info->outFFTData != NULL)
+ {
+ return self.info->outFFTData[index];
+ }
+ return NSNotFound;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Getters
+//------------------------------------------------------------------------------
+
+- (COMPLEX_SPLIT)complexSplit
+{
+ return self.info->complexA;
+}
+
+//------------------------------------------------------------------------------
+
+- (float *)fftData
+{
+ return self.info->outFFTData;
+}
+
+//------------------------------------------------------------------------------
+
+- (FFTSetup)fftSetup
+{
+ return self.info->fftSetup;
+}
+
+//------------------------------------------------------------------------------
+
+- (float *)inversedFFTData
+{
+ return self.info->inversedFFTData;
+}
+
+//------------------------------------------------------------------------------
+
+- (vDSP_Length)maxFrequencyIndex
+{
+ return self.info->maxFrequencyIndex;
+}
+
+//------------------------------------------------------------------------------
+
+- (float)maxFrequencyMagnitude
+{
+ return self.info->maxFrequencyMagnitude;
+}
+
+//------------------------------------------------------------------------------
+
+- (float)maxFrequency
+{
+ return self.info->maxFrequency;
+}
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFFTRolling
+//------------------------------------------------------------------------------
+
+@interface EZAudioFFTRolling ()
+@property (assign, nonatomic) EZPlotHistoryInfo *historyInfo;
+@property (readwrite, nonatomic) vDSP_Length windowSize;
+
+@end
+
+@implementation EZAudioFFTRolling
+
+//------------------------------------------------------------------------------
+#pragma mark - Dealloc
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+ [EZAudioUtilities freeHistoryInfo:self.historyInfo];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initialization
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithWindowSize:(vDSP_Length)windowSize
+ sampleRate:(float)sampleRate
+{
+ return [self initWithWindowSize:windowSize
+ historyBufferSize:windowSize * 8
+ sampleRate:sampleRate
+ delegate:nil];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithWindowSize:(vDSP_Length)windowSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate
+{
+ return [self initWithWindowSize:windowSize
+ historyBufferSize:windowSize * 8
+ sampleRate:sampleRate
+ delegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithWindowSize:(vDSP_Length)windowSize
+ historyBufferSize:(vDSP_Length)historyBufferSize
+ sampleRate:(float)sampleRate
+{
+ return [self initWithWindowSize:windowSize
+ historyBufferSize:historyBufferSize
+ sampleRate:sampleRate
+ delegate:nil];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithWindowSize:(vDSP_Length)windowSize
+ historyBufferSize:(vDSP_Length)historyBufferSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate
+{
+ self = [super initWithMaximumBufferSize:historyBufferSize
+ sampleRate:sampleRate];
+ if (self)
+ {
+ self.delegate = delegate;
+ self.windowSize = windowSize;
+
+ //
+ // Allocate an appropriately sized history buffer in bytes
+ //
+ self.historyInfo = [EZAudioUtilities historyInfoWithDefaultLength:(UInt32)windowSize
+ maximumLength:(UInt32)historyBufferSize];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
++ (instancetype)fftWithWindowSize:(vDSP_Length)windowSize
+ sampleRate:(float)sampleRate
+{
+ return [[self alloc] initWithWindowSize:windowSize
+ sampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)fftWithWindowSize:(vDSP_Length)windowSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate
+{
+ return [[self alloc] initWithWindowSize:windowSize
+ sampleRate:sampleRate
+ delegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)fftWithWindowSize:(vDSP_Length)windowSize
+ historyBufferSize:(vDSP_Length)historyBufferSize
+ sampleRate:(float)sampleRate
+{
+ return [[self alloc] initWithWindowSize:windowSize
+ historyBufferSize:historyBufferSize
+ sampleRate:sampleRate];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)fftWithWindowSize:(vDSP_Length)windowSize
+ historyBufferSize:(vDSP_Length)historyBufferSize
+ sampleRate:(float)sampleRate
+ delegate:(id<EZAudioFFTDelegate>)delegate
+{
+ return [[self alloc] initWithWindowSize:windowSize
+ historyBufferSize:historyBufferSize
+ sampleRate:sampleRate
+ delegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Actions
+//------------------------------------------------------------------------------
+
+- (float *)computeFFTWithBuffer:(float *)buffer
+ withBufferSize:(UInt32)bufferSize
+{
+ if (buffer == NULL)
+ {
+ return NULL;
+ }
+
+ //
+ // Append buffer to history window
+ //
+ [EZAudioUtilities appendBuffer:buffer
+ withBufferSize:bufferSize
+ toHistoryInfo:self.historyInfo];
+
+ //
+ // Call super to calculate the FFT of the window
+ //
+ return [super computeFFTWithBuffer:self.historyInfo->buffer
+ withBufferSize:self.historyInfo->bufferSize];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Getters
+//------------------------------------------------------------------------------
+
+- (UInt32)timeDomainBufferSize
+{
+ return self.historyInfo->bufferSize;
+}
+
+//------------------------------------------------------------------------------
+
+- (float *)timeDomainData
+{
+ return self.historyInfo->buffer;
+}
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZAudioFile.h b/Pods/EZAudio/EZAudio/EZAudioFile.h
new file mode 100644
index 0000000..d9977ce
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioFile.h
@@ -0,0 +1,380 @@
+//
+// EZAudioFile.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 12/1/13.
+// Copyright (c) 2013 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import <AudioToolbox/AudioToolbox.h>
+#import "EZAudioFloatData.h"
+
+//------------------------------------------------------------------------------
+
+@class EZAudio;
+@class EZAudioFile;
+
+//------------------------------------------------------------------------------
+#pragma mark - Blocks
+//------------------------------------------------------------------------------
+/**
+ A block used when returning the waveform data. The waveform data itself is an array of float arrays, one for each channel, and the length indicates the total length of each float array.
+ @param waveformData An array of float arrays, each representing a channel of audio data from the file
+ @param length An int representing the length of each channel of float audio data
+ */
+typedef void (^EZAudioWaveformDataCompletionBlock)(float **waveformData, int length);
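+
+// Example (an illustrative sketch, not part of the original header): a block
+// matching this signature. It would typically be passed to the file's waveform
+// data accessor (not shown in this excerpt).
+//
+//    EZAudioWaveformDataCompletionBlock completion = ^(float **waveformData, int length)
+//    {
+//        // waveformData[0] holds `length` samples for the first channel
+//        NSLog(@"Received %d waveform points per channel", length);
+//    };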
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFileDelegate
+//------------------------------------------------------------------------------
+/**
+ The EZAudioFileDelegate provides event callbacks for the EZAudioFile object. These types of events are triggered by reads and seeks on the file and give feedback such as the audio data read as a float array for visualizations and the new seek position for UI updates.
+ */
+@protocol EZAudioFileDelegate <NSObject>
+
+@optional
+/**
+ Triggered from the EZAudioFile function `readFrames:audioBufferList:bufferSize:eof:` to notify the delegate of the read audio data as a float array instead of a buffer list. A common use case would be to visualize the float data using an audio plot or an audio-data-dependent OpenGL sketch.
+ @param audioFile The instance of the EZAudioFile that triggered the event.
+ @param buffer An array of float arrays holding the audio data. buffer[0] would be the left channel's float array while buffer[1] would be the right channel's float array in a stereo file.
+ @param bufferSize The length of the buffer's float arrays
+ @param numberOfChannels The number of channels. 2 for stereo, 1 for mono.
+ */
+- (void) audioFile:(EZAudioFile *)audioFile
+ readAudio:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels;
+
+//------------------------------------------------------------------------------
+
+/**
+ Occurs when the audio file's internal seek position has been updated by the EZAudioFile functions `readFrames:audioBufferList:bufferSize:eof:` or `seekToFrame:`. As of 0.8.0 this is the preferred method of listening for position updates on the audio file since a user may want to pull the currentTime, formattedCurrentTime, or the frame index from the EZAudioFile instance provided.
+ @param audioFile The instance of the EZAudioFile in which the change occurred.
+ */
+- (void)audioFileUpdatedPosition:(EZAudioFile *)audioFile;
+
+//------------------------------------------------------------------------------
+
+/**
+ Occurs when the audio file's internal seek position has been updated by the EZAudioFile functions `readFrames:audioBufferList:bufferSize:eof:` or `seekToFrame:`.
+ @param audioFile The instance of the EZAudioFile in which the change occurred
+ @param framePosition The new frame index as a 64-bit signed integer
+ @deprecated This method is deprecated starting in version 0.8.0.
+ @note Please use the `audioFileUpdatedPosition:` method instead.
+ */
+- (void)audioFile:(EZAudioFile *)audioFile
+ updatedPosition:(SInt64)framePosition __attribute__((deprecated));
+
+@end
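+
+// A conforming class might forward the read callback to a plot on the main
+// queue, for instance (sketch; assumes the caller owns an EZAudioPlot in a
+// `plot` property):
+//
+//     - (void)audioFile:(EZAudioFile *)audioFile
+//             readAudio:(float **)buffer
+//        withBufferSize:(UInt32)bufferSize
+//  withNumberOfChannels:(UInt32)numberOfChannels
+//     {
+//         __weak typeof (self) weakSelf = self;
+//         dispatch_async(dispatch_get_main_queue(), ^{
+//             [weakSelf.plot updateBuffer:buffer[0] withBufferSize:bufferSize];
+//         });
+//     }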
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFile
+//------------------------------------------------------------------------------
+/**
+ The EZAudioFile provides a lightweight and intuitive way to asynchronously interact with audio files. These interactions include reading audio data, seeking within an audio file, getting information about the file, and pulling the waveform data for visualizing the contents of the audio file. The EZAudioFileDelegate provides event callbacks for when reads, seeks, and various updates happen within the audio file to allow the caller to interact with the action in meaningful ways. Common use cases here could be to read the audio file's data as AudioBufferList structures for output (see EZOutput) and visualizing the audio file's data as a float array using an audio plot (see EZAudioPlot).
+ */
+@interface EZAudioFile : NSObject
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+/**
+ An EZAudioFileDelegate for the audio file that is used to return events such as new seek positions within the file and the read audio data as a float array.
+ */
+@property (nonatomic, weak) id<EZAudioFileDelegate> delegate;
+
+//------------------------------------------------------------------------------
+#pragma mark - Initialization
+//------------------------------------------------------------------------------
+/**
+ @name Initialization
+*/
+
+/**
+ Creates a new instance of the EZAudioFile using a file path URL.
+ @param url The file path reference of the audio file as an NSURL.
+ @return The newly created EZAudioFile instance. nil if the file path does not exist.
+ */
+- (instancetype)initWithURL:(NSURL *)url;
+
+/**
+ Creates a new instance of the EZAudioFile using a file path URL with a delegate conforming to the EZAudioFileDelegate protocol.
+ @param delegate The audio file delegate that receives events specified by the EZAudioFileDelegate protocol
+ @param url The file path reference of the audio file as an NSURL.
+ @return The newly created EZAudioFile instance.
+ */
+- (instancetype)initWithURL:(NSURL *)url
+ delegate:(id)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a new instance of the EZAudioFile using a file path URL with a delegate conforming to the EZAudioFileDelegate protocol and a client format.
+ @param url The file path reference of the audio file as an NSURL.
+ @param delegate The audio file delegate that receives events specified by the EZAudioFileDelegate protocol
+ @param clientFormat An AudioStreamBasicDescription that will be used as the client format on the audio file. For instance, the audio file might be stored at a 22.5 kHz sample rate, but your app wants to read the samples at 44.1 kHz so it can interoperate with other components (like an audio processing graph) without any unwanted playback artifacts. If this initializer is not used then a non-interleaved float format will be assumed.
+ @return The newly created EZAudioFile instance.
+ */
+- (instancetype)initWithURL:(NSURL *)url
+ delegate:(id)delegate
+ clientFormat:(AudioStreamBasicDescription)clientFormat;
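+
+// For example (sketch, assuming a file named "sample.m4a" ships in the app bundle):
+//
+//     NSURL *url = [[NSBundle mainBundle] URLForResource:@"sample" withExtension:@"m4a"];
+//     EZAudioFile *audioFile = [[EZAudioFile alloc] initWithURL:url delegate:self];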
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+/**
+ @name Class Initializers
+ */
+
+/**
+ Class method that creates a new instance of the EZAudioFile using a file path URL.
+ @param url The file path reference of the audio file as an NSURL.
+ @return The newly created EZAudioFile instance.
+ */
++ (instancetype)audioFileWithURL:(NSURL *)url;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class method that creates a new instance of the EZAudioFile using a file path URL with a delegate conforming to the EZAudioFileDelegate protocol.
+ @param url The file path reference of the audio file as an NSURL.
+ @param delegate The audio file delegate that receives events specified by the EZAudioFileDelegate protocol
+ @return The newly created EZAudioFile instance.
+ */
++ (instancetype)audioFileWithURL:(NSURL *)url
+ delegate:(id)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class method that creates a new instance of the EZAudioFile using a file path URL with a delegate conforming to the EZAudioFileDelegate protocol and a client format.
+ @param url The file path reference of the audio file as an NSURL.
+ @param delegate The audio file delegate that receives events specified by the EZAudioFileDelegate protocol
+ @param clientFormat An AudioStreamBasicDescription that will be used as the client format on the audio file. For instance, the audio file might be a 22.5 kHz, interleaved MP3 file, but your app wants to read linear PCM samples at a sample rate of 44.1 kHz so it can be read in the context of other components sharing a common stream format (like an audio processing graph). If this initializer is not used then the `defaultClientFormat` will be used as the default value for the client format.
+ @return The newly created EZAudioFile instance.
+ */
++ (instancetype)audioFileWithURL:(NSURL *)url
+ delegate:(id)delegate
+ clientFormat:(AudioStreamBasicDescription)clientFormat;
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Methods
+//------------------------------------------------------------------------------
+/**
+ @name Class Methods
+ */
+
+/**
+ A class method that subclasses can override to specify the default client format that will be used to read audio data from this file. A client format is different from the file format in that it is the format of the other components interacting with this file. For instance, the file on disk could be a 22.5 kHz, float format, but we might have an audio processing graph that has a 44.1 kHz, signed integer format that we'd like to interact with. The client format lets us set that 44.1 kHz format on the audio file to properly read samples from it, with any interpolation or format conversion that must take place handled automatically within the EZAudioFile `readFrames:audioBufferList:bufferSize:eof:` method. The default is stereo, non-interleaved, 44.1 kHz.
+ @return An AudioStreamBasicDescription that serves as the audio file's client format.
+ */
++ (AudioStreamBasicDescription)defaultClientFormat;
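+
+// A subclass could, for instance, opt into an interleaved client format
+// (sketch, assuming the stereoFloatInterleavedFormatWithSampleRate: helper
+// from EZAudioUtilities):
+//
+//     + (AudioStreamBasicDescription)defaultClientFormat
+//     {
+//         return [EZAudioUtilities stereoFloatInterleavedFormatWithSampleRate:
+//                 [self defaultClientFormatSampleRate]];
+//     }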
+
+//------------------------------------------------------------------------------
+
+/**
+ A class method that subclasses can override to specify the default sample rate that will be used in the `defaultClientFormat` method. Default is 44100.0 (44.1 kHz).
+ @return A Float64 representing the sample rate that should be used in the default client format.
+ */
++ (Float64)defaultClientFormatSampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides an array of the supported audio file types. Each audio file type is provided as a string, i.e. @"caf". Useful for filtering lists of files in an open panel to only the types allowed.
+ @return An array of NSString objects representing the supported file types.
+ */
++ (NSArray *)supportedAudioFileTypes;
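+
+// For example, an NSOpenPanel on OS X could be restricted to these types (sketch):
+//
+//     NSOpenPanel *panel = [NSOpenPanel openPanel];
+//     panel.allowedFileTypes = [EZAudioFile supportedAudioFileTypes];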
+
+//------------------------------------------------------------------------------
+#pragma mark - Events
+//------------------------------------------------------------------------------
+/**
+ @name Reading From The Audio File
+ */
+
+/**
+ Reads a specified number of frames from the audio file. In addition, this will notify the EZAudioFileDelegate (if specified) of the read data as a float array with the audioFile:readAudio:withBufferSize:withNumberOfChannels: event and the new seek position within the file with the audioFile:updatedPosition: event.
+ @param frames The number of frames to read from the file.
+ @param audioBufferList An allocated AudioBufferList structure in which to store the read audio data
+ @param bufferSize A pointer to a UInt32 in which to store the read buffersize
+ @param eof A pointer to a BOOL in which to store whether the read operation reached the end of the audio file.
+ */
+- (void)readFrames:(UInt32)frames
+ audioBufferList:(AudioBufferList *)audioBufferList
+ bufferSize:(UInt32 *)bufferSize
+ eof:(BOOL *)eof;
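+
+// A read loop might look like this (sketch, assuming a buffer list allocated
+// with the EZAudioUtilities helper used elsewhere in this library):
+//
+//     UInt32 frames = 512;
+//     AudioBufferList *bufferList =
+//         [EZAudioUtilities audioBufferListWithNumberOfFrames:frames
+//                                            numberOfChannels:audioFile.clientFormat.mChannelsPerFrame
+//                                                 interleaved:NO];
+//     UInt32 bufferSize;
+//     BOOL eof;
+//     [audioFile readFrames:frames
+//           audioBufferList:bufferList
+//                bufferSize:&bufferSize
+//                       eof:&eof];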
+
+//------------------------------------------------------------------------------
+
+/**
+ @name Seeking Through The Audio File
+ */
+
+/**
+ Seeks through an audio file to a specified frame. This will notify the EZAudioFileDelegate (if specified) with the audioFile:updatedPosition: function.
+ @param frame The new frame position to seek to as a SInt64.
+ */
+- (void)seekToFrame:(SInt64)frame;
+
+//------------------------------------------------------------------------------
+#pragma mark - Getters
+//------------------------------------------------------------------------------
+/**
+ @name Getting Information About The Audio File
+ */
+
+/**
+ Provides the common AudioStreamBasicDescription that will be used for in-app interaction. The file's format will be converted to this format and then sent back as either a float array or an `AudioBufferList` pointer. For instance, the file on disk could be a 22.5 kHz, float format, but we might have an audio processing graph that has a 44.1 kHz, signed integer format that we'd like to interact with. The client format lets us set that 44.1 kHz format on the audio file to properly read samples from it, with any interpolation or format conversion that must take place handled automatically within the EZAudioFile `readFrames:audioBufferList:bufferSize:eof:` method. The default is stereo, non-interleaved, 44.1 kHz.
+ @warning This must be a linear PCM format!
+ @return An AudioStreamBasicDescription structure describing the format of the audio file.
+ */
+@property (readwrite) AudioStreamBasicDescription clientFormat;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current offset in the audio file as an NSTimeInterval (i.e. in seconds). When setting this, it will determine the correct frame offset and perform a `seekToFrame:` to the new time offset.
+ @warning Make sure the new current time offset is less than the `duration` or you will receive an invalid seek assertion.
+ */
+@property (nonatomic, readwrite) NSTimeInterval currentTime;
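+
+// For example, jumping to the 30 second mark (sketch):
+//
+//     if (30.0 < audioFile.duration)
+//     {
+//         audioFile.currentTime = 30.0;
+//     }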
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the duration of the audio file in seconds.
+ */
+@property (readonly) NSTimeInterval duration;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the AudioStreamBasicDescription structure containing the format of the file.
+ @return An AudioStreamBasicDescription structure describing the format of the audio file.
+ */
+@property (readonly) AudioStreamBasicDescription fileFormat;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current time as an NSString with the time format MM:SS.
+ */
+@property (readonly) NSString *formattedCurrentTime;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the duration as an NSString with the time format MM:SS.
+ */
+@property (readonly) NSString *formattedDuration;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the frame index (a.k.a. the seek position) within the audio file as a SInt64. This can be helpful when seeking through the audio file.
+ @return The current frame index within the audio file as a SInt64.
+ */
+@property (readonly) SInt64 frameIndex;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides a dictionary containing the metadata (ID3) tags that are included in the header for the audio file. Typically this contains stuff like artist, title, release year, etc.
+ @return An NSDictionary containing the metadata for the audio file.
+ */
+@property (readonly) NSDictionary *metadata;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the total duration of the audio file in seconds.
+ @deprecated This property is deprecated starting in version 0.3.0.
+ @note Please use `duration` property instead.
+ @return The total duration of the audio file as a Float32.
+ */
+@property (readonly) NSTimeInterval totalDuration __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the total frame count of the audio file in the client format.
+ @return The total number of frames in the audio file in the AudioStreamBasicDescription representing the client format as a SInt64.
+ */
+@property (readonly) SInt64 totalClientFrames;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the total frame count of the audio file in the file format.
+ @return The total number of frames in the audio file in the AudioStreamBasicDescription representing the file format as a SInt64.
+ */
+@property (readonly) SInt64 totalFrames;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the NSURL for the audio file.
+ @return An NSURL representing the path of the EZAudioFile instance.
+ */
+@property (nonatomic, copy, readonly) NSURL *url;
+
+//------------------------------------------------------------------------------
+#pragma mark - Helpers
+//------------------------------------------------------------------------------
+
+/**
+ Synchronously pulls the waveform amplitude data into a float array for the receiver. This returns a waveform with a default resolution of 1024, meaning there are 1024 data points to plot the waveform.
+ @return An EZAudioFloatData instance containing the audio data for all channels of the audio.
+ */
+- (EZAudioFloatData *)getWaveformData;
+
+//------------------------------------------------------------------------------
+
+/**
+ Synchronously pulls the waveform amplitude data into a float array for the receiver.
+ @param numberOfPoints A UInt32 representing the number of data points you need. The higher the number of points the more detailed the waveform will be.
+ @return An EZAudioFloatData instance containing the audio data for all channels of the audio.
+ */
+- (EZAudioFloatData *)getWaveformDataWithNumberOfPoints:(UInt32)numberOfPoints;
+
+//------------------------------------------------------------------------------
+
+/**
+ Asynchronously pulls the waveform amplitude data into a float array for the receiver. This returns a waveform with a default resolution of 1024, meaning there are 1024 data points to plot the waveform.
+ @param completion An EZAudioWaveformDataCompletionBlock that executes on the main queue when the waveform data has been extracted. Provides the waveform data as an array of float arrays (one for each channel) along with the length of each array.
+ */
+- (void)getWaveformDataWithCompletionBlock:(EZAudioWaveformDataCompletionBlock)completion;
+
+//------------------------------------------------------------------------------
+
+/**
+ Asynchronously pulls the waveform amplitude data into a float array for the receiver.
+ @param numberOfPoints A UInt32 representing the number of data points you need. The higher the number of points the more detailed the waveform will be.
+ @param completion An EZAudioWaveformDataCompletionBlock that executes on the main queue when the waveform data has been extracted. Provides the waveform data as an array of float arrays (one for each channel) along with the length of each array.
+ */
+- (void)getWaveformDataWithNumberOfPoints:(UInt32)numberOfPoints
+ completion:(EZAudioWaveformDataCompletionBlock)completion;
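+
+// Typical asynchronous usage (sketch; the plot property is assumed to be an
+// EZAudioPlot owned by the caller):
+//
+//     __weak typeof (self) weakSelf = self;
+//     [audioFile getWaveformDataWithNumberOfPoints:2048
+//                                       completion:^(float **waveformData, int length)
+//     {
+//         [weakSelf.plot updateBuffer:waveformData[0] withBufferSize:length];
+//     }];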
+
+//------------------------------------------------------------------------------
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZAudioFile.m b/Pods/EZAudio/EZAudio/EZAudioFile.m
new file mode 100644
index 0000000..3a69a60
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioFile.m
@@ -0,0 +1,729 @@
+//
+// EZAudioFile.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 12/1/13.
+// Copyright (c) 2013 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZAudioFile.h"
+
+//------------------------------------------------------------------------------
+
+#import "EZAudio.h"
+#import "EZAudioFloatConverter.h"
+#import "EZAudioFloatData.h"
+#include <pthread.h>
+
+// constants
+static UInt32 EZAudioFileWaveformDefaultResolution = 1024;
+static NSString *EZAudioFileWaveformDataQueueIdentifier = @"com.ezaudio.waveformQueue";
+
+//------------------------------------------------------------------------------
+
+typedef struct
+{
+ AudioFileID audioFileID;
+ AudioStreamBasicDescription clientFormat;
+ NSTimeInterval duration;
+ ExtAudioFileRef extAudioFileRef;
+ AudioStreamBasicDescription fileFormat;
+ SInt64 frames;
+ CFURLRef sourceURL;
+} EZAudioFileInfo;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFile
+//------------------------------------------------------------------------------
+
+@interface EZAudioFile ()
+@property (nonatomic, strong) EZAudioFloatConverter *floatConverter;
+@property (nonatomic) float **floatData;
+@property (nonatomic) EZAudioFileInfo *info;
+@property (nonatomic) pthread_mutex_t lock;
+@property (nonatomic) dispatch_queue_t waveformQueue;
+@end
+
+//------------------------------------------------------------------------------
+
+@implementation EZAudioFile
+
+//------------------------------------------------------------------------------
+#pragma mark - Dealloc
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+ self.floatConverter = nil;
+ pthread_mutex_destroy(&_lock);
+ [EZAudioUtilities freeFloatBuffers:self.floatData numberOfChannels:self.clientFormat.mChannelsPerFrame];
+ [EZAudioUtilities checkResult:ExtAudioFileDispose(self.info->extAudioFileRef) operation:"Failed to dispose of ext audio file"];
+ free(self.info);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initialization
+//------------------------------------------------------------------------------
+
+- (instancetype)init
+{
+ self = [super init];
+ if (self)
+ {
+ self.info = (EZAudioFileInfo *)malloc(sizeof(EZAudioFileInfo));
+ _floatData = NULL;
+ pthread_mutex_init(&_lock, NULL);
+ _waveformQueue = dispatch_queue_create(EZAudioFileWaveformDataQueueIdentifier.UTF8String, DISPATCH_QUEUE_PRIORITY_DEFAULT);
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithURL:(NSURL *)url
+{
+ return [self initWithURL:url delegate:nil];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithURL:(NSURL *)url
+ delegate:(id)delegate
+{
+ return [self initWithURL:url
+ delegate:delegate
+ clientFormat:[self.class defaultClientFormat]];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithURL:(NSURL *)url
+ delegate:(id)delegate
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+{
+ self = [self init];
+ if (self)
+ {
+ self.info->sourceURL = (__bridge CFURLRef)(url);
+ self.info->clientFormat = clientFormat;
+ self.delegate = delegate;
+ if (![self setup])
+ {
+ return nil;
+ }
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
++ (instancetype)audioFileWithURL:(NSURL *)url
+{
+ return [[self alloc] initWithURL:url];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)audioFileWithURL:(NSURL *)url
+ delegate:(id)delegate
+{
+ return [[self alloc] initWithURL:url delegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)audioFileWithURL:(NSURL *)url
+ delegate:(id)delegate
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+{
+ return [[self alloc] initWithURL:url
+ delegate:delegate
+ clientFormat:clientFormat];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - NSCopying
+//------------------------------------------------------------------------------
+
+- (id)copyWithZone:(NSZone *)zone
+{
+ return [EZAudioFile audioFileWithURL:self.url];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Methods
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)defaultClientFormat
+{
+ return [EZAudioUtilities stereoFloatNonInterleavedFormatWithSampleRate:[self defaultClientFormatSampleRate]];
+}
+
+//------------------------------------------------------------------------------
+
++ (Float64)defaultClientFormatSampleRate
+{
+ return 44100.0f;
+}
+
+//------------------------------------------------------------------------------
+
++ (NSArray *)supportedAudioFileTypes
+{
+ return @
+ [
+ @"aac",
+ @"caf",
+ @"aif",
+ @"aiff",
+ @"aifc",
+ @"mp3",
+ @"mp4",
+ @"m4a",
+ @"snd",
+ @"au",
+ @"sd2",
+ @"wav"
+ ];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setup
+//------------------------------------------------------------------------------
+
+- (BOOL)setup
+{
+ //
+ // Try to open the file, bail if the file could not be opened
+ //
+ BOOL success = [self openAudioFile];
+ if (!success)
+ {
+ return success;
+ }
+
+ //
+ // Set the client format
+ //
+ self.clientFormat = self.info->clientFormat;
+
+ return YES;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Creating/Opening Audio File
+//------------------------------------------------------------------------------
+
+- (BOOL)openAudioFile
+{
+ //
+ // Need a source url
+ //
+ NSAssert(self.info->sourceURL, @"EZAudioFile cannot be created without a source url!");
+
+ //
+ // Determine if the file actually exists
+ //
+ CFURLRef url = self.info->sourceURL;
+ NSURL *fileURL = (__bridge NSURL *)(url);
+ BOOL fileExists = [[NSFileManager defaultManager] fileExistsAtPath:fileURL.path];
+
+ //
+ // Create an ExtAudioFileRef for the file handle
+ //
+ if (fileExists)
+ {
+ [EZAudioUtilities checkResult:ExtAudioFileOpenURL(url, &self.info->extAudioFileRef)
+ operation:"Failed to create ExtAudioFileRef"];
+ }
+ else
+ {
+ return NO;
+ }
+
+ //
+ // Get the underlying AudioFileID
+ //
+ UInt32 propSize = sizeof(self.info->audioFileID);
+ [EZAudioUtilities checkResult:ExtAudioFileGetProperty(self.info->extAudioFileRef,
+ kExtAudioFileProperty_AudioFile,
+ &propSize,
+ &self.info->audioFileID)
+ operation:"Failed to get underlying AudioFileID"];
+
+ //
+ // Store the file format
+ //
+ propSize = sizeof(self.info->fileFormat);
+ [EZAudioUtilities checkResult:ExtAudioFileGetProperty(self.info->extAudioFileRef,
+ kExtAudioFileProperty_FileDataFormat,
+ &propSize,
+ &self.info->fileFormat)
+ operation:"Failed to get file audio format on existing audio file"];
+
+ //
+ // Get the total frames and duration
+ //
+ propSize = sizeof(SInt64);
+ [EZAudioUtilities checkResult:ExtAudioFileGetProperty(self.info->extAudioFileRef,
+ kExtAudioFileProperty_FileLengthFrames,
+ &propSize,
+ &self.info->frames)
+ operation:"Failed to get total frames"];
+ self.info->duration = (NSTimeInterval) self.info->frames / self.info->fileFormat.mSampleRate;
+
+ return YES;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Events
+//------------------------------------------------------------------------------
+
+- (void)readFrames:(UInt32)frames
+ audioBufferList:(AudioBufferList *)audioBufferList
+ bufferSize:(UInt32 *)bufferSize
+ eof:(BOOL *)eof
+{
+ if (pthread_mutex_trylock(&_lock) == 0)
+ {
+ // perform read
+ [EZAudioUtilities checkResult:ExtAudioFileRead(self.info->extAudioFileRef,
+ &frames,
+ audioBufferList)
+ operation:"Failed to read audio data from file"];
+ *bufferSize = frames;
+ *eof = frames == 0;
+
+ //
+ // Notify delegate
+ //
+ if ([self.delegate respondsToSelector:@selector(audioFileUpdatedPosition:)])
+ {
+ [self.delegate audioFileUpdatedPosition:self];
+ }
+
+ //
+ // Deprecated, but supported until 1.0
+ //
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+ if ([self.delegate respondsToSelector:@selector(audioFile:updatedPosition:)])
+ {
+ [self.delegate audioFile:self updatedPosition:[self frameIndex]];
+ }
+#pragma GCC diagnostic pop
+
+ if ([self.delegate respondsToSelector:@selector(audioFile:readAudio:withBufferSize:withNumberOfChannels:)])
+ {
+ // convert into float data
+ [self.floatConverter convertDataFromAudioBufferList:audioBufferList
+ withNumberOfFrames:*bufferSize
+ toFloatBuffers:self.floatData];
+
+ // notify delegate
+ UInt32 channels = self.clientFormat.mChannelsPerFrame;
+ [self.delegate audioFile:self
+ readAudio:self.floatData
+ withBufferSize:*bufferSize
+ withNumberOfChannels:channels];
+ }
+
+ pthread_mutex_unlock(&_lock);
+
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)seekToFrame:(SInt64)frame
+{
+ if (pthread_mutex_trylock(&_lock) == 0)
+ {
+ [EZAudioUtilities checkResult:ExtAudioFileSeek(self.info->extAudioFileRef,
+ frame)
+ operation:"Failed to seek frame position within audio file"];
+
+ pthread_mutex_unlock(&_lock);
+
+ //
+ // Notify delegate
+ //
+ if ([self.delegate respondsToSelector:@selector(audioFileUpdatedPosition:)])
+ {
+ [self.delegate audioFileUpdatedPosition:self];
+ }
+
+ //
+ // Deprecated, but supported until 1.0
+ //
+#pragma GCC diagnostic push
+#pragma GCC diagnostic ignored "-Wdeprecated-declarations"
+ if ([self.delegate respondsToSelector:@selector(audioFile:updatedPosition:)])
+ {
+ [self.delegate audioFile:self updatedPosition:[self frameIndex]];
+ }
+#pragma GCC diagnostic pop
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Getters
+//------------------------------------------------------------------------------
+
+- (AudioStreamBasicDescription)floatFormat
+{
+ return [EZAudioUtilities stereoFloatNonInterleavedFormatWithSampleRate:44100.0f];
+}
+
+//------------------------------------------------------------------------------
+
+- (EZAudioFloatData *)getWaveformData
+{
+ return [self getWaveformDataWithNumberOfPoints:EZAudioFileWaveformDefaultResolution];
+}
+
+//------------------------------------------------------------------------------
+
+- (EZAudioFloatData *)getWaveformDataWithNumberOfPoints:(UInt32)numberOfPoints
+{
+ EZAudioFloatData *waveformData;
+ if (pthread_mutex_trylock(&_lock) == 0)
+ {
+ // store current frame
+ SInt64 currentFrame = self.frameIndex;
+ BOOL interleaved = [EZAudioUtilities isInterleaved:self.clientFormat];
+ UInt32 channels = self.clientFormat.mChannelsPerFrame;
+ float **data = (float **)malloc( sizeof(float*) * channels );
+ for (int i = 0; i < channels; i++)
+ {
+ data[i] = (float *)malloc( sizeof(float) * numberOfPoints );
+ }
+
+ // seek to 0
+ [EZAudioUtilities checkResult:ExtAudioFileSeek(self.info->extAudioFileRef,
+ 0)
+ operation:"Failed to seek frame position within audio file"];
+
+ // calculate the required number of frames per buffer
+ SInt64 framesPerBuffer = ((SInt64) self.totalClientFrames / numberOfPoints);
+ SInt64 framesPerChannel = framesPerBuffer / channels;
+
+ // allocate an audio buffer list
+ AudioBufferList *audioBufferList = [EZAudioUtilities audioBufferListWithNumberOfFrames:(UInt32)framesPerBuffer
+ numberOfChannels:self.info->clientFormat.mChannelsPerFrame
+ interleaved:interleaved];
+
+ // read through file and calculate rms at each point
+ for (SInt64 i = 0; i < numberOfPoints; i++)
+ {
+ UInt32 bufferSize = (UInt32) framesPerBuffer;
+ [EZAudioUtilities checkResult:ExtAudioFileRead(self.info->extAudioFileRef,
+ &bufferSize,
+ audioBufferList)
+ operation:"Failed to read audio data from file waveform"];
+ if (interleaved)
+ {
+ float *buffer = (float *)audioBufferList->mBuffers[0].mData;
+ for (int channel = 0; channel < channels; channel++)
+ {
+ float channelData[framesPerChannel];
+ for (int frame = 0; frame < framesPerChannel; frame++)
+ {
+ channelData[frame] = buffer[frame * channels + channel];
+ }
+ float rms = [EZAudioUtilities RMS:channelData length:(UInt32)framesPerChannel];
+ data[channel][i] = rms;
+ }
+ }
+ else
+ {
+ for (int channel = 0; channel < channels; channel++)
+ {
+ float *channelData = audioBufferList->mBuffers[channel].mData;
+ float rms = [EZAudioUtilities RMS:channelData length:bufferSize];
+ data[channel][i] = rms;
+ }
+ }
+ }
+
+ // clean up
+ [EZAudioUtilities freeBufferList:audioBufferList];
+
+ // seek back to previous position
+ [EZAudioUtilities checkResult:ExtAudioFileSeek(self.info->extAudioFileRef,
+ currentFrame)
+ operation:"Failed to seek frame position within audio file"];
+
+ pthread_mutex_unlock(&_lock);
+
+ waveformData = [EZAudioFloatData dataWithNumberOfChannels:channels
+ buffers:(float **)data
+ bufferSize:numberOfPoints];
+
+ // cleanup
+ for (int i = 0; i < channels; i++)
+ {
+ free(data[i]);
+ }
+ free(data);
+ }
+ return waveformData;
+}
+
+//------------------------------------------------------------------------------
+
+- (void)getWaveformDataWithCompletionBlock:(EZAudioWaveformDataCompletionBlock)waveformDataCompletionBlock
+{
+ [self getWaveformDataWithNumberOfPoints:EZAudioFileWaveformDefaultResolution
+ completion:waveformDataCompletionBlock];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)getWaveformDataWithNumberOfPoints:(UInt32)numberOfPoints
+ completion:(EZAudioWaveformDataCompletionBlock)completion
+{
+ if (!completion)
+ {
+ return;
+ }
+
+ // async get waveform data
+ __weak EZAudioFile *weakSelf = self;
+ dispatch_async(self.waveformQueue, ^{
+ EZAudioFloatData *waveformData = [weakSelf getWaveformDataWithNumberOfPoints:numberOfPoints];
+ dispatch_async(dispatch_get_main_queue(), ^{
+ completion(waveformData.buffers, waveformData.bufferSize);
+ });
+ });
+}
+
+//------------------------------------------------------------------------------
+
+- (AudioStreamBasicDescription)clientFormat
+{
+ return self.info->clientFormat;
+}
+
+//------------------------------------------------------------------------------
+
+- (NSTimeInterval)currentTime
+{
+ return [EZAudioUtilities MAP:(float)[self frameIndex]
+ leftMin:0.0f
+ leftMax:(float)[self totalFrames]
+ rightMin:0.0f
+ rightMax:[self duration]];
+}
+
+//------------------------------------------------------------------------------
+
+- (NSTimeInterval)duration
+{
+ return self.info->duration;
+}
+
+//------------------------------------------------------------------------------
+
+- (AudioStreamBasicDescription)fileFormat
+{
+ return self.info->fileFormat;
+}
+
+//------------------------------------------------------------------------------
+
+- (NSString *)formattedCurrentTime
+{
+ return [EZAudioUtilities displayTimeStringFromSeconds:[self currentTime]];
+}
+
+//------------------------------------------------------------------------------
+
+- (NSString *)formattedDuration
+{
+ return [EZAudioUtilities displayTimeStringFromSeconds:[self duration]];
+}
+
+//------------------------------------------------------------------------------
+
+- (SInt64)frameIndex
+{
+ SInt64 frameIndex;
+ [EZAudioUtilities checkResult:ExtAudioFileTell(self.info->extAudioFileRef, &frameIndex)
+ operation:"Failed to get frame index"];
+ return frameIndex;
+}
+
+//------------------------------------------------------------------------------
+
+- (NSDictionary *)metadata
+{
+ // get size of metadata property (dictionary)
+ UInt32 propSize = sizeof(self.info->audioFileID);
+ CFDictionaryRef metadata;
+ UInt32 writable;
+ [EZAudioUtilities checkResult:AudioFileGetPropertyInfo(self.info->audioFileID,
+ kAudioFilePropertyInfoDictionary,
+ &propSize,
+ &writable)
+ operation:"Failed to get the size of the metadata dictionary"];
+
+ // pull metadata
+ [EZAudioUtilities checkResult:AudioFileGetProperty(self.info->audioFileID,
+ kAudioFilePropertyInfoDictionary,
+ &propSize,
+ &metadata)
+ operation:"Failed to get metadata dictionary"];
+
+ // cast to NSDictionary
+ return (__bridge NSDictionary*)metadata;
+}
+
+//------------------------------------------------------------------------------
+
+- (NSTimeInterval)totalDuration
+{
+ return self.info->duration;
+}
+
+//------------------------------------------------------------------------------
+
+- (SInt64)totalClientFrames
+{
+ SInt64 totalFrames = [self totalFrames];
+ AudioStreamBasicDescription clientFormat = self.info->clientFormat;
+ AudioStreamBasicDescription fileFormat = self.info->fileFormat;
+ BOOL sameSampleRate = clientFormat.mSampleRate == fileFormat.mSampleRate;
+ if (!sameSampleRate)
+ {
+ totalFrames = self.info->duration * clientFormat.mSampleRate;
+ }
+ return totalFrames;
+}
+
+//------------------------------------------------------------------------------
+
+- (SInt64)totalFrames
+{
+ return self.info->frames;
+}
+
+//------------------------------------------------------------------------------
+
+- (NSURL *)url
+{
+ return (__bridge NSURL*)self.info->sourceURL;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setters
+//------------------------------------------------------------------------------
+
+- (void)setClientFormat:(AudioStreamBasicDescription)clientFormat
+{
+ //
+ // Clear any float data currently cached
+ //
+ if (self.floatData)
+ {
+ self.floatData = nil;
+ }
+
+ //
+ // Client format can only be linear PCM!
+ //
+ NSAssert([EZAudioUtilities isLinearPCM:clientFormat], @"Client format must be linear PCM");
+
+ //
+ // Store the client format
+ //
+ self.info->clientFormat = clientFormat;
+
+ //
+ // Set the client format on the ExtAudioFileRef
+ //
+ [EZAudioUtilities checkResult:ExtAudioFileSetProperty(self.info->extAudioFileRef,
+ kExtAudioFileProperty_ClientDataFormat,
+ sizeof(clientFormat),
+ &clientFormat)
+ operation:"Couldn't set client data format on file"];
+
+ //
+ // Create a new float converter using the client format as the input format
+ //
+ self.floatConverter = [EZAudioFloatConverter converterWithInputFormat:clientFormat];
+
+ //
+ // Determine how big our float buffers need to be to hold a buffer of float
+ // data for the audio received callback.
+ //
+ UInt32 maxPacketSize;
+ UInt32 propSize = sizeof(maxPacketSize);
+ [EZAudioUtilities checkResult:ExtAudioFileGetProperty(self.info->extAudioFileRef,
+ kExtAudioFileProperty_ClientMaxPacketSize,
+ &propSize,
+ &maxPacketSize)
+ operation:"Failed to get max packet size"];
+
+ self.floatData = [EZAudioUtilities floatBuffersWithNumberOfFrames:1024
+ numberOfChannels:self.clientFormat.mChannelsPerFrame];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setCurrentTime:(NSTimeInterval)currentTime
+{
+ NSAssert(currentTime < [self duration], @"Invalid seek operation, expected current time to be less than duration");
+ SInt64 frame = [EZAudioUtilities MAP:currentTime
+ leftMin:0.0f
+ leftMax:[self duration]
+ rightMin:0.0f
+ rightMax:[self totalFrames]];
+ [self seekToFrame:frame];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Description
+//------------------------------------------------------------------------------
+
+- (NSString *)description
+{
+ return [NSString stringWithFormat:@"%@ {\n"
+ " url: %@,\n"
+ " duration: %f,\n"
+ " totalFrames: %lld,\n"
+ " metadata: %@,\n"
+ " fileFormat: { %@ },\n"
+ " clientFormat: { %@ } \n"
+ "}",
+ [super description],
+ [self url],
+ [self duration],
+ [self totalFrames],
+ [self metadata],
+ [EZAudioUtilities stringForAudioStreamBasicDescription:[self fileFormat]],
+ [EZAudioUtilities stringForAudioStreamBasicDescription:[self clientFormat]]];
+}
+
+//------------------------------------------------------------------------------
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZAudioFloatConverter.h b/Pods/EZAudio/EZAudio/EZAudioFloatConverter.h
new file mode 100644
index 0000000..63d5635
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioFloatConverter.h
@@ -0,0 +1,75 @@
+//
+// EZAudioFloatConverter.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 6/23/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import <AudioToolbox/AudioToolbox.h>
+
+//------------------------------------------------------------------------------
+#pragma mark - Constants
+//------------------------------------------------------------------------------
+
+FOUNDATION_EXPORT UInt32 const EZAudioFloatConverterDefaultPacketSize;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFloatConverter
+//------------------------------------------------------------------------------
+
+@interface EZAudioFloatConverter : NSObject
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Methods
+//------------------------------------------------------------------------------
+
++ (instancetype)converterWithInputFormat:(AudioStreamBasicDescription)inputFormat;
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+@property (nonatomic, assign, readonly) AudioStreamBasicDescription inputFormat;
+@property (nonatomic, assign, readonly) AudioStreamBasicDescription floatFormat;
+
+//------------------------------------------------------------------------------
+#pragma mark - Instance Methods
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithInputFormat:(AudioStreamBasicDescription)inputFormat;
+
+//------------------------------------------------------------------------------
+
+- (void)convertDataFromAudioBufferList:(AudioBufferList *)audioBufferList
+ withNumberOfFrames:(UInt32)frames
+ toFloatBuffers:(float **)buffers;
+
+//------------------------------------------------------------------------------
+
+- (void)convertDataFromAudioBufferList:(AudioBufferList *)audioBufferList
+ withNumberOfFrames:(UInt32)frames
+ toFloatBuffers:(float **)buffers
+ packetDescriptions:(AudioStreamPacketDescription *)packetDescriptions;
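+
+// A typical conversion pass might look like this (sketch, assuming float
+// buffers allocated to hold `frames` samples per channel):
+//
+//     EZAudioFloatConverter *converter =
+//         [EZAudioFloatConverter converterWithInputFormat:inputFormat];
+//     [converter convertDataFromAudioBufferList:bufferList
+//                            withNumberOfFrames:frames
+//                                toFloatBuffers:floatBuffers];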
+
+//------------------------------------------------------------------------------
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZAudioFloatConverter.m b/Pods/EZAudio/EZAudio/EZAudioFloatConverter.m
new file mode 100644
index 0000000..1ef1eb4
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioFloatConverter.m
@@ -0,0 +1,224 @@
+//
+// EZAudioFloatConverter.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 6/23/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZAudioFloatConverter.h"
+#import "EZAudioUtilities.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Constants
+//------------------------------------------------------------------------------
+
+static UInt32 EZAudioFloatConverterDefaultOutputBufferSize = 128 * 32;
+UInt32 const EZAudioFloatConverterDefaultPacketSize = 2048;
+
+//------------------------------------------------------------------------------
+#pragma mark - Data Structures
+//------------------------------------------------------------------------------
+
+typedef struct
+{
+ AudioConverterRef converterRef;
+ AudioBufferList *floatAudioBufferList;
+ AudioStreamBasicDescription inputFormat;
+ AudioStreamBasicDescription outputFormat;
+ AudioStreamPacketDescription *packetDescriptions;
+ UInt32 packetsPerBuffer;
+} EZAudioFloatConverterInfo;
+
+//------------------------------------------------------------------------------
+#pragma mark - Callbacks
+//------------------------------------------------------------------------------
+
+OSStatus EZAudioFloatConverterCallback(AudioConverterRef inAudioConverter,
+ UInt32 *ioNumberDataPackets,
+ AudioBufferList *ioData,
+ AudioStreamPacketDescription **outDataPacketDescription,
+ void *inUserData)
+{
+ AudioBufferList *sourceBuffer = (AudioBufferList *)inUserData;
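+
+    //
+    // Copy the caller's AudioBufferList (passed in through inUserData) into
+    // ioData so AudioConverterFillComplexBuffer can consume it; the size
+    // accounts for the variable-length mBuffers array.
+    //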
+ memcpy(ioData,
+ sourceBuffer,
+ sizeof(AudioBufferList) + (sourceBuffer->mNumberBuffers - 1) * sizeof(AudioBuffer));
+ return noErr;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFloatConverter (Interface Extension)
+//------------------------------------------------------------------------------
+
+@interface EZAudioFloatConverter ()
+@property (nonatomic, assign) EZAudioFloatConverterInfo *info;
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFloatConverter (Implementation)
+//------------------------------------------------------------------------------
+
+@implementation EZAudioFloatConverter
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Methods
+//------------------------------------------------------------------------------
+
++ (instancetype)converterWithInputFormat:(AudioStreamBasicDescription)inputFormat
+{
+ return [[self alloc] initWithInputFormat:inputFormat];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Dealloc
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+ AudioConverterDispose(self.info->converterRef);
+ [EZAudioUtilities freeBufferList:self.info->floatAudioBufferList];
+ free(self.info->packetDescriptions);
+ free(self.info);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initialization
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithInputFormat:(AudioStreamBasicDescription)inputFormat
+{
+ self = [super init];
+ if (self)
+ {
+ self.info = (EZAudioFloatConverterInfo *)malloc(sizeof(EZAudioFloatConverterInfo));
+ memset(self.info, 0, sizeof(EZAudioFloatConverterInfo));
+ self.info->inputFormat = inputFormat;
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setup
+//------------------------------------------------------------------------------
+
+- (void)setup
+{
+ // create output format
+ self.info->outputFormat = [EZAudioUtilities floatFormatWithNumberOfChannels:self.info->inputFormat.mChannelsPerFrame
+ sampleRate:self.info->inputFormat.mSampleRate];
+
+ // create a new instance of the audio converter
+ [EZAudioUtilities checkResult:AudioConverterNew(&self.info->inputFormat,
+ &self.info->outputFormat,
+ &self.info->converterRef)
+ operation:"Failed to create new audio converter"];
+
+ // get max packets per buffer so you can allocate a proper AudioBufferList
+ UInt32 packetsPerBuffer = 0;
+ UInt32 outputBufferSize = EZAudioFloatConverterDefaultOutputBufferSize;
+ UInt32 sizePerPacket = self.info->inputFormat.mBytesPerPacket;
+ BOOL isVBR = sizePerPacket == 0;
+
+ // VBR
+ if (isVBR)
+ {
+ // determine the max output buffer size
+ UInt32 maxOutputPacketSize;
+ UInt32 propSize = sizeof(maxOutputPacketSize);
+ OSStatus result = AudioConverterGetProperty(self.info->converterRef,
+ kAudioConverterPropertyMaximumOutputPacketSize,
+ &propSize,
+ &maxOutputPacketSize);
+ if (result != noErr)
+ {
+ maxOutputPacketSize = EZAudioFloatConverterDefaultPacketSize;
+ }
+
+ // set the output buffer size to at least the max output size
+ if (maxOutputPacketSize > outputBufferSize)
+ {
+ outputBufferSize = maxOutputPacketSize;
+ }
+ packetsPerBuffer = outputBufferSize / maxOutputPacketSize;
+
+ // allocate memory for the packet descriptions
+ self.info->packetDescriptions = (AudioStreamPacketDescription *)malloc(sizeof(AudioStreamPacketDescription) * packetsPerBuffer);
+ }
+ else
+ {
+ packetsPerBuffer = outputBufferSize / sizePerPacket;
+ }
+ self.info->packetsPerBuffer = packetsPerBuffer;
+
+ // allocate the AudioBufferList to hold the float values
+ BOOL isInterleaved = [EZAudioUtilities isInterleaved:self.info->outputFormat];
+ self.info->floatAudioBufferList = [EZAudioUtilities audioBufferListWithNumberOfFrames:packetsPerBuffer
+ numberOfChannels:self.info->outputFormat.mChannelsPerFrame
+ interleaved:isInterleaved];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Events
+//------------------------------------------------------------------------------
+
+- (void)convertDataFromAudioBufferList:(AudioBufferList *)audioBufferList
+ withNumberOfFrames:(UInt32)frames
+ toFloatBuffers:(float **)buffers
+{
+ [self convertDataFromAudioBufferList:audioBufferList
+ withNumberOfFrames:frames
+ toFloatBuffers:buffers
+ packetDescriptions:self.info->packetDescriptions];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)convertDataFromAudioBufferList:(AudioBufferList *)audioBufferList
+ withNumberOfFrames:(UInt32)frames
+ toFloatBuffers:(float **)buffers
+ packetDescriptions:(AudioStreamPacketDescription *)packetDescriptions
+{
+ if (frames == 0)
+ {
+
+ }
+ else
+ {
+ [EZAudioUtilities checkResult:AudioConverterFillComplexBuffer(self.info->converterRef,
+ EZAudioFloatConverterCallback,
+ audioBufferList,
+ &frames,
+ self.info->floatAudioBufferList,
+ packetDescriptions ? packetDescriptions : self.info->packetDescriptions)
+ operation:"Failed to fill complex buffer in float converter"];
+ for (int i = 0; i < self.info->floatAudioBufferList->mNumberBuffers; i++)
+ {
+ memcpy(buffers[i],
+ self.info->floatAudioBufferList->mBuffers[i].mData,
+ self.info->floatAudioBufferList->mBuffers[i].mDataByteSize);
+ }
+ }
+}
+
+//------------------------------------------------------------------------------
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioFloatData.h b/Pods/EZAudio/EZAudio/EZAudioFloatData.h
new file mode 100644
index 0000000..547c715
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioFloatData.h
@@ -0,0 +1,52 @@
+//
+// EZAudioFloatData.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 6/23/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFloatData
+//------------------------------------------------------------------------------
+
+@interface EZAudioFloatData : NSObject
+
+//------------------------------------------------------------------------------
+
++ (instancetype)dataWithNumberOfChannels:(int)numberOfChannels
+ buffers:(float **)buffers
+ bufferSize:(UInt32)bufferSize;
+
+//------------------------------------------------------------------------------
+
+@property (nonatomic, assign, readonly) int numberOfChannels;
+@property (nonatomic, assign, readonly) float **buffers;
+@property (nonatomic, assign, readonly) UInt32 bufferSize;
+
+//------------------------------------------------------------------------------
+
+- (float *)bufferForChannel:(int)channel;
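+
+// For example, reading the left channel of a stereo buffer (sketch):
+//
+//     float *left = [floatData bufferForChannel:0];
+//     if (left != NULL)
+//     {
+//         float firstSample = left[0];
+//     }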
+
+//------------------------------------------------------------------------------
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioFloatData.m b/Pods/EZAudio/EZAudio/EZAudioFloatData.m
new file mode 100644
index 0000000..674517e
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioFloatData.m
@@ -0,0 +1,85 @@
+//
+// EZAudioFloatData.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 6/23/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZAudioFloatData.h"
+#import "EZAudioUtilities.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFloatData
+//------------------------------------------------------------------------------
+
+@interface EZAudioFloatData ()
+@property (nonatomic, assign, readwrite) int numberOfChannels;
+@property (nonatomic, assign, readwrite) float **buffers;
+@property (nonatomic, assign, readwrite) UInt32 bufferSize;
+@end
+
+//------------------------------------------------------------------------------
+
+@implementation EZAudioFloatData
+
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+ [EZAudioUtilities freeFloatBuffers:self.buffers
+ numberOfChannels:self.numberOfChannels];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)dataWithNumberOfChannels:(int)numberOfChannels
+ buffers:(float **)buffers
+ bufferSize:(UInt32)bufferSize
+{
+ id data = [[self alloc] init];
+ size_t size = sizeof(float) * bufferSize;
+ float **buffersCopy = [EZAudioUtilities floatBuffersWithNumberOfFrames:bufferSize
+ numberOfChannels:numberOfChannels];
+ for (int i = 0; i < numberOfChannels; i++)
+ {
+ memcpy(buffersCopy[i], buffers[i], size);
+ }
+ ((EZAudioFloatData *)data).buffers = buffersCopy;
+ ((EZAudioFloatData *)data).bufferSize = bufferSize;
+ ((EZAudioFloatData *)data).numberOfChannels = numberOfChannels;
+ return data;
+}
+
+//------------------------------------------------------------------------------
+
+- (float *)bufferForChannel:(int)channel
+{
+ float *buffer = NULL;
+ if (channel < self.numberOfChannels)
+ {
+ buffer = self.buffers[channel];
+ }
+ return buffer;
+}
+
+//------------------------------------------------------------------------------
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioPlayer.h b/Pods/EZAudio/EZAudio/EZAudioPlayer.h
new file mode 100644
index 0000000..245177b
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioPlayer.h
@@ -0,0 +1,423 @@
+//
+// EZAudioPlayer.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 1/16/14.
+// Copyright (c) 2014 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import "TargetConditionals.h"
+#import "EZAudioFile.h"
+#import "EZOutput.h"
+
+@class EZAudioPlayer;
+
+//------------------------------------------------------------------------------
+#pragma mark - Notifications
+//------------------------------------------------------------------------------
+
+/**
+ Notification that occurs whenever the EZAudioPlayer changes its `audioFile` property. Check the new value using the EZAudioPlayer's `audioFile` property.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidChangeAudioFileNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer changes its `device` property. Check the new value using the EZAudioPlayer's `device` property.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidChangeOutputDeviceNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer changes its `output` component's `pan` property. Check the new value using the EZAudioPlayer's `pan` property.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidChangePanNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer changes its `output` component's play state. Check the new value using the EZAudioPlayer's `isPlaying` property.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidChangePlayStateNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer changes its `output` component's `volume` property. Check the new value using the EZAudioPlayer's `volume` property.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidChangeVolumeNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer has reached the end of a file and its `shouldLoop` property has been set to NO.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidReachEndOfFileNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer performs a seek via the `seekToFrame` method or `setCurrentTime:` property setter. Check the new `currentTime` or `frameIndex` value using the EZAudioPlayer's `currentTime` or `frameIndex` property, respectively.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidSeekNotification;
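+
+// A minimal observation sketch, assuming `self` implements a hypothetical
+// `playerDidSeek:` method; any of the notifications above can be observed the same way:
+//
+//     [[NSNotificationCenter defaultCenter] addObserver:self
+//                                              selector:@selector(playerDidSeek:)
+//                                                  name:EZAudioPlayerDidSeekNotification
+//                                                object:nil];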
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlayerDelegate
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioPlayerDelegate provides event callbacks for the EZAudioPlayer. Since 0.5.0 the EZAudioPlayerDelegate provides a smaller set of delegate methods in favor of notifications to allow multiple receivers of the EZAudioPlayer event callbacks since only one player is typically used in an application. Specifically, these methods are provided for high frequency callbacks that wrap the EZAudioPlayer's internal EZAudioFile and EZOutput instances.
+ @warning These callbacks don't necessarily occur on the main thread so make sure you wrap any UI code in a GCD block like: dispatch_async(dispatch_get_main_queue(), ^{ // Update UI });
+ */
+@protocol EZAudioPlayerDelegate <NSObject>
+
+@optional
+
+//------------------------------------------------------------------------------
+
+/**
+ Triggered by the EZAudioPlayer's internal EZAudioFile's EZAudioFileDelegate callback and notifies the delegate of the read audio data as a float array instead of a buffer list. A common use case would be to visualize the float data using an audio plot or an audio-data-dependent OpenGL sketch.
+ @param audioPlayer The instance of the EZAudioPlayer that triggered the event
+ @param buffer A float array of float arrays holding the audio data. buffer[0] would be the left channel's float array while buffer[1] would be the right channel's float array in a stereo file.
+ @param bufferSize The length of each channel's float array
+ @param numberOfChannels The number of channels. 2 for stereo, 1 for mono.
+ @param audioFile The instance of the EZAudioFile that the event was triggered from
+ */
+- (void) audioPlayer:(EZAudioPlayer *)audioPlayer
+ playedAudio:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels
+ inAudioFile:(EZAudioFile *)audioFile;
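+
+// A minimal delegate sketch, assuming the receiver holds an `audioPlot` (EZAudioPlot)
+// property; the buffer is handed back on the main queue before drawing:
+//
+//     - (void)audioPlayer:(EZAudioPlayer *)audioPlayer
+//             playedAudio:(float **)buffer
+//          withBufferSize:(UInt32)bufferSize
+//    withNumberOfChannels:(UInt32)numberOfChannels
+//             inAudioFile:(EZAudioFile *)audioFile
+//     {
+//         __weak typeof (self) weakSelf = self;
+//         dispatch_async(dispatch_get_main_queue(), ^{
+//             [weakSelf.audioPlot updateBuffer:buffer[0] withBufferSize:bufferSize];
+//         });
+//     }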
+
+//------------------------------------------------------------------------------
+
+/**
+ Triggered by EZAudioPlayer's internal EZAudioFile's EZAudioFileDelegate callback and notifies the delegate of the current playback position. The framePosition provides the current frame position and can be calculated against the EZAudioPlayer's total frames using the `totalFrames` function from the EZAudioPlayer.
+ @param audioPlayer The instance of the EZAudioPlayer that triggered the event
+ @param framePosition The new frame index as a 64-bit signed integer
+ @param audioFile The instance of the EZAudioFile that the event was triggered from
+ */
+- (void)audioPlayer:(EZAudioPlayer *)audioPlayer
+ updatedPosition:(SInt64)framePosition
+ inAudioFile:(EZAudioFile *)audioFile;
+
+
+/**
+ Triggered by EZAudioPlayer's internal EZAudioFile's EZAudioFileDelegate callback and notifies the delegate that the end of the file has been reached.
+ @param audioPlayer The instance of the EZAudioPlayer that triggered the event
+ @param audioFile The instance of the EZAudioFile that the event was triggered from
+ */
+- (void)audioPlayer:(EZAudioPlayer *)audioPlayer
+reachedEndOfAudioFile:(EZAudioFile *)audioFile;
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlayer
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioPlayer provides an interface that combines the EZAudioFile and EZOutput to play local audio files. This class acts as the master delegate (the EZAudioFileDelegate) for whatever EZAudioFile instance is set as the `audioFile` property, as well as the EZOutputDelegate and EZOutputDataSource for whatever EZOutput instance is set as the `output`. Classes that want the EZAudioFileDelegate callbacks should instead implement the EZAudioPlayerDelegate and set themselves as the EZAudioPlayer's `delegate`. Since 0.5.0 the EZAudioPlayer favors notifications over the usual delegate methods so that multiple receivers can observe the EZAudioPlayer's state changes, since one player will typically be shared across an application. The EZAudioPlayerDelegate, the `delegate`, provides callbacks only for high frequency events that simply wrap the EZAudioFileDelegate and EZOutputDelegate callbacks, namely the audio buffer played and the position updates (you will typically have one scrub bar in an application).
+ */
+@interface EZAudioPlayer : NSObject <EZAudioFileDelegate, EZOutputDataSource, EZOutputDelegate>
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Properties
+///-----------------------------------------------------------
+
+/**
+ The EZAudioPlayerDelegate that will handle the audio player callbacks
+ */
+@property (nonatomic, weak) id<EZAudioPlayerDelegate> delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ A BOOL indicating whether the player should loop the file
+ */
+@property (nonatomic, assign) BOOL shouldLoop;
+
+//------------------------------------------------------------------------------
+#pragma mark - Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Initializers
+///-----------------------------------------------------------
+
+/**
+ Initializes the EZAudioPlayer with an EZAudioFile instance. This does not use the EZAudioFile by reference, but instead creates a separate EZAudioFile instance with the same file at the given file path (provided by the internal NSURL) to use for internal seeking, so it doesn't cause any locking with the caller's instance of the EZAudioFile.
+ @param audioFile The instance of the EZAudioFile to use for initializing the EZAudioPlayer
+ @return The newly created instance of the EZAudioPlayer
+ */
+- (instancetype)initWithAudioFile:(EZAudioFile *)audioFile;
+
+//------------------------------------------------------------------------------
+
+/**
+ Initializes the EZAudioPlayer with an EZAudioFile instance and provides a way to assign the EZAudioPlayerDelegate on instantiation. This does not use the EZAudioFile by reference, but instead creates a separate EZAudioFile instance with the same file at the given file path (provided by the internal NSURL) to use for internal seeking, so it doesn't cause any locking with the caller's instance of the EZAudioFile.
+ @param audioFile The instance of the EZAudioFile to use for initializing the EZAudioPlayer
+ @param delegate The receiver that will act as the EZAudioPlayerDelegate. Set to nil if it should have no delegate or use the initWithAudioFile: function instead.
+ @return The newly created instance of the EZAudioPlayer
+ */
+- (instancetype)initWithAudioFile:(EZAudioFile *)audioFile
+ delegate:(id<EZAudioPlayerDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Initializes the EZAudioPlayer with an EZAudioPlayerDelegate.
+ @param delegate The receiver that will act as the EZAudioPlayerDelegate. Set to nil if it should have no delegate or use the initWithAudioFile: function instead.
+ @return The newly created instance of the EZAudioPlayer
+ */
+- (instancetype)initWithDelegate:(id<EZAudioPlayerDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Initializes the EZAudioPlayer with an NSURL instance representing the file path of the audio file.
+ @param url The NSURL instance representing the file path of the audio file.
+ @return The newly created instance of the EZAudioPlayer
+ */
+- (instancetype)initWithURL:(NSURL*)url;
+
+//------------------------------------------------------------------------------
+
+/**
+ Initializes the EZAudioPlayer with an NSURL instance representing the file path of the audio file and a caller to assign as the EZAudioPlayerDelegate on instantiation.
+ @param url The NSURL instance representing the file path of the audio file.
+ @param delegate The receiver that will act as the EZAudioPlayerDelegate. Set to nil if it should have no delegate or use the initWithAudioFile: function instead.
+ @return The newly created instance of the EZAudioPlayer
+ */
+- (instancetype)initWithURL:(NSURL*)url
+ delegate:(id<EZAudioPlayerDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Class Initializers
+///-----------------------------------------------------------
+
+/**
+ Class initializer that creates a default EZAudioPlayer.
+ @return The newly created instance of the EZAudioPlayer
+ */
++ (instancetype)audioPlayer;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class initializer that creates the EZAudioPlayer with an EZAudioFile instance. This does not use the EZAudioFile by reference, but instead creates a separate EZAudioFile instance with the same file at the given file path (provided by the internal NSURL) to use for internal seeking, so it doesn't cause any locking with the caller's instance of the EZAudioFile.
+ @param audioFile The instance of the EZAudioFile to use for initializing the EZAudioPlayer
+ @return The newly created instance of the EZAudioPlayer
+ */
++ (instancetype)audioPlayerWithAudioFile:(EZAudioFile *)audioFile;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class initializer that creates the EZAudioPlayer with an EZAudioFile instance and provides a way to assign the EZAudioPlayerDelegate on instantiation. This does not use the EZAudioFile by reference, but instead creates a separate EZAudioFile instance with the same file at the given file path (provided by the internal NSURL) to use for internal seeking, so it doesn't cause any locking with the caller's instance of the EZAudioFile.
+ @param audioFile The instance of the EZAudioFile to use for initializing the EZAudioPlayer
+ @param delegate The receiver that will act as the EZAudioPlayerDelegate. Set to nil if it should have no delegate or use the audioPlayerWithAudioFile: function instead.
+ @return The newly created instance of the EZAudioPlayer
+ */
++ (instancetype)audioPlayerWithAudioFile:(EZAudioFile *)audioFile
+ delegate:(id<EZAudioPlayerDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class initializer that creates a default EZAudioPlayer with an EZAudioPlayerDelegate.
+ @param delegate The receiver that will act as the EZAudioPlayerDelegate. Set to nil if it should have no delegate or use the audioPlayer class method instead.
+ @return The newly created instance of the EZAudioPlayer
+ */
++ (instancetype)audioPlayerWithDelegate:(id<EZAudioPlayerDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class initializer that creates the EZAudioPlayer with an NSURL instance representing the file path of the audio file.
+ @param url The NSURL instance representing the file path of the audio file.
+ @return The newly created instance of the EZAudioPlayer
+ */
++ (instancetype)audioPlayerWithURL:(NSURL*)url;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class initializer that creates the EZAudioPlayer with an NSURL instance representing the file path of the audio file and a caller to assign as the EZAudioPlayerDelegate on instantiation.
+ @param url The NSURL instance representing the file path of the audio file.
+ @param delegate The receiver that will act as the EZAudioPlayerDelegate. Set to nil if it should have no delegate or use the audioPlayerWithURL: function instead.
+ @return The newly created instance of the EZAudioPlayer
+ */
++ (instancetype)audioPlayerWithURL:(NSURL*)url
+ delegate:(id<EZAudioPlayerDelegate>)delegate;
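+
+// A minimal playback sketch, assuming a bundled file named "sample.m4a" and a
+// delegate-conforming `self`:
+//
+//     NSURL *url = [[NSBundle mainBundle] URLForResource:@"sample" withExtension:@"m4a"];
+//     EZAudioPlayer *player = [EZAudioPlayer audioPlayerWithURL:url delegate:self];
+//     [player play];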
+
+//------------------------------------------------------------------------------
+#pragma mark - Singleton
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Shared Instance
+///-----------------------------------------------------------
+
+/**
+ The shared instance (singleton) of the audio player. Most applications will only have one instance of the EZAudioPlayer that can be reused with multiple different audio files.
+ @return The shared instance of the EZAudioPlayer.
+ */
++ (instancetype)sharedAudioPlayer;
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Properties
+///-----------------------------------------------------------
+
+/**
+ Provides the EZAudioFile instance that is being used as the datasource for playback. When set, the player does not use the provided EZAudioFile by reference, but instead creates and keeps a copy of it for internal use.
+ */
+@property (nonatomic, readwrite, copy) EZAudioFile *audioFile;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current offset in the audio file as an NSTimeInterval (i.e. in seconds). When setting this it will determine the correct frame offset and perform a `seekToFrame` to the new time offset.
+ @warning Make sure the new current time offset is less than the `duration` or you will receive an invalid seek assertion.
+ */
+@property (nonatomic, readwrite) NSTimeInterval currentTime;
+
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioDevice instance that is being used by the `output`. Getting this returns the `output`'s current device, and setting it just sets the `device` property of the `output`.
+ */
+@property (readwrite) EZAudioDevice *device;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the duration of the audio file in seconds.
+ */
+@property (readonly) NSTimeInterval duration;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current time as an NSString with the time format MM:SS.
+ */
+@property (readonly) NSString *formattedCurrentTime;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the duration as an NSString with the time format MM:SS.
+ */
+@property (readonly) NSString *formattedDuration;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the EZOutput that is being used to handle the actual playback of the audio data. This property is also settable, but note that the EZAudioPlayer will become the output's EZOutputDataSource and EZOutputDelegate. To listen for the EZOutput's delegate methods your view should implement the EZAudioPlayerDelegate and set itself as the EZAudioPlayer's `delegate`.
+ */
+@property (nonatomic, strong, readwrite) EZOutput *output;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the frame index (a.k.a. the seek position) within the audio file being used for playback. This can be helpful when seeking through the audio file.
+ @return An SInt64 representing the current frame index within the audio file used for playback.
+ */
+@property (readonly) SInt64 frameIndex;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides a flag indicating whether the EZAudioPlayer is currently playing back any audio.
+ @return A BOOL indicating whether or not the EZAudioPlayer is performing playback.
+ */
+@property (readonly) BOOL isPlaying;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current pan from the audio player's internal `output` component. Setting the pan adjusts the direction of the audio signal from left (0) to right (1). Default is 0.5 (middle).
+ */
+@property (nonatomic, assign) float pan;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the total number of frames in the current audio file being used for playback.
+ @return A SInt64 representing the total number of frames in the current audio file being used for playback.
+ */
+@property (readonly) SInt64 totalFrames;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the file path that's currently being used by the player for playback.
+ @return The NSURL representing the file path of the audio file being used for playback.
+ */
+@property (nonatomic, copy, readonly) NSURL *url;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current volume from the audio player's internal `output` component. Setting the volume adjusts the gain of the output between 0 and 1. Default is 1.
+ */
+@property (nonatomic, assign) float volume;
+
+//------------------------------------------------------------------------------
+#pragma mark - Actions
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Controlling Playback
+///-----------------------------------------------------------
+
+/**
+ Starts playback.
+ */
+- (void)play;
+
+//------------------------------------------------------------------------------
+
+/**
+ Loads an EZAudioFile and immediately starts playing it.
+ @param audioFile An EZAudioFile to use for immediate playback.
+ */
+- (void)playAudioFile:(EZAudioFile *)audioFile;
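+
+// A minimal sketch, assuming `player` is an EZAudioPlayer and `someURL` points at a
+// readable audio file:
+//
+//     EZAudioFile *file = [EZAudioFile audioFileWithURL:someURL];
+//     [player playAudioFile:file];   // pauses, swaps in the new file, then resumes playback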
+
+//------------------------------------------------------------------------------
+
+/**
+ Pauses playback.
+ */
+- (void)pause;
+
+//------------------------------------------------------------------------------
+
+/**
+ Seeks playback to a specified frame within the internal EZAudioFile. This will notify the EZAudioFileDelegate (if specified) with the audioPlayer:updatedPosition:inAudioFile: function.
+ @param frame The new frame position to seek to as a SInt64.
+ */
+- (void)seekToFrame:(SInt64)frame;
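+
+// A minimal seeking sketch, assuming `player` is an EZAudioPlayer with a loaded file;
+// both forms below post EZAudioPlayerDidSeekNotification:
+//
+//     [player seekToFrame:player.totalFrames / 2];   // seek by frame index
+//     player.currentTime = player.duration / 2.0;    // or seek by time in seconds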
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZAudioPlayer.m b/Pods/EZAudio/EZAudio/EZAudioPlayer.m
new file mode 100644
index 0000000..77aeb29
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioPlayer.m
@@ -0,0 +1,445 @@
+//
+// EZAudioPlayer.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 1/16/14.
+// Copyright (c) 2014 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZAudioPlayer.h"
+#import "EZAudioUtilities.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Notifications
+//------------------------------------------------------------------------------
+
+NSString * const EZAudioPlayerDidChangeAudioFileNotification = @"EZAudioPlayerDidChangeAudioFileNotification";
+NSString * const EZAudioPlayerDidChangeOutputDeviceNotification = @"EZAudioPlayerDidChangeOutputDeviceNotification";
+NSString * const EZAudioPlayerDidChangePanNotification = @"EZAudioPlayerDidChangePanNotification";
+NSString * const EZAudioPlayerDidChangePlayStateNotification = @"EZAudioPlayerDidChangePlayStateNotification";
+NSString * const EZAudioPlayerDidChangeVolumeNotification = @"EZAudioPlayerDidChangeVolumeNotification";
+NSString * const EZAudioPlayerDidReachEndOfFileNotification = @"EZAudioPlayerDidReachEndOfFileNotification";
+NSString * const EZAudioPlayerDidSeekNotification = @"EZAudioPlayerDidSeekNotification";
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlayer (Implementation)
+//------------------------------------------------------------------------------
+
+@implementation EZAudioPlayer
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Methods
+//------------------------------------------------------------------------------
+
++ (instancetype)audioPlayer
+{
+ return [[self alloc] init];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)audioPlayerWithDelegate:(id<EZAudioPlayerDelegate>)delegate
+{
+ return [[self alloc] initWithDelegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)audioPlayerWithAudioFile:(EZAudioFile *)audioFile
+{
+ return [[self alloc] initWithAudioFile:audioFile];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)audioPlayerWithAudioFile:(EZAudioFile *)audioFile
+ delegate:(id<EZAudioPlayerDelegate>)delegate
+{
+ return [[self alloc] initWithAudioFile:audioFile
+ delegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)audioPlayerWithURL:(NSURL *)url
+{
+ return [[self alloc] initWithURL:url];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)audioPlayerWithURL:(NSURL *)url
+ delegate:(id<EZAudioPlayerDelegate>)delegate
+{
+ return [[self alloc] initWithURL:url delegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initialization
+//------------------------------------------------------------------------------
+
+- (instancetype)init
+{
+ self = [super init];
+ if (self)
+ {
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithDelegate:(id<EZAudioPlayerDelegate>)delegate
+{
+ self = [self init];
+ if (self)
+ {
+ self.delegate = delegate;
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithAudioFile:(EZAudioFile *)audioFile
+{
+ return [self initWithAudioFile:audioFile delegate:nil];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithAudioFile:(EZAudioFile *)audioFile
+ delegate:(id<EZAudioPlayerDelegate>)delegate
+{
+ self = [self initWithDelegate:delegate];
+ if (self)
+ {
+ self.audioFile = audioFile;
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithURL:(NSURL *)url
+{
+ return [self initWithURL:url delegate:nil];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithURL:(NSURL *)url
+ delegate:(id<EZAudioPlayerDelegate>)delegate
+{
+ self = [self initWithDelegate:delegate];
+ if (self)
+ {
+ self.audioFile = [EZAudioFile audioFileWithURL:url delegate:self];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Singleton
+//------------------------------------------------------------------------------
+
++ (instancetype)sharedAudioPlayer
+{
+ static EZAudioPlayer *player;
+ static dispatch_once_t onceToken;
+ dispatch_once(&onceToken, ^
+ {
+ player = [[self alloc] init];
+ });
+ return player;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setup
+//------------------------------------------------------------------------------
+
+- (void)setup
+{
+ self.output = [EZOutput output];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Getters
+//------------------------------------------------------------------------------
+
+- (NSTimeInterval)currentTime
+{
+ return [self.audioFile currentTime];
+}
+
+//------------------------------------------------------------------------------
+
+- (EZAudioDevice *)device
+{
+ return [self.output device];
+}
+
+//------------------------------------------------------------------------------
+
+- (NSTimeInterval)duration
+{
+ return [self.audioFile duration];
+}
+
+//------------------------------------------------------------------------------
+
+- (NSString *)formattedCurrentTime
+{
+ return [self.audioFile formattedCurrentTime];
+}
+
+//------------------------------------------------------------------------------
+
+- (NSString *)formattedDuration
+{
+ return [self.audioFile formattedDuration];
+}
+
+//------------------------------------------------------------------------------
+
+- (SInt64)frameIndex
+{
+ return [self.audioFile frameIndex];
+}
+
+//------------------------------------------------------------------------------
+
+- (BOOL)isPlaying
+{
+ return [self.output isPlaying];
+}
+
+//------------------------------------------------------------------------------
+
+- (float)pan
+{
+ return [self.output pan];
+}
+
+//------------------------------------------------------------------------------
+
+- (SInt64)totalFrames
+{
+ return [self.audioFile totalFrames];
+}
+
+//------------------------------------------------------------------------------
+
+- (float)volume
+{
+ return [self.output volume];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setters
+//------------------------------------------------------------------------------
+
+- (void)setAudioFile:(EZAudioFile *)audioFile
+{
+ _audioFile = [audioFile copy];
+ _audioFile.delegate = self;
+ AudioStreamBasicDescription inputFormat = _audioFile.clientFormat;
+ [self.output setInputFormat:inputFormat];
+ [[NSNotificationCenter defaultCenter] postNotificationName:EZAudioPlayerDidChangeAudioFileNotification
+ object:self];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setCurrentTime:(NSTimeInterval)currentTime
+{
+ [self.audioFile setCurrentTime:currentTime];
+ [[NSNotificationCenter defaultCenter] postNotificationName:EZAudioPlayerDidSeekNotification
+ object:self];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setDevice:(EZAudioDevice *)device
+{
+ [self.output setDevice:device];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setOutput:(EZOutput *)output
+{
+ _output = output;
+ _output.dataSource = self;
+ _output.delegate = self;
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setPan:(float)pan
+{
+ [self.output setPan:pan];
+ [[NSNotificationCenter defaultCenter] postNotificationName:EZAudioPlayerDidChangePanNotification
+ object:self];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setVolume:(float)volume
+{
+ [self.output setVolume:volume];
+ [[NSNotificationCenter defaultCenter] postNotificationName:EZAudioPlayerDidChangeVolumeNotification
+ object:self];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Actions
+//------------------------------------------------------------------------------
+
+- (void)play
+{
+ [self.output startPlayback];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)playAudioFile:(EZAudioFile *)audioFile
+{
+ //
+ // stop playing anything that might currently be playing
+ //
+ [self pause];
+
+ //
+ // set new stream
+ //
+ self.audioFile = audioFile;
+
+ //
+ // begin playback
+ //
+ [self play];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)pause
+{
+ [self.output stopPlayback];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)seekToFrame:(SInt64)frame
+{
+ [self.audioFile seekToFrame:frame];
+ [[NSNotificationCenter defaultCenter] postNotificationName:EZAudioPlayerDidSeekNotification
+ object:self];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - EZOutputDataSource
+//------------------------------------------------------------------------------
+
+- (OSStatus) output:(EZOutput *)output
+ shouldFillAudioBufferList:(AudioBufferList *)audioBufferList
+ withNumberOfFrames:(UInt32)frames
+ timestamp:(const AudioTimeStamp *)timestamp
+{
+ if (self.audioFile)
+ {
+ //
+ // pull the next buffer of audio from the file directly into the output's
+ // AudioBufferList
+ //
+ UInt32 bufferSize;
+ BOOL eof;
+ [self.audioFile readFrames:frames
+ audioBufferList:audioBufferList
+ bufferSize:&bufferSize
+ eof:&eof];
+
+ //
+ // at the end of the file either loop back to the first frame or stop playback
+ // and notify any listeners
+ //
+ if (eof && [self.delegate respondsToSelector:@selector(audioPlayer:reachedEndOfAudioFile:)])
+ {
+ [self.delegate audioPlayer:self reachedEndOfAudioFile:self.audioFile];
+ }
+ if (eof && self.shouldLoop)
+ {
+ [self seekToFrame:0];
+ }
+ else if (eof)
+ {
+ [self pause];
+ [self seekToFrame:0];
+ [[NSNotificationCenter defaultCenter] postNotificationName:EZAudioPlayerDidReachEndOfFileNotification
+ object:self];
+ }
+ }
+ return noErr;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioFileDelegate
+//------------------------------------------------------------------------------
+
+- (void)audioFileUpdatedPosition:(EZAudioFile *)audioFile
+{
+ if ([self.delegate respondsToSelector:@selector(audioPlayer:updatedPosition:inAudioFile:)])
+ {
+ [self.delegate audioPlayer:self
+ updatedPosition:[audioFile frameIndex]
+ inAudioFile:audioFile];
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - EZOutputDelegate
+//------------------------------------------------------------------------------
+
+- (void)output:(EZOutput *)output changedDevice:(EZAudioDevice *)device
+{
+ [[NSNotificationCenter defaultCenter] postNotificationName:EZAudioPlayerDidChangeOutputDeviceNotification
+ object:self];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)output:(EZOutput *)output changedPlayingState:(BOOL)isPlaying
+{
+ [[NSNotificationCenter defaultCenter] postNotificationName:EZAudioPlayerDidChangePlayStateNotification
+ object:self];
+}
+
+//------------------------------------------------------------------------------
+
+- (void) output:(EZOutput *)output
+ playedAudio:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels
+{
+ if ([self.delegate respondsToSelector:@selector(audioPlayer:playedAudio:withBufferSize:withNumberOfChannels:inAudioFile:)])
+ {
+ [self.delegate audioPlayer:self
+ playedAudio:buffer
+ withBufferSize:bufferSize
+ withNumberOfChannels:numberOfChannels
+ inAudioFile:self.audioFile];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZAudioPlot.h b/Pods/EZAudio/EZAudio/EZAudioPlot.h
new file mode 100644
index 0000000..f737ee9
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioPlot.h
@@ -0,0 +1,199 @@
+//
+// EZAudioPlot.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 9/2/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import "EZPlot.h"
+
+@class EZAudio;
+
+//------------------------------------------------------------------------------
+#pragma mark - Constants
+//------------------------------------------------------------------------------
+
+/**
+ The default value used for the maximum rolling history buffer length of any EZAudioPlot.
+ @deprecated This constant is deprecated starting in version 0.2.0.
+ @note Please use EZAudioPlotDefaultMaxHistoryBufferLength instead.
+ */
+FOUNDATION_EXPORT UInt32 const kEZAudioPlotMaxHistoryBufferLength __attribute__((deprecated));
+
+/**
+ The default value used for the default rolling history buffer length of any EZAudioPlot.
+ @deprecated This constant is deprecated starting in version 0.2.0.
+ @note Please use EZAudioPlotDefaultHistoryBufferLength instead.
+ */
+FOUNDATION_EXPORT UInt32 const kEZAudioPlotDefaultHistoryBufferLength __attribute__((deprecated));
+
+/**
+ The default value used for the default rolling history buffer length of any EZAudioPlot.
+ */
+FOUNDATION_EXPORT UInt32 const EZAudioPlotDefaultHistoryBufferLength;
+
+/**
+ The default value used for the maximum rolling history buffer length of any EZAudioPlot.
+ */
+FOUNDATION_EXPORT UInt32 const EZAudioPlotDefaultMaxHistoryBufferLength;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlotWaveformLayer
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioPlotWaveformLayer is a lightweight subclass of the CAShapeLayer that allows implicit animations on the `path` key.
+ */
+@interface EZAudioPlotWaveformLayer : CAShapeLayer
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlot
+//------------------------------------------------------------------------------
+
+/**
+ `EZAudioPlot`, a subclass of `EZPlot`, is a cross-platform (iOS and OSX) class that plots an audio waveform using Core Graphics.
+
+ The caller provides a constant stream of updated audio data to the `updateBuffer:withBufferSize:` function, which in turn will be plotted in one of the plot types:
+
+ * Buffer (`EZPlotTypeBuffer`) - A plot that only consists of the current buffer and buffer size from the last call to `updateBuffer:withBufferSize:`. This looks similar to the default openFrameworks input audio example.
+ * Rolling (`EZPlotTypeRolling`) - A plot that consists of a rolling history of values averaged from each buffer. This is the traditional waveform look.
+
+ #Parent Methods and Properties#
+
+ See EZPlot for full API methods and properties (colors, plot type, update function)
+
+ */
+@interface EZAudioPlot : EZPlot
+
+/**
+ A BOOL that allows optimizing the audio plot's drawing for real-time displays. Since the update function may be updating the plot's data very quickly (over 60 frames per second) this property will throttle the drawing calls to be 60 frames per second (or whatever the screen rate is). Specifically, it disables implicit path change animations on the `waveformLayer` and sets up a display link to render 60 fps (audio updating the plot at 44.1 kHz causes it to re-render 86 fps - far greater than what is needed for a visual display).
+ */
+@property (nonatomic, assign) BOOL shouldOptimizeForRealtimePlot;
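+
+// A minimal configuration sketch, assuming an iOS view controller; `plotType`,
+// `shouldFill`, and `shouldMirror` come from the EZPlot superclass:
+//
+//     EZAudioPlot *plot = [[EZAudioPlot alloc] initWithFrame:self.view.bounds];
+//     plot.plotType = EZPlotTypeRolling;
+//     plot.shouldFill = YES;
+//     plot.shouldMirror = YES;
+//     plot.shouldOptimizeForRealtimePlot = YES;
+//     [self.view addSubview:plot];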
+
+//------------------------------------------------------------------------------
+
+/**
+ A BOOL indicating whether the plot should center itself vertically.
+ */
+@property (nonatomic, assign) BOOL shouldCenterYAxis;
+
+//------------------------------------------------------------------------------
+
+/**
+ An EZAudioPlotWaveformLayer that is used to render the actual waveform. By switching the drawing code to Core Animation layers in version 0.2.0 most work, specifically the compositing step, is now done on the GPU. Hence, multiple EZAudioPlot instances can be used simultaneously with very low CPU overhead so these are now practical for table and collection views.
+ */
+@property (nonatomic, strong) EZAudioPlotWaveformLayer *waveformLayer;
+
+//------------------------------------------------------------------------------
+#pragma mark - Adjust Resolution
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Adjusting The Resolution
+///-----------------------------------------------------------
+
+/**
+ Sets the length of the rolling history buffer (i.e. the number of points in the rolling plot's buffer). Can grow or shrink the display up to the maximum size specified by the `maximumRollingHistoryLength` method. Will return the actual set value, which will be either the given value if smaller than the `maximumRollingHistoryLength` or `maximumRollingHistoryLength` if a larger value is attempted to be set.
+ @param historyLength The new length of the rolling history buffer.
+ @return The new value equal to the historyLength or the `maximumRollingHistoryLength`.
+ */
+-(int)setRollingHistoryLength:(int)historyLength;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the length of the rolling history buffer (i.e. the number of points in the rolling plot's buffer).
+ @return An int representing the length of the rolling history buffer
+ */
+-(int)rollingHistoryLength;
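+
+// A minimal sketch, assuming `plot` is an EZAudioPlot using the rolling plot type;
+// the setter clamps to `maximumRollingHistoryLength`, so the returned value may
+// differ from the requested one:
+//
+//     int actual = [plot setRollingHistoryLength:1024];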
+
+//------------------------------------------------------------------------------
+#pragma mark - Subclass Methods
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Subclass Methods
+///-----------------------------------------------------------
+
+/**
+ Main method that handles converting the points created from the `updatedBuffer:withBufferSize:` method into a CGPathRef to store in the `waveformLayer`. In this method you can create any path you'd like using the point array (for instance, maybe mapping the points to a circle instead of the standard 2D plane).
+ @param points An array of CGPoint structures, with the x values ranging from 0 - (pointCount - 1) and y values containing the last audio data's buffer.
+ @param pointCount A UInt32 of the length of the point array.
+ @param rect An EZRect (CGRect on iOS or NSRect on OSX) that the path should be created relative to.
+ @return A CGPathRef that is the path you'd like to store on the `waveformLayer` to visualize the audio data.
+ */
+- (CGPathRef)createPathWithPoints:(CGPoint *)points
+ pointCount:(UInt32)pointCount
+ inRect:(EZRect)rect;
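+
+// A minimal subclass sketch; `EZCircularAudioPlot` is a hypothetical subclass that
+// post-processes the default path (a real override would build its own CGPath from
+// the raw points):
+//
+//     @interface EZCircularAudioPlot : EZAudioPlot
+//     @end
+//
+//     @implementation EZCircularAudioPlot
+//     - (CGPathRef)createPathWithPoints:(CGPoint *)points
+//                            pointCount:(UInt32)pointCount
+//                                inRect:(EZRect)rect
+//     {
+//         // map `points` onto any geometry here; fall back to the default waveform path
+//         return [super createPathWithPoints:points pointCount:pointCount inRect:rect];
+//     }
+//     @end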
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the default length of the rolling history buffer when the plot is initialized. Default is `EZAudioPlotDefaultHistoryBufferLength` constant.
+ @return An int describing the initial length of the rolling history buffer.
+ */
+- (int)defaultRollingHistoryLength;
+
+//------------------------------------------------------------------------------
+
+/**
+ Called after the view has been created. Subclasses should use to add any additional methods needed instead of overriding the init methods.
+ */
+- (void)setupPlot;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the default number of points that will be used to initialize the graph's points data structure. Essentially the plot starts off as a flat line of this many points. Default is 100.
+ @return An int describing the initial number of points the plot should have when flat lined.
+ */
+- (int)initialPointCount;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the default maximum rolling history length - that is, the maximum amount of points the `setRollingHistoryLength:` method may be set to. If a length higher than this is set then the plot will likely crash because the appropriate resources are only allocated once during the plot's initialization step. Default is the `EZAudioPlotDefaultMaxHistoryBufferLength` constant.
+ @return An int describing the maximum length of the absolute rolling history buffer.
+ */
+- (int)maximumRollingHistoryLength;
+
+//------------------------------------------------------------------------------
+
+/**
+ Method to cause the waveform layer's path to get recreated and redrawn on screen using the last buffer of data provided. This is the equivalent of the drawRect: method normally used when subclassing a view's drawing. This normally doesn't need to be overridden though - a better approach would be to override the `createPathWithPoints:pointCount:inRect:` method.
+ */
+- (void)redraw;
+
+//------------------------------------------------------------------------------
+
+/**
+ Main method used to copy the sample data from the source buffer and update the
+ plot. Subclasses can override this method for custom behavior.
+ @param data A float array of the sample data. Subclasses should copy this data to a separate array to avoid threading issues.
+ @param length The length of the float array as an int.
+ */
+-(void)setSampleData:(float *)data length:(int)length;
+
+//------------------------------------------------------------------------------
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioPlot.m b/Pods/EZAudio/EZAudio/EZAudioPlot.m
new file mode 100644
index 0000000..a73d110
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioPlot.m
@@ -0,0 +1,465 @@
+//
+// EZAudioPlot.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 9/2/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZAudioPlot.h"
+#import "EZAudioDisplayLink.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Constants
+//------------------------------------------------------------------------------
+
+UInt32 const kEZAudioPlotMaxHistoryBufferLength = 8192;
+UInt32 const kEZAudioPlotDefaultHistoryBufferLength = 512;
+UInt32 const EZAudioPlotDefaultHistoryBufferLength = 512;
+UInt32 const EZAudioPlotDefaultMaxHistoryBufferLength = 8192;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlot (Interface Extension)
+//------------------------------------------------------------------------------
+
+@interface EZAudioPlot () <EZAudioDisplayLinkDelegate>
+@property (nonatomic, strong) EZAudioDisplayLink *displayLink;
+@property (nonatomic, assign) EZPlotHistoryInfo *historyInfo;
+@property (nonatomic, assign) CGPoint *points;
+@property (nonatomic, assign) UInt32 pointCount;
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlot (Implementation)
+//------------------------------------------------------------------------------
+
+@implementation EZAudioPlot
+
+//------------------------------------------------------------------------------
+#pragma mark - Dealloc
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+ [EZAudioUtilities freeHistoryInfo:self.historyInfo];
+ free(self.points);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initialization
+//------------------------------------------------------------------------------
+
+- (id)init
+{
+ self = [super init];
+ if (self)
+ {
+ [self initPlot];
+ }
+ return self;
+}
+
+- (id)initWithCoder:(NSCoder *)aDecoder
+{
+ self = [super initWithCoder:aDecoder];
+ if (self)
+ {
+ [self initPlot];
+ }
+ return self;
+}
+
+#if TARGET_OS_IPHONE
+- (id)initWithFrame:(CGRect)frameRect
+#elif TARGET_OS_MAC
+- (id)initWithFrame:(NSRect)frameRect
+#endif
+{
+ self = [super initWithFrame:frameRect];
+ if (self)
+ {
+ [self initPlot];
+ }
+ return self;
+}
+
+#if TARGET_OS_IPHONE
+- (void)layoutSubviews
+{
+ [super layoutSubviews];
+ [CATransaction begin];
+ [CATransaction setDisableActions:YES];
+ self.waveformLayer.frame = self.bounds;
+ [self redraw];
+ [CATransaction commit];
+}
+#elif TARGET_OS_MAC
+- (void)layout
+{
+ [super layout];
+ [CATransaction begin];
+ [CATransaction setDisableActions:YES];
+ self.waveformLayer.frame = self.bounds;
+ [self redraw];
+ [CATransaction commit];
+}
+#endif
+
+- (void)initPlot
+{
+ self.shouldCenterYAxis = YES;
+ self.shouldOptimizeForRealtimePlot = YES;
+ self.gain = 1.0;
+ self.plotType = EZPlotTypeBuffer;
+ self.shouldMirror = NO;
+ self.shouldFill = NO;
+
+ // Setup history window
+ [self resetHistoryBuffers];
+
+ self.waveformLayer = [EZAudioPlotWaveformLayer layer];
+ self.waveformLayer.frame = self.bounds;
+ self.waveformLayer.lineWidth = 1.0f;
+ self.waveformLayer.fillColor = nil;
+ self.waveformLayer.backgroundColor = nil;
+ self.waveformLayer.opaque = YES;
+
+#if TARGET_OS_IPHONE
+ self.color = [UIColor colorWithHue:0 saturation:1.0 brightness:1.0 alpha:1.0];
+#elif TARGET_OS_MAC
+ self.color = [NSColor colorWithCalibratedHue:0 saturation:1.0 brightness:1.0 alpha:1.0];
+ self.wantsLayer = YES;
+ self.layerContentsRedrawPolicy = NSViewLayerContentsRedrawOnSetNeedsDisplay;
+#endif
+ self.backgroundColor = nil;
+ [self.layer insertSublayer:self.waveformLayer atIndex:0];
+
+ //
+ // Allow subclass to initialize plot
+ //
+ [self setupPlot];
+
+ self.points = calloc(EZAudioPlotDefaultMaxHistoryBufferLength, sizeof(CGPoint));
+ self.pointCount = [self initialPointCount];
+ [self redraw];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setupPlot
+{
+ //
+ // Override in subclass
+ //
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setup
+//------------------------------------------------------------------------------
+
+- (void)resetHistoryBuffers
+{
+ //
+ // Clear any existing data
+ //
+ if (self.historyInfo)
+ {
+ [EZAudioUtilities freeHistoryInfo:self.historyInfo];
+ }
+
+ self.historyInfo = [EZAudioUtilities historyInfoWithDefaultLength:[self defaultRollingHistoryLength]
+ maximumLength:[self maximumRollingHistoryLength]];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setters
+//------------------------------------------------------------------------------
+
+- (void)setBackgroundColor:(id)backgroundColor
+{
+ [super setBackgroundColor:backgroundColor];
+ self.layer.backgroundColor = [backgroundColor CGColor];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setColor:(id)color
+{
+ [super setColor:color];
+ self.waveformLayer.strokeColor = [color CGColor];
+ if (self.shouldFill)
+ {
+ self.waveformLayer.fillColor = [color CGColor];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setShouldOptimizeForRealtimePlot:(BOOL)shouldOptimizeForRealtimePlot
+{
+ _shouldOptimizeForRealtimePlot = shouldOptimizeForRealtimePlot;
+ if (shouldOptimizeForRealtimePlot && !self.displayLink)
+ {
+ self.displayLink = [EZAudioDisplayLink displayLinkWithDelegate:self];
+ [self.displayLink start];
+ }
+ else
+ {
+ [self.displayLink stop];
+ self.displayLink = nil;
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setShouldFill:(BOOL)shouldFill
+{
+ [super setShouldFill:shouldFill];
+ self.waveformLayer.fillColor = shouldFill ? [self.color CGColor] : nil;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Drawing
+//------------------------------------------------------------------------------
+
+- (void)clear
+{
+ if (self.pointCount > 0)
+ {
+ [self resetHistoryBuffers];
+ float data[self.pointCount];
+ memset(data, 0, self.pointCount * sizeof(float));
+ [self setSampleData:data length:self.pointCount];
+ [self redraw];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)redraw
+{
+ EZRect frame = [self.waveformLayer frame];
+ CGPathRef path = [self createPathWithPoints:self.points
+ pointCount:self.pointCount
+ inRect:frame];
+ if (self.shouldOptimizeForRealtimePlot)
+ {
+ [CATransaction begin];
+ [CATransaction setDisableActions:YES];
+ self.waveformLayer.path = path;
+ [CATransaction commit];
+ }
+ else
+ {
+ self.waveformLayer.path = path;
+ }
+ CGPathRelease(path);
+}
+
+//------------------------------------------------------------------------------
+
+- (CGPathRef)createPathWithPoints:(CGPoint *)points
+ pointCount:(UInt32)pointCount
+ inRect:(EZRect)rect
+{
+ CGMutablePathRef path = NULL;
+ if (pointCount > 0)
+ {
+ path = CGPathCreateMutable();
+ double xscale = (rect.size.width) / ((float)self.pointCount);
+ double halfHeight = floor(rect.size.height / 2.0);
+ int deviceOriginFlipped = [self isDeviceOriginFlipped] ? -1 : 1;
+ CGAffineTransform xf = CGAffineTransformIdentity;
+ CGFloat translateY = 0.0f;
+ if (!self.shouldCenterYAxis)
+ {
+#if TARGET_OS_IPHONE
+ translateY = CGRectGetHeight(rect);
+#elif TARGET_OS_MAC
+ translateY = 0.0f;
+#endif
+ }
+ else
+ {
+ translateY = halfHeight + rect.origin.y;
+ }
+ xf = CGAffineTransformTranslate(xf, 0.0, translateY);
+ double yScaleFactor = halfHeight;
+ if (!self.shouldCenterYAxis)
+ {
+ yScaleFactor = 2.0 * halfHeight;
+ }
+ xf = CGAffineTransformScale(xf, xscale, deviceOriginFlipped * yScaleFactor);
+ CGPathAddLines(path, &xf, self.points, self.pointCount);
+ if (self.shouldMirror)
+ {
+ xf = CGAffineTransformScale(xf, 1.0f, -1.0f);
+ CGPathAddLines(path, &xf, self.points, self.pointCount);
+ }
+ if (self.shouldFill)
+ {
+ CGPathCloseSubpath(path);
+ }
+ }
+ return path;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Update
+//------------------------------------------------------------------------------
+
+- (void)updateBuffer:(float *)buffer withBufferSize:(UInt32)bufferSize
+{
+ // append the buffer to the history
+ [EZAudioUtilities appendBufferRMS:buffer
+ withBufferSize:bufferSize
+ toHistoryInfo:self.historyInfo];
+
+ // copy samples
+ switch (self.plotType)
+ {
+ case EZPlotTypeBuffer:
+ [self setSampleData:buffer
+ length:bufferSize];
+ break;
+ case EZPlotTypeRolling:
+
+ [self setSampleData:self.historyInfo->buffer
+ length:self.historyInfo->bufferSize];
+ break;
+ default:
+ break;
+ }
+
+ // update drawing
+ if (!self.shouldOptimizeForRealtimePlot)
+ {
+ [self redraw];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setSampleData:(float *)data length:(int)length
+{
+ CGPoint *points = self.points;
+ for (int i = 0; i < length; i++)
+ {
+ points[i].x = i;
+ points[i].y = data[i] * self.gain;
+ }
+ points[0].y = points[length - 1].y = 0.0f;
+ self.pointCount = length;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Adjusting History Resolution
+//------------------------------------------------------------------------------
+
+- (int)rollingHistoryLength
+{
+ return self.historyInfo->bufferSize;
+}
+
+//------------------------------------------------------------------------------
+
+- (int)setRollingHistoryLength:(int)historyLength
+{
+ self.historyInfo->bufferSize = MIN(EZAudioPlotDefaultMaxHistoryBufferLength, historyLength);
+ return self.historyInfo->bufferSize;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Subclass
+//------------------------------------------------------------------------------
+
+- (int)defaultRollingHistoryLength
+{
+ return EZAudioPlotDefaultHistoryBufferLength;
+}
+
+//------------------------------------------------------------------------------
+
+- (int)initialPointCount
+{
+ return 100;
+}
+
+//------------------------------------------------------------------------------
+
+- (int)maximumRollingHistoryLength
+{
+ return EZAudioPlotDefaultMaxHistoryBufferLength;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Utility
+//------------------------------------------------------------------------------
+
+- (BOOL)isDeviceOriginFlipped
+{
+ BOOL isDeviceOriginFlipped = NO;
+#if TARGET_OS_IPHONE
+ isDeviceOriginFlipped = YES;
+#elif TARGET_OS_MAC
+#endif
+ return isDeviceOriginFlipped;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioDisplayLinkDelegate
+//------------------------------------------------------------------------------
+
+- (void)displayLinkNeedsDisplay:(EZAudioDisplayLink *)displayLink
+{
+ [self redraw];
+}
+
+//------------------------------------------------------------------------------
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlotWaveformLayer (Implementation)
+//------------------------------------------------------------------------------
+
+@implementation EZAudioPlotWaveformLayer
+
+- (id)actionForKey:(NSString *)event
+{
+ if ([event isEqualToString:@"path"])
+ {
+ if ([CATransaction disableActions])
+ {
+ return nil;
+ }
+ else
+ {
+ CABasicAnimation *animation = [CABasicAnimation animation];
+ animation.timingFunction = [CATransaction animationTimingFunction];
+ animation.duration = [CATransaction animationDuration];
+ return animation;
+ }
+ }
+ return [super actionForKey:event];
+}
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioPlotGL.h b/Pods/EZAudio/EZAudio/EZAudioPlotGL.h
new file mode 100644
index 0000000..70e5a00
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioPlotGL.h
@@ -0,0 +1,251 @@
+//
+// EZAudioPlotGL.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 11/22/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <GLKit/GLKit.h>
+#import "EZPlot.h"
+#if !TARGET_OS_IPHONE
+#import <Cocoa/Cocoa.h>
+#endif
+
+//------------------------------------------------------------------------------
+#pragma mark - Data Structures
+//------------------------------------------------------------------------------
+
+typedef struct
+{
+ GLfloat x;
+ GLfloat y;
+} EZAudioPlotGLPoint;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlotGL
+//------------------------------------------------------------------------------
+
+/**
+ EZAudioPlotGL is a subclass of either a GLKView on iOS or an NSOpenGLView on OSX. As of 0.6.0 this class no longer depends on an embedded GLKViewController for iOS as the display link is just manually managed within this single view instead. The EZAudioPlotGL provides the same kind of audio plot as the EZAudioPlot, but uses OpenGL to GPU-accelerate the drawing of the points, which means you can fit a lot more points and complex geometries.
+ */
+#if TARGET_OS_IPHONE
+@interface EZAudioPlotGL : GLKView
+#elif TARGET_OS_MAC
+@interface EZAudioPlotGL : NSOpenGLView
+#endif
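+
+// A minimal usage sketch (illustrative only; `self.audioPlotGL` is an assumed
+// outlet name, not part of EZAudio):
+//
+//     self.audioPlotGL.plotType = EZPlotTypeRolling;
+//     self.audioPlotGL.shouldFill = YES;
+//     self.audioPlotGL.shouldMirror = YES;
+//     self.audioPlotGL.gain = 1.0f;
+//     // ...then feed audio buffers in via updateBuffer:withBufferSize: below.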
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Customizing The Plot's Appearance
+///-----------------------------------------------------------
+
+/**
+ The default background color of the plot. For iOS the color is specified as a UIColor while for OSX the color is an NSColor. The default value on both platforms is a sweet looking green.
+ @warning On OSX, if you set the background to a value where the alpha component is 0 then the EZAudioPlotGL will automatically set its superview to be layer-backed.
+ */
+#if TARGET_OS_IPHONE
+@property (nonatomic, strong) IBInspectable UIColor *backgroundColor;
+#elif TARGET_OS_MAC
+@property (nonatomic, strong) IBInspectable NSColor *backgroundColor;
+#endif
+
+//------------------------------------------------------------------------------
+
+/**
+ The default color of the plot's data (i.e. waveform, y-axis values). For iOS the color is specified as a UIColor while for OSX the color is an NSColor. The default value on both platforms is white.
+ */
+#if TARGET_OS_IPHONE
+@property (nonatomic, strong) IBInspectable UIColor *color;
+#elif TARGET_OS_MAC
+@property (nonatomic, strong) IBInspectable NSColor *color;
+#endif
+
+//------------------------------------------------------------------------------
+
+/**
+ The plot's gain value, which controls the scale of the y-axis values. The default value of the gain is 1.0f and should always be greater than 0.0f.
+ */
+@property (nonatomic, assign) IBInspectable float gain;
+
+//------------------------------------------------------------------------------
+
+/**
+ The type of plot as specified by the `EZPlotType` enumeration (i.e. a buffer or rolling plot type). Default is EZPlotTypeBuffer.
+ */
+@property (nonatomic, assign) EZPlotType plotType;
+
+//------------------------------------------------------------------------------
+
+/**
+ A BOOL indicating whether or not to fill in the graph. A value of YES will make a filled graph (filling in the space between the x-axis and the y-value), while a value of NO will create a stroked graph (connecting the points along the y-axis). Default is NO.
+ */
+@property (nonatomic, assign) IBInspectable BOOL shouldFill;
+
+//------------------------------------------------------------------------------
+
+/**
+ A boolean indicating whether the graph should be rotated along the x-axis to give a mirrored reflection. This is typical for audio plots to produce the classic waveform look. A value of YES will produce a mirrored reflection of the y-values about the x-axis, while a value of NO will only plot the y-values. Default is NO.
+ */
+@property (nonatomic, assign) IBInspectable BOOL shouldMirror;
+
+//------------------------------------------------------------------------------
+#pragma mark - Updating The Plot
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Updating The Plot
+///-----------------------------------------------------------
+
+/**
+ Updates the plot with the new buffer data and tells the view to redraw itself. Callers provide a float array with the values they expect to see on the y-axis. The plot will internally handle mapping the x-axis and y-axis to the current viewport, any interpolation for fill effects, and mirroring.
+ @param buffer A float array of values to map to the y-axis.
+ @param bufferSize The size of the float array that will be mapped to the y-axis.
+ */
+-(void)updateBuffer:(float *)buffer withBufferSize:(UInt32)bufferSize;
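+
+// For example, a sketch that pipes microphone audio into the plot on the main
+// queue (assumes the EZMicrophoneDelegate float buffer callback; `audioPlotGL`
+// is a hypothetical property):
+//
+//     - (void)   microphone:(EZMicrophone *)microphone
+//         hasAudioReceived:(float **)buffer
+//           withBufferSize:(UInt32)bufferSize
+//     withNumberOfChannels:(UInt32)numberOfChannels
+//     {
+//         __weak typeof (self) weakSelf = self;
+//         dispatch_async(dispatch_get_main_queue(), ^{
+//             [weakSelf.audioPlotGL updateBuffer:buffer[0]
+//                                 withBufferSize:bufferSize];
+//         });
+//     }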
+
+//------------------------------------------------------------------------------
+#pragma mark - Adjust Resolution
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Adjusting The Resolution
+///-----------------------------------------------------------
+
+/**
+ Sets the length of the rolling history buffer (i.e. the number of points in the rolling plot's buffer). Can grow or shrink the display up to the maximum size specified by the `maximumRollingHistoryLength` method. Returns the value actually set: the given value if it is smaller than `maximumRollingHistoryLength`, otherwise `maximumRollingHistoryLength`.
+ @param historyLength The new length of the rolling history buffer.
+ @return The new value equal to the historyLength or the `maximumRollingHistoryLength`.
+ */
+-(int)setRollingHistoryLength:(int)historyLength;
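+
+// For example, if `maximumRollingHistoryLength` returned 8192 (an illustrative
+// value), `[plot setRollingHistoryLength:10000]` would return 8192 while
+// `[plot setRollingHistoryLength:1024]` would return 1024.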
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the length of the rolling history buffer (i.e. the number of points in the rolling plot's buffer).
+ @return An int representing the length of the rolling history buffer.
+ */
+-(int)rollingHistoryLength;
+
+//------------------------------------------------------------------------------
+#pragma mark - Clearing The Plot
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Clearing The Plot
+///-----------------------------------------------------------
+
+/**
+ Clears all data from the audio plot (includes both EZPlotTypeBuffer and EZPlotTypeRolling)
+ */
+-(void)clear;
+
+//------------------------------------------------------------------------------
+#pragma mark - Start/Stop Display Link
+//------------------------------------------------------------------------------
+
+/**
+ Call this method to tell the EZAudioDisplayLink to stop drawing temporarily.
+ */
+- (void)pauseDrawing;
+
+//------------------------------------------------------------------------------
+
+/**
+ Call this method to manually tell the EZAudioDisplayLink to start drawing again.
+ */
+- (void)resumeDrawing;
+
+//------------------------------------------------------------------------------
+#pragma mark - Subclass
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Customizing The Drawing
+///-----------------------------------------------------------
+
+/**
+ This method is used to perform the actual OpenGL drawing code to clear the background and draw the lines representing the 2D audio plot. Subclasses can use the current implementation as an example and implement their own custom geometries. This is the analogy of overriding the drawRect: method in an NSView or UIView.
+ @param points An array of EZAudioPlotGLPoint structures representing the mapped audio data to x,y coordinates. The x-axis goes from 0 to the number of points (pointCount) while the y-axis goes from -1 to 1. Check out the implementation of this method to see how the model view matrix of the base effect is transformed to map this properly to the viewport.
+ @param pointCount A UInt32 representing the number of points contained in the points array.
+ @param baseEffect An optional GLKBaseEffect to use as a default shader. Call prepareToDraw on the base effect before any glDrawArrays call.
+ @param vbo The Vertex Buffer Object used to buffer the point data.
+ @param vab The Vertex Array Buffer used to bind the Vertex Buffer Object. This is only used on Mac and can be ignored on iOS.
+ @param interpolated A BOOL indicating whether the data has been interpolated. This means the point data is twice as long, where every other point is 0 on the y-axis to allow drawing triangle stripes for filled in waveforms. Typically if the point data is interpolated you will be using the GL_TRIANGLE_STRIP drawing mode, while non-interpolated plots will just use a GL_LINE_STRIP drawing mode.
+ @param mirrored A BOOL indicating whether the plot should be mirrored about the y-axis (or whatever geometry you come up with).
+ @param gain A float representing a gain that should be used to influence the height or intensity of your geometry's shape. A gain of 0.0 means silence, a gain of 1.0 means full volume (you're welcome to boost this to whatever you want).
+ */
+- (void)redrawWithPoints:(EZAudioPlotGLPoint *)points
+ pointCount:(UInt32)pointCount
+ baseEffect:(GLKBaseEffect *)baseEffect
+ vertexBufferObject:(GLuint)vbo
+ vertexArrayBuffer:(GLuint)vab
+ interpolated:(BOOL)interpolated
+ mirrored:(BOOL)mirrored
+ gain:(float)gain;
+
+//------------------------------------------------------------------------------
+
+/**
+ Called during the OpenGL run loop to constantly update the drawing at 60 fps. Callers can use this to force an update of the screen, while subclasses can override it for complete control over their rendering. However, subclasses are encouraged to use the `redrawWithPoints:pointCount:baseEffect:vertexBufferObject:vertexArrayBuffer:interpolated:mirrored:gain:` method instead.
+ */
+- (void)redraw;
+
+//------------------------------------------------------------------------------
+
+/**
+ Called after the view has been created. Subclasses should use this to perform any additional setup needed instead of overriding the init methods.
+ */
+- (void)setup;
+
+//------------------------------------------------------------------------------
+
+/**
+ Main method used to copy the sample data from the source buffer and update the
+ plot. Subclasses can overwrite this method for custom behavior.
+ @param data A float array of the sample data. Subclasses should copy this data to a separate array to avoid threading issues.
+ @param length The length of the float array as an int.
+ */
+- (void)setSampleData:(float *)data length:(int)length;
+
+///-----------------------------------------------------------
+/// @name Subclass Methods
+///-----------------------------------------------------------
+
+/**
+ Provides the default length of the rolling history buffer when the plot is initialized. Default is `EZAudioPlotDefaultHistoryBufferLength` constant.
+ @return An int describing the initial length of the rolling history buffer.
+ */
+- (int)defaultRollingHistoryLength;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the default maximum rolling history length - that is, the maximum number of points the `setRollingHistoryLength:` method may be set to. If a greater length is set then the plot will likely crash because the appropriate resources are only allocated once during the plot's initialization step. Default is the `EZAudioPlotDefaultMaxHistoryBufferLength` constant.
+ @return An int describing the maximum length of the absolute rolling history buffer.
+ */
+- (int)maximumRollingHistoryLength;
+
+//------------------------------------------------------------------------------
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioPlotGL.m b/Pods/EZAudio/EZAudio/EZAudioPlotGL.m
new file mode 100644
index 0000000..1672850
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioPlotGL.m
@@ -0,0 +1,548 @@
+//
+// EZAudioPlotGL.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 11/22/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZAudioPlotGL.h"
+#import "EZAudioDisplayLink.h"
+#import "EZAudioUtilities.h"
+#import "EZAudioPlot.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Data Structures
+//------------------------------------------------------------------------------
+
+typedef struct
+{
+ BOOL interpolated;
+ EZPlotHistoryInfo *historyInfo;
+ EZAudioPlotGLPoint *points;
+ UInt32 pointCount;
+ GLuint vbo;
+ GLuint vab;
+} EZAudioPlotGLInfo;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlotGL (Interface Extension)
+//------------------------------------------------------------------------------
+
+@interface EZAudioPlotGL ()
+@property (nonatomic, strong) GLKBaseEffect *baseEffect;
+@property (nonatomic, strong) EZAudioDisplayLink *displayLink;
+@property (nonatomic, assign) EZAudioPlotGLInfo *info;
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioPlotGL (Implementation)
+//------------------------------------------------------------------------------
+
+@implementation EZAudioPlotGL
+
+//------------------------------------------------------------------------------
+#pragma mark - Dealloc
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+ [self.displayLink stop];
+ self.displayLink = nil;
+ [EZAudioUtilities freeHistoryInfo:self.info->historyInfo];
+#if !TARGET_OS_IPHONE
+ glDeleteVertexArrays(1, &self.info->vab);
+#endif
+ glDeleteBuffers(1, &self.info->vbo);
+ free(self.info->points);
+ free(self.info);
+ self.baseEffect = nil;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initialization
+//------------------------------------------------------------------------------
+
+- (instancetype)init
+{
+ self = [super init];
+ if (self)
+ {
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (id)initWithCoder:(NSCoder *)aDecoder
+{
+ self = [super initWithCoder:aDecoder];
+ if (self)
+ {
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithFrame:(EZRect)frame
+{
+ self = [super initWithFrame:frame];
+ if (self)
+ {
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+#if TARGET_OS_IPHONE
+- (instancetype)initWithFrame:(CGRect)frame
+ context:(EAGLContext *)context
+{
+ self = [super initWithFrame:frame context:context];
+ if (self)
+ {
+ [self setup];
+ }
+ return self;
+}
+#elif TARGET_OS_MAC
+- (instancetype)initWithFrame:(NSRect)frameRect
+ pixelFormat:(NSOpenGLPixelFormat *)format
+{
+ self = [super initWithFrame:frameRect pixelFormat:format];
+ if (self)
+ {
+ [self setup];
+ }
+ return self;
+}
+#endif
+
+//------------------------------------------------------------------------------
+#pragma mark - Setup
+//------------------------------------------------------------------------------
+
+- (void)setup
+{
+ //
+ // Setup info data structure
+ //
+ self.info = (EZAudioPlotGLInfo *)malloc(sizeof(EZAudioPlotGLInfo));
+ memset(self.info, 0, sizeof(EZAudioPlotGLInfo));
+
+ //
+ // Create points array
+ //
+ UInt32 pointCount = [self maximumRollingHistoryLength];
+ self.info->points = (EZAudioPlotGLPoint *)calloc(sizeof(EZAudioPlotGLPoint), pointCount);
+ self.info->pointCount = pointCount;
+
+ //
+ // Create the history data structure to hold the rolling data
+ //
+ self.info->historyInfo = [EZAudioUtilities historyInfoWithDefaultLength:[self defaultRollingHistoryLength]
+ maximumLength:[self maximumRollingHistoryLength]];
+
+ //
+ // Setup OpenGL specific stuff
+ //
+ [self setupOpenGL];
+
+ //
+ // Setup view properties
+ //
+ self.gain = 1.0f;
+#if TARGET_OS_IPHONE
+ self.backgroundColor = [UIColor colorWithRed:0.569f green:0.82f blue:0.478f alpha:1.0f];
+ self.color = [UIColor colorWithRed:1.0f green:1.0f blue:1.0f alpha:1.0f];
+#elif TARGET_OS_MAC
+ self.backgroundColor = [NSColor colorWithCalibratedRed:0.569f green:0.82f blue:0.478f alpha:1.0f];
+ self.color = [NSColor colorWithCalibratedRed:1.0f green:1.0f blue:1.0f alpha:1.0f];
+#endif
+
+ //
+ // Allow subclass to initialize plot
+ //
+ [self setupPlot];
+
+ //
+ // Create the display link
+ //
+ self.displayLink = [EZAudioDisplayLink displayLinkWithDelegate:self];
+ [self.displayLink start];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setupPlot
+{
+ //
+ // Override in subclass
+ //
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setupOpenGL
+{
+ self.baseEffect = [[GLKBaseEffect alloc] init];
+ self.baseEffect.useConstantColor = YES;
+#if TARGET_OS_IPHONE
+ if (!self.context)
+ {
+ self.context = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
+ }
+ [EAGLContext setCurrentContext:self.context];
+ self.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888;
+ self.drawableDepthFormat = GLKViewDrawableDepthFormat24;
+ self.drawableStencilFormat = GLKViewDrawableStencilFormat8;
+ self.drawableMultisample = GLKViewDrawableMultisample4X;
+ self.opaque = NO;
+ self.enableSetNeedsDisplay = NO;
+#elif TARGET_OS_MAC
+ self.wantsBestResolutionOpenGLSurface = YES;
+ self.wantsLayer = YES;
+ self.layer.opaque = YES;
+ self.layer.backgroundColor = [NSColor clearColor].CGColor;
+ if (!self.pixelFormat)
+ {
+ NSOpenGLPixelFormatAttribute attrs[] =
+ {
+ NSOpenGLPFADoubleBuffer,
+ NSOpenGLPFAMultisample,
+ NSOpenGLPFASampleBuffers, 1,
+ NSOpenGLPFASamples, 4,
+ NSOpenGLPFADepthSize, 24,
+ NSOpenGLPFAOpenGLProfile,
+ NSOpenGLProfileVersion3_2Core, 0
+ };
+ self.pixelFormat = [[NSOpenGLPixelFormat alloc] initWithAttributes:attrs];
+ }
+#if DEBUG
+ NSAssert(self.pixelFormat, @"Could not create OpenGL pixel format so context is not valid");
+#endif
+ self.openGLContext = [[NSOpenGLContext alloc] initWithFormat:self.pixelFormat
+ shareContext:nil];
+ GLint swapInt = 1; GLint surfaceOpacity = 0;
+ [self.openGLContext setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+ [self.openGLContext setValues:&surfaceOpacity forParameter:NSOpenGLCPSurfaceOpacity];
+ [self.openGLContext lock];
+ glGenVertexArrays(1, &self.info->vab);
+ glBindVertexArray(self.info->vab);
+#endif
+ glGenBuffers(1, &self.info->vbo);
+ glBindBuffer(GL_ARRAY_BUFFER, self.info->vbo);
+ glBufferData(GL_ARRAY_BUFFER,
+ self.info->pointCount * sizeof(EZAudioPlotGLPoint),
+ self.info->points,
+ GL_STREAM_DRAW);
+#if !TARGET_OS_IPHONE
+ [self.openGLContext unlock];
+#endif
+ self.frame = self.frame;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Updating The Plot
+//------------------------------------------------------------------------------
+
+- (void)updateBuffer:(float *)buffer withBufferSize:(UInt32)bufferSize
+{
+ //
+ // Update history
+ //
+ [EZAudioUtilities appendBufferRMS:buffer
+ withBufferSize:bufferSize
+ toHistoryInfo:self.info->historyInfo];
+
+ //
+ // Convert this data to point data
+ //
+ switch (self.plotType)
+ {
+ case EZPlotTypeBuffer:
+ [self setSampleData:buffer
+ length:bufferSize];
+ break;
+ case EZPlotTypeRolling:
+ [self setSampleData:self.info->historyInfo->buffer
+ length:self.info->historyInfo->bufferSize];
+ break;
+ default:
+ break;
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setSampleData:(float *)data length:(int)length
+{
+ int pointCount = self.shouldFill ? length * 2 : length;
+ EZAudioPlotGLPoint *points = self.info->points;
+ for (int i = 0; i < length; i++)
+ {
+ if (self.shouldFill)
+ {
+ points[i * 2].x = points[i * 2 + 1].x = i;
+ points[i * 2].y = data[i];
+ points[i * 2 + 1].y = 0.0f;
+ }
+ else
+ {
+ points[i].x = i;
+ points[i].y = data[i];
+ }
+ }
+ points[0].y = points[pointCount - 1].y = 0.0f;
+ self.info->pointCount = pointCount;
+ self.info->interpolated = self.shouldFill;
+#if !TARGET_OS_IPHONE
+ [self.openGLContext lock];
+ glBindVertexArray(self.info->vab);
+#endif
+ glBindBuffer(GL_ARRAY_BUFFER, self.info->vbo);
+ glBufferSubData(GL_ARRAY_BUFFER,
+ 0,
+ pointCount * sizeof(EZAudioPlotGLPoint),
+ self.info->points);
+#if !TARGET_OS_IPHONE
+ [self.openGLContext unlock];
+#endif
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Adjusting History Resolution
+//------------------------------------------------------------------------------
+
+- (int)rollingHistoryLength
+{
+ return self.info->historyInfo->bufferSize;
+}
+
+//------------------------------------------------------------------------------
+
+- (int)setRollingHistoryLength:(int)historyLength
+{
+ self.info->historyInfo->bufferSize = MIN(EZAudioPlotDefaultMaxHistoryBufferLength, historyLength);
+ return self.info->historyInfo->bufferSize;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Clearing The Plot
+//------------------------------------------------------------------------------
+
+- (void)clear
+{
+ float emptyBuffer[1];
+ emptyBuffer[0] = 0.0f;
+ [self setSampleData:emptyBuffer length:1];
+ [EZAudioUtilities clearHistoryInfo:self.info->historyInfo];
+#if TARGET_OS_IPHONE
+ [self display];
+#elif TARGET_OS_MAC
+ [self redraw];
+#endif
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Start/Stop Display Link
+//------------------------------------------------------------------------------
+
+- (void)pauseDrawing
+{
+ [self.displayLink stop];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)resumeDrawing
+{
+ [self.displayLink start];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setters
+//------------------------------------------------------------------------------
+
+- (void)setBackgroundColor:(id)backgroundColor
+{
+ _backgroundColor = backgroundColor;
+ if (backgroundColor)
+ {
+ CGColorRef colorRef = [backgroundColor CGColor];
+ CGFloat red; CGFloat green; CGFloat blue; CGFloat alpha;
+ [EZAudioUtilities getColorComponentsFromCGColor:colorRef
+ red:&red
+ green:&green
+ blue:&blue
+ alpha:&alpha];
+ //
+ // Note! If you set the alpha to be 0 on mac for a transparent view
+ // the EZAudioPlotGL will make the superview layer-backed to make
+ // sure there is a surface to display itself on (or else you will get
+ // some pretty weird drawing glitches).
+ //
+#if !TARGET_OS_IPHONE
+ if (alpha == 0.0f)
+ {
+ [self.superview setWantsLayer:YES];
+ }
+#endif
+ glClearColor(red, green, blue, alpha);
+ }
+ else
+ {
+ glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setColor:(id)color
+{
+ _color = color;
+ if (color)
+ {
+ CGColorRef colorRef = [color CGColor];
+ CGFloat red; CGFloat green; CGFloat blue; CGFloat alpha;
+ [EZAudioUtilities getColorComponentsFromCGColor:colorRef
+ red:&red
+ green:&green
+ blue:&blue
+ alpha:&alpha];
+ self.baseEffect.constantColor = GLKVector4Make(red, green, blue, alpha);
+ }
+ else
+ {
+ self.baseEffect.constantColor = GLKVector4Make(0.0f, 0.0f, 0.0f, 0.0f);
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Drawing
+//------------------------------------------------------------------------------
+
+- (void)drawRect:(EZRect)rect
+{
+ [self redraw];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)redraw
+{
+#if !TARGET_OS_IPHONE
+ [self.openGLContext makeCurrentContext];
+ [self.openGLContext lock];
+#endif
+ [self redrawWithPoints:self.info->points
+ pointCount:self.info->pointCount
+ baseEffect:self.baseEffect
+ vertexBufferObject:self.info->vbo
+ vertexArrayBuffer:self.info->vab
+ interpolated:self.info->interpolated
+ mirrored:self.shouldMirror
+ gain:self.gain];
+#if !TARGET_OS_IPHONE
+ [self.openGLContext flushBuffer];
+ [self.openGLContext unlock];
+#endif
+}
+
+//------------------------------------------------------------------------------
+
+- (void)redrawWithPoints:(EZAudioPlotGLPoint *)points
+ pointCount:(UInt32)pointCount
+ baseEffect:(GLKBaseEffect *)baseEffect
+ vertexBufferObject:(GLuint)vbo
+ vertexArrayBuffer:(GLuint)vab
+ interpolated:(BOOL)interpolated
+ mirrored:(BOOL)mirrored
+ gain:(float)gain
+{
+ glClear(GL_COLOR_BUFFER_BIT);
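+ //
+ // Map x from [0, pointCount] into OpenGL clip space [-1, 1] and scale y by
+ // the gain. Interpolated (filled) data stores two vertices per sample, so
+ // the x scale divides the point count by two in that case.
+ //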
+ GLenum mode = interpolated ? GL_TRIANGLE_STRIP : GL_LINE_STRIP;
+ float interpolatedFactor = interpolated ? 2.0f : 1.0f;
+ float xscale = 2.0f / ((float)pointCount / interpolatedFactor);
+ float yscale = 1.0f * gain;
+ GLKMatrix4 transform = GLKMatrix4MakeTranslation(-1.0f, 0.0f, 0.0f);
+ transform = GLKMatrix4Scale(transform, xscale, yscale, 1.0f);
+ baseEffect.transform.modelviewMatrix = transform;
+#if !TARGET_OS_IPHONE
+ glBindVertexArray(vab);
+#endif
+ glBindBuffer(GL_ARRAY_BUFFER, vbo);
+ [baseEffect prepareToDraw];
+ glEnableVertexAttribArray(GLKVertexAttribPosition);
+ glVertexAttribPointer(GLKVertexAttribPosition,
+ 2,
+ GL_FLOAT,
+ GL_FALSE,
+ sizeof(EZAudioPlotGLPoint),
+ NULL);
+ glDrawArrays(mode, 0, pointCount);
+ if (mirrored)
+ {
+ baseEffect.transform.modelviewMatrix = GLKMatrix4Rotate(transform, M_PI, 1.0f, 0.0f, 0.0f);
+ [baseEffect prepareToDraw];
+ glDrawArrays(mode, 0, pointCount);
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Subclass
+//------------------------------------------------------------------------------
+
+- (int)defaultRollingHistoryLength
+{
+ return EZAudioPlotDefaultHistoryBufferLength;
+}
+
+//------------------------------------------------------------------------------
+
+- (int)maximumRollingHistoryLength
+{
+ return EZAudioPlotDefaultMaxHistoryBufferLength;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioDisplayLinkDelegate
+//------------------------------------------------------------------------------
+
+- (void)displayLinkNeedsDisplay:(EZAudioDisplayLink *)displayLink
+{
+#if TARGET_OS_IPHONE
+ if ([[UIApplication sharedApplication] applicationState] == UIApplicationStateActive)
+ {
+ [self display];
+ }
+#elif TARGET_OS_MAC
+ [self redraw];
+#endif
+}
+
+//------------------------------------------------------------------------------
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioUtilities.h b/Pods/EZAudio/EZAudio/EZAudioUtilities.h
new file mode 100644
index 0000000..94d4179
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioUtilities.h
@@ -0,0 +1,549 @@
+//
+// EZAudioUtilities.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 6/23/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import <AudioToolbox/AudioToolbox.h>
+#import <AudioUnit/AudioUnit.h>
+#import "TPCircularBuffer.h"
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+#elif TARGET_OS_MAC
+#endif
+
+//------------------------------------------------------------------------------
+#pragma mark - Data Structures
+//------------------------------------------------------------------------------
+
+/**
+ A data structure that holds information about audio data over time. It contains a circular buffer to incrementally write the audio data to and a scratch buffer to hold a window of audio data relative to the whole circular buffer. In use, this will provide a way to continuously append data while having an adjustable viewable window described by the bufferSize.
+ */
+typedef struct
+{
+ float *buffer;
+ int bufferSize;
+ TPCircularBuffer circularBuffer;
+} EZPlotHistoryInfo;
+
+//------------------------------------------------------------------------------
+
+/**
+ A data structure that holds information about a node in the context of an AUGraph.
+ */
+typedef struct
+{
+ AudioUnit audioUnit;
+ AUNode node;
+} EZAudioNodeInfo;
+
+//------------------------------------------------------------------------------
+#pragma mark - Types
+//------------------------------------------------------------------------------
+
+#if TARGET_OS_IPHONE
+typedef CGRect EZRect;
+#elif TARGET_OS_MAC
+typedef NSRect EZRect;
+#endif
+
+//------------------------------------------------------------------------------
+#pragma mark - EZAudioUtilities
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioUtilities class provides a set of class-level utility methods used throughout EZAudio to handle common operations such as allocating audio buffers and structures, creating various types of AudioStreamBasicDescription structures, string helpers for formatting and debugging, various math utilities, a very handy check result function (used everywhere!), and helpers for dealing with circular buffers. These were previously on the EZAudio class, but as of the 0.1.0 release have been moved here so the whole EZAudio is not needed when using only certain modules.
+ */
+@interface EZAudioUtilities : NSObject
+
+//------------------------------------------------------------------------------
+#pragma mark - Debugging
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Debugging EZAudio
+///-----------------------------------------------------------
+
+/**
+ Globally sets whether or not the program should exit if a `checkResult:operation:` operation fails. Currently the behavior on EZAudio is to quit if a `checkResult:operation:` fails, but this is not desirable in any production environment. Internally there are a lot of `checkResult:operation:` operations used on all the core classes. This should only ever be set to NO in production environments since a `checkResult:operation:` failing means something breaking has likely happened.
+ @param shouldExitOnCheckResultFail A BOOL indicating whether or not the running program should exit due to a `checkResult:operation:` failure.
+ */
++ (void)setShouldExitOnCheckResultFail:(BOOL)shouldExitOnCheckResultFail;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides a flag indicating whether or not the program will exit if a `checkResult:operation:` fails.
+ @return A BOOL indicating whether or not the program will exit if a `checkResult:operation:` fails.
+ */
++ (BOOL)shouldExitOnCheckResultFail;
+
+//------------------------------------------------------------------------------
+#pragma mark - AudioBufferList Utility
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name AudioBufferList Utility
+///-----------------------------------------------------------
+
+/**
+ Allocates an AudioBufferList structure. Make sure to call `freeBufferList:` when done using the AudioBufferList or it will leak.
+ @param frames The number of frames that will be stored within each audio buffer
+ @param channels The number of channels (e.g. 2 for stereo, 1 for mono, etc.)
+ @param interleaved Whether the samples will be interleaved (if not it will be assumed to be non-interleaved and each channel will have an AudioBuffer allocated)
+ @return An AudioBufferList struct that has been allocated in memory
+ */
++ (AudioBufferList *)audioBufferListWithNumberOfFrames:(UInt32)frames
+ numberOfChannels:(UInt32)channels
+ interleaved:(BOOL)interleaved;
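+
+// For example (illustrative): allocate a non-interleaved stereo buffer list for
+// 512 frames and free it when finished.
+//
+//     AudioBufferList *bufferList;
+//     bufferList = [EZAudioUtilities audioBufferListWithNumberOfFrames:512
+//                                                     numberOfChannels:2
+//                                                          interleaved:NO];
+//     // ...fill or consume the buffers...
+//     [EZAudioUtilities freeBufferList:bufferList];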
+
+//------------------------------------------------------------------------------
+
+/**
+ Allocates an array of float arrays given the number of frames needed to store in each float array.
+ @param frames A UInt32 representing the number of frames to store in each float buffer
+ @param channels A UInt32 representing the number of channels (i.e. the number of float arrays to allocate)
+ @return An array of float arrays, each the length of the number of frames specified
+ */
++ (float **)floatBuffersWithNumberOfFrames:(UInt32)frames
+ numberOfChannels:(UInt32)channels;
+
+//------------------------------------------------------------------------------
+
+/**
+ Deallocates an AudioBufferList structure from memory.
+ @param bufferList A pointer to the buffer list you would like to free
+ */
++ (void)freeBufferList:(AudioBufferList *)bufferList;
+
+//------------------------------------------------------------------------------
+
+/**
+ Deallocates an array of float buffers
+ @param buffers An array of float arrays
+ @param channels A UInt32 representing the number of channels (i.e. the number of float arrays to deallocate)
+ */
++ (void)freeFloatBuffers:(float **)buffers numberOfChannels:(UInt32)channels;
+
+//------------------------------------------------------------------------------
+#pragma mark - AudioStreamBasicDescription Utilties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Creating An AudioStreamBasicDescription
+///-----------------------------------------------------------
+
+/**
+ Creates a signed-integer, interleaved AudioStreamBasicDescription for the number of channels specified for an AIFF format.
+ @param channels The desired number of channels
+ @param sampleRate A float representing the sample rate.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)AIFFFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an AudioStreamBasicDescription for the iLBC narrow band speech codec.
+ @param sampleRate A float representing the sample rate.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)iLBCFormatWithSampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a float-based, non-interleaved AudioStreamBasicDescription for the number of channels specified.
+ @param channels A UInt32 representing the number of channels.
+ @param sampleRate A float representing the sample rate.
+ @return A float-based AudioStreamBasicDescription with the number of channels specified.
+ */
++ (AudioStreamBasicDescription)floatFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an AudioStreamBasicDescription for an M4A AAC format.
+ @param channels The desired number of channels
+ @param sampleRate A float representing the sample rate.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)M4AFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a single-channel, float-based AudioStreamBasicDescription.
+ @param sampleRate A float representing the sample rate.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)monoFloatFormatWithSampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a single-channel, float-based AudioStreamBasicDescription (as of 0.0.6 this is the same as `monoFloatFormatWithSampleRate:`).
+ @param sampleRate A float representing the sample rate.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)monoCanonicalFormatWithSampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a two-channel, non-interleaved, float-based AudioStreamBasicDescription (as of 0.0.6 this is the same as `stereoFloatNonInterleavedFormatWithSampleRate:`).
+ @param sampleRate A float representing the sample rate.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)stereoCanonicalNonInterleavedFormatWithSampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a two-channel, interleaved, float-based AudioStreamBasicDescription.
+ @param sampleRate A float representing the sample rate.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)stereoFloatInterleavedFormatWithSampleRate:(float)sampleRate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a two-channel, non-interleaved, float-based AudioStreamBasicDescription.
+ @param sampleRate A float representing the sample rate.
+ @return A new AudioStreamBasicDescription with the specified format.
+ */
++ (AudioStreamBasicDescription)stereoFloatNonInterleavedFormatWithSampleRate:(float)sampleRate;
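+
+// For example (illustrative): create a mono, float-based stream description at
+// 44.1 kHz and log its contents with the printASBD: utility below.
+//
+//     AudioStreamBasicDescription asbd;
+//     asbd = [EZAudioUtilities monoFloatFormatWithSampleRate:44100.0f];
+//     [EZAudioUtilities printASBD:asbd];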
+
+///-----------------------------------------------------------
+/// @name AudioStreamBasicDescription Helper Functions
+///-----------------------------------------------------------
+
+/**
+ Checks an AudioStreamBasicDescription to see if it is a float-based format (as opposed to a signed integer based format).
+ @param asbd A valid AudioStreamBasicDescription
+ @return A BOOL indicating whether or not the AudioStreamBasicDescription is a float format.
+ */
++ (BOOL)isFloatFormat:(AudioStreamBasicDescription)asbd;
+
+//------------------------------------------------------------------------------
+
+/**
+ Checks an AudioStreamBasicDescription for the interleaved flag (samples are
+ stored one after another in a single buffer instead of in two (or n, one per channel) parallel buffers).
+ @param asbd A valid AudioStreamBasicDescription
+ @return A BOOL indicating whether or not the AudioStreamBasicDescription is interleaved
+ */
++ (BOOL)isInterleaved:(AudioStreamBasicDescription)asbd;
+
+//------------------------------------------------------------------------------
+
+/**
+ Checks an AudioStreamBasicDescription to see if it is a linear PCM format (uncompressed,
+ 1 frame per packet)
+ @param asbd A valid AudioStreamBasicDescription
+ @return A BOOL indicating whether or not the AudioStreamBasicDescription is linear PCM.
+ */
++ (BOOL)isLinearPCM:(AudioStreamBasicDescription)asbd;
+
+///-----------------------------------------------------------
+/// @name AudioStreamBasicDescription Utilities
+///-----------------------------------------------------------
+
+/**
+ Nicely logs out the contents of an AudioStreamBasicDescription struct
+ @param asbd The AudioStreamBasicDescription struct with content to print out
+ */
++ (void)printASBD:(AudioStreamBasicDescription)asbd;
+
+//------------------------------------------------------------------------------
+
+/**
+ Converts seconds into a string formatted as MM:SS
+ @param seconds An NSTimeInterval representing the number of seconds
+ @return An NSString instance formatted as MM:SS from the seconds provided.
+ */
++ (NSString *)displayTimeStringFromSeconds:(NSTimeInterval)seconds;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a string to use when logging out the contents of an AudioStreamBasicDescription
+ @param asbd A valid AudioStreamBasicDescription struct.
+ @return An NSString representing the contents of the AudioStreamBasicDescription.
+ */
++ (NSString *)stringForAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd;
+
+//------------------------------------------------------------------------------
+
+/**
+ Just a wrapper around the setCanonical function provided in the Core Audio Utility C++ class.
+ @param asbd The AudioStreamBasicDescription structure to modify
+ @param nChannels The number of expected channels on the description
+ @param interleaved A flag indicating whether the stereo samples should be interleaved in the buffer
+ */
++ (void)setCanonicalAudioStreamBasicDescription:(AudioStreamBasicDescription*)asbd
+ numberOfChannels:(UInt32)nChannels
+ interleaved:(BOOL)interleaved;
+
+//------------------------------------------------------------------------------
+#pragma mark - Math Utilities
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Math Utilities
+///-----------------------------------------------------------
+
+/**
+ Appends an array of values to a history buffer and performs an internal shift to add the values to the tail and removes the same number of values from the head.
+ @param buffer A float array of values to append to the tail of the history buffer
+ @param bufferLength The length of the float array being appended to the history buffer
+ @param scrollHistory The target history buffer in which to append the values
+ @param scrollHistoryLength The length of the target history buffer
+ */
++ (void)appendBufferAndShift:(float*)buffer
+ withBufferSize:(int)bufferLength
+ toScrollHistory:(float*)scrollHistory
+ withScrollHistorySize:(int)scrollHistoryLength;
+
+//------------------------------------------------------------------------------
+
+/**
+ Appends a value to a history buffer and performs an internal shift to add the value to the tail and remove the 0th value.
+ @param value The float value to append to the history array
+ @param scrollHistory The target history buffer in which to append the values
+ @param scrollHistoryLength The length of the target history buffer
+ */
++ (void)appendValue:(float)value
+ toScrollHistory:(float*)scrollHistory
+ withScrollHistorySize:(int)scrollHistoryLength;
+
+//------------------------------------------------------------------------------
+
+/**
+ Maps a value from one coordinate system into another one. Takes in the current value to map, the minimum and maximum values of the first coordinate system, and the minimum and maximum values of the second coordinate system and calculates the mapped value in the second coordinate system's constraints.
+ @param value The value expressed in the first coordinate system
+ @param leftMin The minimum of the first coordinate system
+ @param leftMax The maximum of the first coordinate system
+ @param rightMin The minimum of the second coordinate system
+ @param rightMax The maximum of the second coordinate system
+ @return The mapped value in terms of the second coordinate system
+ */
++ (float)MAP:(float)value
+ leftMin:(float)leftMin
+ leftMax:(float)leftMax
+ rightMin:(float)rightMin
+ rightMax:(float)rightMax;
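+
+// Conceptually this is the standard linear remap:
+// result = rightMin + (value - leftMin) * (rightMax - rightMin) / (leftMax - leftMin)
+// For example, a sample value of 0.25 in [-1, 1] maps to 62.5 in [0, 100].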
+
+//------------------------------------------------------------------------------
+
+/**
+ Calculates the root mean squared for a buffer.
+ @param buffer A float buffer array of values whose root mean squared to calculate
+ @param bufferSize The size of the float buffer
+ @return The root mean squared of the buffer
+ */
++ (float)RMS:(float*)buffer length:(int)bufferSize;
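+
+// i.e. sqrt((buffer[0]^2 + ... + buffer[bufferSize - 1]^2) / bufferSize).
+// For example, the RMS of { 0.5f, -0.5f, 0.5f, -0.5f } is 0.5f.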
+
+//------------------------------------------------------------------------------
+
+/**
+ Calculate the sign function sgn(x) =
+ { -1 , x < 0,
+ { 0 , x = 0,
+ { 1 , x > 0
+ @param value The float value for which to use as x
+ @return The float sign value
+ */
++ (float)SGN:(float)value;
+
+//------------------------------------------------------------------------------
+#pragma mark - Music Utilities
+//------------------------------------------------------------------------------
+
++ (NSString *)noteNameStringForFrequency:(float)frequency
+ includeOctave:(BOOL)includeOctave;
+
+//------------------------------------------------------------------------------
+#pragma mark - OSStatus Utility
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name OSStatus Utility
+///-----------------------------------------------------------
+
+/**
+ Basic check result function useful for checking each step of the audio setup process
+ @param result The OSStatus representing the result of an operation
+ @param operation A string (const char, not NSString) describing the operation taking place (printed if the check fails)
+ */
++ (void)checkResult:(OSStatus)result operation:(const char *)operation;
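+
+// For example (illustrative; `audioUnit` is a placeholder variable):
+//
+//     [EZAudioUtilities checkResult:AudioUnitInitialize(audioUnit)
+//                         operation:"Failed to initialize the audio unit"];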
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides a string representation of the often cryptic Core Audio error codes
+ @param code A UInt32 representing an error code
+ @return An NSString with a human readable version of the error code.
+ */
++ (NSString *)stringFromUInt32Code:(UInt32)code;
+
+//------------------------------------------------------------------------------
+#pragma mark - Color Utility
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Color Utility
+///-----------------------------------------------------------
+
+/**
+ Helper function to get the color components from a CGColorRef in the RGBA colorspace.
+ @param color A CGColorRef that represents a color.
+ @param red A pointer to a CGFloat to hold the value of the red component. This value will be between 0 and 1.
+ @param green A pointer to a CGFloat to hold the value of the green component. This value will be between 0 and 1.
+ @param blue A pointer to a CGFloat to hold the value of the blue component. This value will be between 0 and 1.
+ @param alpha A pointer to a CGFloat to hold the value of the alpha component. This value will be between 0 and 1.
+ */
++ (void)getColorComponentsFromCGColor:(CGColorRef)color
+ red:(CGFloat *)red
+ green:(CGFloat *)green
+ blue:(CGFloat *)blue
+ alpha:(CGFloat *)alpha;
+
+//------------------------------------------------------------------------------
+#pragma mark - Plot Utility
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Plot Utility
+///-----------------------------------------------------------
+
+/**
+ Given a buffer representing a window of float history data, this appends the RMS of a buffer of incoming float data. This will likely be deprecated in a future version of EZAudio in favor of a circular-buffer-based approach.
+ @param scrollHistory An array of float arrays being used to hold the history values for each channel.
+ @param scrollHistoryLength An int representing the length of the history window.
+ @param index An int pointer to the index of the current read index of the history buffer.
+ @param buffer A float array representing the incoming audio data.
+ @param bufferSize An int representing the length of the incoming audio data.
+ @param isChanging A BOOL pointer representing whether the resolution (length of the history window) is currently changing.
+ */
++ (void)updateScrollHistory:(float **)scrollHistory
+ withLength:(int)scrollHistoryLength
+ atIndex:(int *)index
+ withBuffer:(float *)buffer
+ withBufferSize:(int)bufferSize
+ isResolutionChanging:(BOOL *)isChanging;
+
+//------------------------------------------------------------------------------
+#pragma mark - TPCircularBuffer Utility
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name TPCircularBuffer Utility
+///-----------------------------------------------------------
+
+/**
+ Appends the data from the audio buffer list to the circular buffer
+ @param circularBuffer Pointer to the instance of the TPCircularBuffer to add the audio data to
+ @param audioBufferList Pointer to the instance of the AudioBufferList with the audio data
+ */
++ (void)appendDataToCircularBuffer:(TPCircularBuffer*)circularBuffer
+ fromAudioBufferList:(AudioBufferList*)audioBufferList;
+
+//------------------------------------------------------------------------------
+
+/**
+ Initializes the circular buffer (just a wrapper around the C method)
+ @param circularBuffer Pointer to an instance of the TPCircularBuffer
+ @param size The length of the TPCircularBuffer (usually 1024)
+ */
++ (void)circularBuffer:(TPCircularBuffer*)circularBuffer
+ withSize:(int)size;
+
+//------------------------------------------------------------------------------
+
+/**
+ Frees a circular buffer
+ @param circularBuffer Pointer to the circular buffer to clear
+ */
++ (void)freeCircularBuffer:(TPCircularBuffer*)circularBuffer;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZPlotHistoryInfo Utility
+//------------------------------------------------------------------------------
+
+/**
+ Calculates the RMS of a float array containing audio data and appends it to the tail of a EZPlotHistoryInfo data structure. Thread-safe.
+ @param buffer A float array containing the incoming audio buffer to append to the history buffer
+ @param bufferSize A UInt32 representing the length of the incoming audio buffer
+ @param historyInfo A pointer to a EZPlotHistoryInfo structure to use for managing the history buffers
+ */
++ (void)appendBufferRMS:(float *)buffer
+ withBufferSize:(UInt32)bufferSize
+ toHistoryInfo:(EZPlotHistoryInfo *)historyInfo;
+
+//------------------------------------------------------------------------------
+
+/**
+ Appends a buffer of audio data to the tail of a EZPlotHistoryInfo data structure. Thread-safe.
+ @param buffer A float array containing the incoming audio buffer to append to the history buffer
+ @param bufferSize A UInt32 representing the length of the incoming audio buffer
+ @param historyInfo A pointer to an EZPlotHistoryInfo structure to use for managing the history buffers
+ */
++ (void)appendBuffer:(float *)buffer
+ withBufferSize:(UInt32)bufferSize
+ toHistoryInfo:(EZPlotHistoryInfo *)historyInfo;
+
+//------------------------------------------------------------------------------
+
+/**
+ Zeroes out an EZPlotHistoryInfo data structure without freeing the resources.
+ @param historyInfo A pointer to an EZPlotHistoryInfo data structure
+ */
++ (void)clearHistoryInfo:(EZPlotHistoryInfo *)historyInfo;
+
+//------------------------------------------------------------------------------
+
+/**
+ Frees an EZPlotHistoryInfo data structure
+ @param historyInfo A pointer to an EZPlotHistoryInfo data structure
+ */
++ (void)freeHistoryInfo:(EZPlotHistoryInfo *)historyInfo;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an EZPlotHistoryInfo data structure with a default length for the window buffer and a maximum length capacity for the internal circular buffer that holds all the audio data.
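+
+ A rough usage sketch (the lengths and the `buffer`/`bufferSize` variables are illustrative placeholders, e.g. audio handed to a plot from a callback):
+
+     // Display 512 points on screen, backed by up to 8192 history values.
+     EZPlotHistoryInfo *historyInfo = [EZAudioUtilities historyInfoWithDefaultLength:512
+                                                                       maximumLength:8192];
+
+     // Append the RMS of each incoming buffer, then free the structure when finished.
+     [EZAudioUtilities appendBufferRMS:buffer withBufferSize:bufferSize toHistoryInfo:historyInfo];
+     [EZAudioUtilities freeHistoryInfo:historyInfo];
+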
+ @param defaultLength An int representing the default length (i.e. the number of points that will be displayed on screen) of the history window.
+ @param maximumLength An int representing the maximum length, i.e. the absolute maximum number of values that can be held in the history's circular buffer.
+ @return A pointer to the EZPlotHistoryInfo created. The caller is responsible for freeing this structure using the `freeHistoryInfo` method above.
+ */
++ (EZPlotHistoryInfo *)historyInfoWithDefaultLength:(int)defaultLength
+ maximumLength:(int)maximumLength;
+
+//------------------------------------------------------------------------------
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZAudioUtilities.m b/Pods/EZAudio/EZAudio/EZAudioUtilities.m
new file mode 100644
index 0000000..82402da
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZAudioUtilities.m
@@ -0,0 +1,744 @@
+//
+// EZAudioUtilities.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 6/23/15.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZAudioUtilities.h"
+
+static float const EZAudioUtilitiesFixedNoteA = 440.0f;
+static int const EZAudioUtilitiesFixedNoteAIndex = 9;
+static int const EZAudioUtilitiesFixedNoteAOctave = 4;
+static float const EZAudioUtilitiesEQFrequencyRatio = 1.059463094359f;
+static int const EZAudioUtilitiesNotesLength = 12;
+static NSString * const EZAudioUtilitiesNotes[EZAudioUtilitiesNotesLength] =
+{
+ @"C", @"C#",
+ @"D", @"D#",
+ @"E",
+ @"F", @"F#",
+ @"G", @"G#",
+ @"A", @"A#",
+ @"B"
+};
+
+BOOL __shouldExitOnCheckResultFail = YES;
+
+@implementation EZAudioUtilities
+
+//------------------------------------------------------------------------------
+#pragma mark - Debugging
+//------------------------------------------------------------------------------
+
++ (void)setShouldExitOnCheckResultFail:(BOOL)shouldExitOnCheckResultFail
+{
+ __shouldExitOnCheckResultFail = shouldExitOnCheckResultFail;
+}
+
+//------------------------------------------------------------------------------
+
++ (BOOL)shouldExitOnCheckResultFail
+{
+ return __shouldExitOnCheckResultFail;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - AudioBufferList Utility
+//------------------------------------------------------------------------------
+
++ (AudioBufferList *)audioBufferListWithNumberOfFrames:(UInt32)frames
+ numberOfChannels:(UInt32)channels
+ interleaved:(BOOL)interleaved
+{
+ unsigned nBuffers;
+ unsigned bufferSize;
+ unsigned channelsPerBuffer;
+ if (interleaved)
+ {
+ nBuffers = 1;
+ bufferSize = sizeof(float) * frames * channels;
+ channelsPerBuffer = channels;
+ }
+ else
+ {
+ nBuffers = channels;
+ bufferSize = sizeof(float) * frames;
+ channelsPerBuffer = 1;
+ }
+
+ AudioBufferList *audioBufferList = (AudioBufferList *)malloc(sizeof(AudioBufferList) + sizeof(AudioBuffer) * (channels-1));
+ audioBufferList->mNumberBuffers = nBuffers;
+ for(unsigned i = 0; i < nBuffers; i++)
+ {
+ audioBufferList->mBuffers[i].mNumberChannels = channelsPerBuffer;
+ audioBufferList->mBuffers[i].mDataByteSize = bufferSize;
+ audioBufferList->mBuffers[i].mData = calloc(bufferSize, 1);
+ }
+ return audioBufferList;
+}
+
+//------------------------------------------------------------------------------
+
++ (float **)floatBuffersWithNumberOfFrames:(UInt32)frames
+ numberOfChannels:(UInt32)channels
+{
+ size_t size = sizeof(float *) * channels;
+ float **buffers = (float **)malloc(size);
+ for (int i = 0; i < channels; i++)
+ {
+ size = sizeof(float) * frames;
+ buffers[i] = (float *)malloc(size);
+ }
+ return buffers;
+}
+
+//------------------------------------------------------------------------------
+
++ (void)freeBufferList:(AudioBufferList *)bufferList
+{
+ if (bufferList)
+ {
+ if (bufferList->mNumberBuffers)
+ {
+ for( int i = 0; i < bufferList->mNumberBuffers; i++)
+ {
+ if (bufferList->mBuffers[i].mData)
+ {
+ free(bufferList->mBuffers[i].mData);
+ }
+ }
+ }
+ free(bufferList);
+ }
+ bufferList = NULL;
+}
+
+//------------------------------------------------------------------------------
+
++ (void)freeFloatBuffers:(float **)buffers numberOfChannels:(UInt32)channels
+{
+ if (!buffers || !*buffers)
+ {
+ return;
+ }
+
+ for (int i = 0; i < channels; i++)
+ {
+ free(buffers[i]);
+ }
+ free(buffers);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - AudioStreamBasicDescription Utility
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)AIFFFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate
+{
+ AudioStreamBasicDescription asbd;
+ memset(&asbd, 0, sizeof(asbd));
+ asbd.mFormatID = kAudioFormatLinearPCM;
+ asbd.mFormatFlags = kAudioFormatFlagIsBigEndian|kAudioFormatFlagIsPacked|kAudioFormatFlagIsSignedInteger;
+ asbd.mSampleRate = sampleRate;
+ asbd.mChannelsPerFrame = channels;
+ asbd.mBitsPerChannel = 32;
+ asbd.mBytesPerPacket = (asbd.mBitsPerChannel / 8) * asbd.mChannelsPerFrame;
+ asbd.mFramesPerPacket = 1;
+ asbd.mBytesPerFrame = (asbd.mBitsPerChannel / 8) * asbd.mChannelsPerFrame;
+ return asbd;
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)iLBCFormatWithSampleRate:(float)sampleRate
+{
+ AudioStreamBasicDescription asbd;
+ memset(&asbd, 0, sizeof(asbd));
+ asbd.mFormatID = kAudioFormatiLBC;
+ asbd.mChannelsPerFrame = 1;
+ asbd.mSampleRate = sampleRate;
+
+ // Fill in the rest of the descriptions using the Audio Format API
+ UInt32 propSize = sizeof(asbd);
+ [EZAudioUtilities checkResult:AudioFormatGetProperty(kAudioFormatProperty_FormatInfo,
+ 0,
+ NULL,
+ &propSize,
+ &asbd)
+ operation:"Failed to fill out the rest of the iLBC AudioStreamBasicDescription"];
+
+ return asbd;
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)floatFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate
+{
+ AudioStreamBasicDescription asbd;
+ UInt32 floatByteSize = sizeof(float);
+ asbd.mBitsPerChannel = 8 * floatByteSize;
+ asbd.mBytesPerFrame = floatByteSize;
+ asbd.mBytesPerPacket = floatByteSize;
+ asbd.mChannelsPerFrame = channels;
+ asbd.mFormatFlags = kAudioFormatFlagIsFloat|kAudioFormatFlagIsNonInterleaved;
+ asbd.mFormatID = kAudioFormatLinearPCM;
+ asbd.mFramesPerPacket = 1;
+ asbd.mSampleRate = sampleRate;
+ return asbd;
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)M4AFormatWithNumberOfChannels:(UInt32)channels
+ sampleRate:(float)sampleRate
+{
+ AudioStreamBasicDescription asbd;
+ memset(&asbd, 0, sizeof(asbd));
+ asbd.mFormatID = kAudioFormatMPEG4AAC;
+ asbd.mChannelsPerFrame = channels;
+ asbd.mSampleRate = sampleRate;
+
+ // Fill in the rest of the descriptions using the Audio Format API
+ UInt32 propSize = sizeof(asbd);
+ [EZAudioUtilities checkResult:AudioFormatGetProperty(kAudioFormatProperty_FormatInfo,
+ 0,
+ NULL,
+ &propSize,
+ &asbd)
+ operation:"Failed to fill out the rest of the m4a AudioStreamBasicDescription"];
+
+ return asbd;
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)monoFloatFormatWithSampleRate:(float)sampleRate
+{
+ AudioStreamBasicDescription asbd;
+ UInt32 byteSize = sizeof(float);
+ asbd.mBitsPerChannel = 8 * byteSize;
+ asbd.mBytesPerFrame = byteSize;
+ asbd.mBytesPerPacket = byteSize;
+ asbd.mChannelsPerFrame = 1;
+ asbd.mFormatFlags = kAudioFormatFlagIsPacked|kAudioFormatFlagIsFloat;
+ asbd.mFormatID = kAudioFormatLinearPCM;
+ asbd.mFramesPerPacket = 1;
+ asbd.mSampleRate = sampleRate;
+ return asbd;
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)monoCanonicalFormatWithSampleRate:(float)sampleRate
+{
+ AudioStreamBasicDescription asbd;
+ UInt32 byteSize = sizeof(float);
+ asbd.mBitsPerChannel = 8 * byteSize;
+ asbd.mBytesPerFrame = byteSize;
+ asbd.mBytesPerPacket = byteSize;
+ asbd.mChannelsPerFrame = 1;
+ asbd.mFormatFlags = kAudioFormatFlagsNativeFloatPacked|kAudioFormatFlagIsNonInterleaved;
+ asbd.mFormatID = kAudioFormatLinearPCM;
+ asbd.mFramesPerPacket = 1;
+ asbd.mSampleRate = sampleRate;
+ return asbd;
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)stereoCanonicalNonInterleavedFormatWithSampleRate:(float)sampleRate
+{
+ AudioStreamBasicDescription asbd;
+ UInt32 byteSize = sizeof(float);
+ asbd.mBitsPerChannel = 8 * byteSize;
+ asbd.mBytesPerFrame = byteSize;
+ asbd.mBytesPerPacket = byteSize;
+ asbd.mChannelsPerFrame = 2;
+ asbd.mFormatFlags = kAudioFormatFlagsNativeFloatPacked|kAudioFormatFlagIsNonInterleaved;
+ asbd.mFormatID = kAudioFormatLinearPCM;
+ asbd.mFramesPerPacket = 1;
+ asbd.mSampleRate = sampleRate;
+ return asbd;
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)stereoFloatInterleavedFormatWithSampleRate:(float)sampleRate
+{
+ AudioStreamBasicDescription asbd;
+ UInt32 floatByteSize = sizeof(float);
+ asbd.mChannelsPerFrame = 2;
+ asbd.mBitsPerChannel = 8 * floatByteSize;
+ asbd.mBytesPerFrame = asbd.mChannelsPerFrame * floatByteSize;
+ asbd.mFramesPerPacket = 1;
+ asbd.mBytesPerPacket = asbd.mFramesPerPacket * asbd.mBytesPerFrame;
+ asbd.mFormatFlags = kAudioFormatFlagIsFloat;
+ asbd.mFormatID = kAudioFormatLinearPCM;
+ asbd.mSampleRate = sampleRate;
+ asbd.mReserved = 0;
+ return asbd;
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)stereoFloatNonInterleavedFormatWithSampleRate:(float)sampleRate
+{
+ AudioStreamBasicDescription asbd;
+ UInt32 floatByteSize = sizeof(float);
+ asbd.mBitsPerChannel = 8 * floatByteSize;
+ asbd.mBytesPerFrame = floatByteSize;
+ asbd.mChannelsPerFrame = 2;
+ asbd.mFormatFlags = kAudioFormatFlagIsFloat|kAudioFormatFlagIsNonInterleaved;
+ asbd.mFormatID = kAudioFormatLinearPCM;
+ asbd.mFramesPerPacket = 1;
+ asbd.mBytesPerPacket = asbd.mFramesPerPacket * asbd.mBytesPerFrame;
+ asbd.mSampleRate = sampleRate;
+ return asbd;
+}
+
+//------------------------------------------------------------------------------
+
++ (BOOL)isFloatFormat:(AudioStreamBasicDescription)asbd
+{
+ return asbd.mFormatFlags & kAudioFormatFlagIsFloat;
+}
+
+//------------------------------------------------------------------------------
+
++ (BOOL)isInterleaved:(AudioStreamBasicDescription)asbd
+{
+ return !(asbd.mFormatFlags & kAudioFormatFlagIsNonInterleaved);
+}
+
+//------------------------------------------------------------------------------
+
++ (BOOL)isLinearPCM:(AudioStreamBasicDescription)asbd
+{
+ return asbd.mFormatID == kAudioFormatLinearPCM;
+}
+
+//------------------------------------------------------------------------------
+
++ (void)printASBD:(AudioStreamBasicDescription)asbd
+{
+ char formatIDString[5];
+ UInt32 formatID = CFSwapInt32HostToBig(asbd.mFormatID);
+ bcopy (&formatID, formatIDString, 4);
+ formatIDString[4] = '\0';
+ NSLog (@" Sample Rate: %10.0f", asbd.mSampleRate);
+ NSLog (@" Format ID: %10s", formatIDString);
+ NSLog (@" Format Flags: %10X", (unsigned int)asbd.mFormatFlags);
+ NSLog (@" Bytes per Packet: %10d", (unsigned int)asbd.mBytesPerPacket);
+ NSLog (@" Frames per Packet: %10d", (unsigned int)asbd.mFramesPerPacket);
+ NSLog (@" Bytes per Frame: %10d", (unsigned int)asbd.mBytesPerFrame);
+ NSLog (@" Channels per Frame: %10d", (unsigned int)asbd.mChannelsPerFrame);
+ NSLog (@" Bits per Channel: %10d", (unsigned int)asbd.mBitsPerChannel);
+}
+
+//------------------------------------------------------------------------------
+
++ (NSString *)displayTimeStringFromSeconds:(NSTimeInterval)seconds
+{
+ int totalSeconds = (int)ceil(seconds);
+ int secondsComponent = totalSeconds % 60;
+ int minutesComponent = (totalSeconds / 60) % 60;
+ return [NSString stringWithFormat:@"%02d:%02d", minutesComponent, secondsComponent];
+}
+
+//------------------------------------------------------------------------------
+
++ (NSString *)stringForAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd
+{
+ char formatIDString[5];
+ UInt32 formatID = CFSwapInt32HostToBig(asbd.mFormatID);
+ bcopy (&formatID, formatIDString, 4);
+ formatIDString[4] = '\0';
+ return [NSString stringWithFormat:
+ @"\nSample Rate: %10.0f,\n"
+ @"Format ID: %10s,\n"
+ @"Format Flags: %10X,\n"
+ @"Bytes per Packet: %10d,\n"
+ @"Frames per Packet: %10d,\n"
+ @"Bytes per Frame: %10d,\n"
+ @"Channels per Frame: %10d,\n"
+ @"Bits per Channel: %10d,\n"
+ @"IsInterleaved: %i,\n"
+ @"IsFloat: %i,",
+ asbd.mSampleRate,
+ formatIDString,
+ (unsigned int)asbd.mFormatFlags,
+ (unsigned int)asbd.mBytesPerPacket,
+ (unsigned int)asbd.mFramesPerPacket,
+ (unsigned int)asbd.mBytesPerFrame,
+ (unsigned int)asbd.mChannelsPerFrame,
+ (unsigned int)asbd.mBitsPerChannel,
+ [self isInterleaved:asbd],
+ [self isFloatFormat:asbd]];
+}
+
+//------------------------------------------------------------------------------
+
++ (void)setCanonicalAudioStreamBasicDescription:(AudioStreamBasicDescription*)asbd
+ numberOfChannels:(UInt32)nChannels
+ interleaved:(BOOL)interleaved
+{
+
+ asbd->mFormatID = kAudioFormatLinearPCM;
+#if TARGET_OS_IPHONE
+ int sampleSize = sizeof(float);
+ asbd->mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
+#elif TARGET_OS_MAC
+ int sampleSize = sizeof(Float32);
+ asbd->mFormatFlags = kAudioFormatFlagsNativeFloatPacked;
+#endif
+ asbd->mBitsPerChannel = 8 * sampleSize;
+ asbd->mChannelsPerFrame = nChannels;
+ asbd->mFramesPerPacket = 1;
+ if (interleaved)
+ asbd->mBytesPerPacket = asbd->mBytesPerFrame = nChannels * sampleSize;
+ else {
+ asbd->mBytesPerPacket = asbd->mBytesPerFrame = sampleSize;
+ asbd->mFormatFlags |= kAudioFormatFlagIsNonInterleaved;
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Math Utilities
+//------------------------------------------------------------------------------
+
++ (void)appendBufferAndShift:(float*)buffer
+ withBufferSize:(int)bufferLength
+ toScrollHistory:(float*)scrollHistory
+ withScrollHistorySize:(int)scrollHistoryLength
+{
+ int shiftLength = scrollHistoryLength - bufferLength;
+ size_t floatByteSize = sizeof(float);
+ size_t shiftByteSize = shiftLength * floatByteSize;
+ size_t bufferByteSize = bufferLength * floatByteSize;
+ memmove(&scrollHistory[0],
+ &scrollHistory[bufferLength],
+ shiftByteSize);
+ memmove(&scrollHistory[shiftLength],
+ &buffer[0],
+ bufferByteSize);
+}
+
+//------------------------------------------------------------------------------
+
++ (void) appendValue:(float)value
+ toScrollHistory:(float*)scrollHistory
+ withScrollHistorySize:(int)scrollHistoryLength
+{
+ float val[1]; val[0] = value;
+ [self appendBufferAndShift:val
+ withBufferSize:1
+ toScrollHistory:scrollHistory
+ withScrollHistorySize:scrollHistoryLength];
+}
+
+//------------------------------------------------------------------------------
+
++ (float)MAP:(float)value
+ leftMin:(float)leftMin
+ leftMax:(float)leftMax
+ rightMin:(float)rightMin
+ rightMax:(float)rightMax
+{
+ float leftSpan = leftMax - leftMin;
+ float rightSpan = rightMax - rightMin;
+ float valueScaled = ( value - leftMin) / leftSpan;
+ return rightMin + (valueScaled * rightSpan);
+}
+
+//------------------------------------------------------------------------------
+
++ (float)RMS:(float *)buffer length:(int)bufferSize
+{
+ float sum = 0.0;
+ for(int i = 0; i < bufferSize; i++)
+ sum += buffer[i] * buffer[i];
+ return sqrtf( sum / bufferSize);
+}
+
+//------------------------------------------------------------------------------
+
++ (float)SGN:(float)value
+{
+ return value < 0 ? -1.0f : ( value > 0 ? 1.0f : 0.0f);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Music Utilities
+//------------------------------------------------------------------------------
+
++ (NSString *)noteNameStringForFrequency:(float)frequency
+ includeOctave:(BOOL)includeOctave
+{
+ NSMutableString *noteName = [NSMutableString string];
+ int halfStepsFromFixedNote = roundf(log(frequency / EZAudioUtilitiesFixedNoteA) / log(EZAudioUtilitiesEQFrequencyRatio));
+ int halfStepsModOctaves = halfStepsFromFixedNote % EZAudioUtilitiesNotesLength;
+ int indexOfNote = EZAudioUtilitiesFixedNoteAIndex + halfStepsModOctaves;
+ float octaves = halfStepsFromFixedNote / EZAudioUtilitiesNotesLength;
+ if (indexOfNote >= EZAudioUtilitiesNotesLength)
+ {
+ indexOfNote -= EZAudioUtilitiesNotesLength;
+ octaves += 1;
+ }
+ else if (indexOfNote < 0)
+ {
+ indexOfNote += EZAudioUtilitiesNotesLength;
+ octaves -= 1;
+ }
+ [noteName appendString:EZAudioUtilitiesNotes[indexOfNote]];
+ if (includeOctave)
+ {
+ int noteOctave = EZAudioUtilitiesFixedNoteAOctave + octaves;
+ [noteName appendFormat:@"%i", noteOctave];
+ }
+ return noteName;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - OSStatus Utility
+//------------------------------------------------------------------------------
+
++ (void)checkResult:(OSStatus)result operation:(const char *)operation
+{
+ if (result == noErr) return;
+ char errorString[20];
+ // see if it appears to be a 4-char-code
+ *(UInt32 *)(errorString + 1) = CFSwapInt32HostToBig(result);
+ if (isprint(errorString[1]) && isprint(errorString[2]) && isprint(errorString[3]) && isprint(errorString[4]))
+ {
+ errorString[0] = errorString[5] = '\'';
+ errorString[6] = '\0';
+ } else
+ // no, format it as an integer
+ sprintf(errorString, "%d", (int)result);
+ fprintf(stderr, "Error: %s (%s)\n", operation, errorString);
+ if (__shouldExitOnCheckResultFail)
+ {
+ exit(-1);
+ }
+}
+
+//------------------------------------------------------------------------------
+
++ (NSString *)stringFromUInt32Code:(UInt32)code
+{
+ char errorString[20];
+ // see if it appears to be a 4-char-code
+ *(UInt32 *)(errorString + 1) = CFSwapInt32HostToBig(code);
+ if (isprint(errorString[1]) &&
+ isprint(errorString[2]) &&
+ isprint(errorString[3]) &&
+ isprint(errorString[4]))
+ {
+ errorString[0] = errorString[5] = '\'';
+ errorString[6] = '\0';
+ }
+ else
+ {
+ // not a printable 4-char-code; format it as an integer instead
+ sprintf(errorString, "%d", (int)code);
+ }
+ return [NSString stringWithUTF8String:errorString];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Plot Utility
+//------------------------------------------------------------------------------
+
++ (void)updateScrollHistory:(float **)scrollHistory
+ withLength:(int)scrollHistoryLength
+ atIndex:(int *)index
+ withBuffer:(float *)buffer
+ withBufferSize:(int)bufferSize
+ isResolutionChanging:(BOOL *)isChanging
+{
+ //
+ size_t floatByteSize = sizeof(float);
+ if(*scrollHistory == NULL)
+ {
+ // Create the history buffer
+ *scrollHistory = (float *)calloc(8192, floatByteSize);
+ }
+
+ //
+ if(!*isChanging)
+ {
+ float rms = [EZAudioUtilities RMS:buffer length:bufferSize];
+ if(*index < scrollHistoryLength)
+ {
+ float *hist = *scrollHistory;
+ hist[*index] = rms;
+ (*index)++;
+ }
+ else
+ {
+ [EZAudioUtilities appendValue:rms
+ toScrollHistory:*scrollHistory
+ withScrollHistorySize:scrollHistoryLength];
+ }
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Color Utility
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Color Utility
+///-----------------------------------------------------------
+
++ (void)getColorComponentsFromCGColor:(CGColorRef)color
+ red:(CGFloat *)red
+ green:(CGFloat *)green
+ blue:(CGFloat *)blue
+ alpha:(CGFloat *)alpha
+{
+ size_t componentCount = CGColorGetNumberOfComponents(color);
+ if (componentCount == 4)
+ {
+ const CGFloat *components = CGColorGetComponents(color);
+ *red = components[0];
+ *green = components[1];
+ *blue = components[2];
+ *alpha = components[3];
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - TPCircularBuffer Utility
+//------------------------------------------------------------------------------
+
++ (void)appendDataToCircularBuffer:(TPCircularBuffer *)circularBuffer
+ fromAudioBufferList:(AudioBufferList *)audioBufferList
+{
+ TPCircularBufferProduceBytes(circularBuffer,
+ audioBufferList->mBuffers[0].mData,
+ audioBufferList->mBuffers[0].mDataByteSize);
+}
+
+//------------------------------------------------------------------------------
+
++ (void)circularBuffer:(TPCircularBuffer *)circularBuffer withSize:(int)size
+{
+ TPCircularBufferInit(circularBuffer, size);
+}
+
+//------------------------------------------------------------------------------
+
++ (void)freeCircularBuffer:(TPCircularBuffer *)circularBuffer
+{
+ TPCircularBufferClear(circularBuffer);
+ TPCircularBufferCleanup(circularBuffer);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - EZPlotHistoryInfo Utility
+//------------------------------------------------------------------------------
+
++ (void)appendBufferRMS:(float *)buffer
+ withBufferSize:(UInt32)bufferSize
+ toHistoryInfo:(EZPlotHistoryInfo *)historyInfo
+{
+ //
+ // Calculate RMS and append to buffer
+ //
+ float rms = [EZAudioUtilities RMS:buffer length:bufferSize];
+ float src[1];
+ src[0] = isnan(rms) ? 0.0 : rms;
+ [self appendBuffer:src withBufferSize:1 toHistoryInfo:historyInfo];
+}
+
+//------------------------------------------------------------------------------
+
++ (void)appendBuffer:(float *)buffer
+ withBufferSize:(UInt32)bufferSize
+ toHistoryInfo:(EZPlotHistoryInfo *)historyInfo
+{
+ //
+ // Do nothing if there is no buffer
+ //
+ if (bufferSize == 0)
+ {
+ return;
+ }
+
+ //
+ // Update the scroll history datasource
+ //
+ TPCircularBufferProduceBytes(&historyInfo->circularBuffer, buffer, bufferSize * sizeof(float));
+ int32_t targetBytes = historyInfo->bufferSize * sizeof(float);
+ int32_t availableBytes = 0;
+ float *historyBuffer = TPCircularBufferTail(&historyInfo->circularBuffer, &availableBytes);
+ int32_t bytes = MIN(targetBytes, availableBytes);
+ memmove(historyInfo->buffer, historyBuffer, bytes);
+ if (targetBytes <= availableBytes)
+ {
+ TPCircularBufferConsume(&historyInfo->circularBuffer, availableBytes - targetBytes);
+ }
+}
+
+//------------------------------------------------------------------------------
+
++ (void)clearHistoryInfo:(EZPlotHistoryInfo *)historyInfo
+{
+ memset(historyInfo->buffer, 0, historyInfo->bufferSize * sizeof(float));
+ TPCircularBufferClear(&historyInfo->circularBuffer);
+}
+
+//------------------------------------------------------------------------------
+
++ (void)freeHistoryInfo:(EZPlotHistoryInfo *)historyInfo
+{
+ free(historyInfo->buffer);
+ free(historyInfo);
+ TPCircularBufferCleanup(&historyInfo->circularBuffer);
+}
+
+//------------------------------------------------------------------------------
+
++ (EZPlotHistoryInfo *)historyInfoWithDefaultLength:(int)defaultLength
+ maximumLength:(int)maximumLength
+{
+ //
+ // Setup buffers
+ //
+ EZPlotHistoryInfo *historyInfo = (EZPlotHistoryInfo *)malloc(sizeof(EZPlotHistoryInfo));
+ historyInfo->bufferSize = defaultLength;
+ historyInfo->buffer = calloc(maximumLength, sizeof(float));
+ TPCircularBufferInit(&historyInfo->circularBuffer, maximumLength);
+
+ //
+ // Zero out circular buffer
+ //
+ float emptyBuffer[maximumLength];
+ memset(emptyBuffer, 0, sizeof(emptyBuffer));
+ TPCircularBufferProduceBytes(&historyInfo->circularBuffer,
+ emptyBuffer,
+ (int32_t)sizeof(emptyBuffer));
+
+ return historyInfo;
+}
+
+//------------------------------------------------------------------------------
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZMicrophone.h b/Pods/EZAudio/EZAudio/EZMicrophone.h
new file mode 100644
index 0000000..ddac53e
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZMicrophone.h
@@ -0,0 +1,381 @@
+//
+// EZMicrophone.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 9/2/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import <AudioToolbox/AudioToolbox.h>
+#import "TargetConditionals.h"
+#import "EZAudioDevice.h"
+#import "EZOutput.h"
+
+@class EZMicrophone;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZMicrophoneDelegate
+//------------------------------------------------------------------------------
+
+/**
+ The EZMicrophoneDelegate for the EZMicrophone provides a receiver for the incoming audio data events. When the microphone has been successfully internally configured it will try to send its delegate an AudioStreamBasicDescription describing the format of the incoming audio data.
+
+ The audio data itself is sent back to the delegate in various forms:
+
+ -`microphone:hasAudioReceived:withBufferSize:withNumberOfChannels:`
+ Provides float arrays instead of the AudioBufferList structure to hold the audio data. There could be a number of float arrays depending on the number of channels (see the function description below). These are useful for doing any visualizations that would like to make use of the raw audio data.
+
+ -`microphone:hasBufferList:withBufferSize:withNumberOfChannels:`
+ Provides the AudioBufferList structures holding the audio data. These are the native structures Core Audio uses to hold the buffer information and useful for piping out directly to an output (see EZOutput).
+
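+ For example, a delegate that draws the left channel might implement the float array callback roughly like this (a minimal sketch; `audioPlot` is an assumed EZAudioPlot property on the receiving class):
+
+     - (void)microphone:(EZMicrophone *)microphone
+        hasAudioReceived:(float **)buffer
+          withBufferSize:(UInt32)bufferSize
+    withNumberOfChannels:(UInt32)numberOfChannels
+     {
+         // The callback fires on a background thread, so hop to the main queue before drawing.
+         __weak typeof (self) weakSelf = self;
+         dispatch_async(dispatch_get_main_queue(), ^{
+             [weakSelf.audioPlot updateBuffer:buffer[0] withBufferSize:bufferSize];
+         });
+     }
+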
+ */
+@protocol EZMicrophoneDelegate <NSObject>
+
+@optional
+///-----------------------------------------------------------
+/// @name Audio Data Description
+///-----------------------------------------------------------
+
+/**
+ Called anytime the EZMicrophone starts or stops.
+ @param microphone The instance of the EZMicrophone that triggered the event.
+ @param isPlaying A BOOL indicating whether the EZMicrophone instance is playing or not.
+ */
+- (void)microphone:(EZMicrophone *)microphone changedPlayingState:(BOOL)isPlaying;
+
+//------------------------------------------------------------------------------
+
+/**
+ Called anytime the input device changes on an `EZMicrophone` instance.
+ @param microphone The instance of the EZMicrophone that triggered the event.
+ @param device The instance of the new EZAudioDevice the microphone is using to pull input.
+ */
+- (void)microphone:(EZMicrophone *)microphone changedDevice:(EZAudioDevice *)device;
+
+//------------------------------------------------------------------------------
+
+/**
+ Returns back the audio stream basic description as soon as it has been initialized. This is guaranteed to occur before the stream callbacks, `microphone:hasBufferList:withBufferSize:withNumberOfChannels:` or `microphone:hasAudioReceived:withBufferSize:withNumberOfChannels:`
+ @param microphone The instance of the EZMicrophone that triggered the event.
+ @param audioStreamBasicDescription The AudioStreamBasicDescription that was created for the microphone instance.
+ */
+- (void) microphone:(EZMicrophone *)microphone
+ hasAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription;
+
+///-----------------------------------------------------------
+/// @name Audio Data Callbacks
+///-----------------------------------------------------------
+
+/**
+ This method provides an array of float arrays of the audio received, each float array representing a channel of audio data. This occurs on a background thread, so any drawing code must explicitly perform its functions on the main thread.
+ @param microphone The instance of the EZMicrophone that triggered the event.
+ @param buffer The audio data as an array of float arrays. In a stereo signal buffer[0] represents the left channel while buffer[1] would represent the right channel.
+ @param bufferSize The size of each of the buffers (the length of each float array).
+ @param numberOfChannels The number of channels for the incoming audio.
+ @warning This function executes on a background thread to avoid blocking any audio operations. If operations should be performed on any other thread (like the main thread) it should be performed within a dispatch block like so: dispatch_async(dispatch_get_main_queue(), ^{ ...Your Code... })
+ */
+- (void) microphone:(EZMicrophone *)microphone
+ hasAudioReceived:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels;
+
+//------------------------------------------------------------------------------
+
+/**
+ Returns back the buffer list containing the audio received. This occurs on a background thread, so any drawing code must explicitly perform its functions on the main thread.
+ @param microphone The instance of the EZMicrophone that triggered the event.
+ @param bufferList The AudioBufferList holding the audio data.
+ @param bufferSize The size of each of the buffers of the AudioBufferList.
+ @param numberOfChannels The number of channels for the incoming audio.
+ @warning This function executes on a background thread to avoid blocking any audio operations. If operations should be performed on any other thread (like the main thread) it should be performed within a dispatch block like so: dispatch_async(dispatch_get_main_queue(), ^{ ...Your Code... })
+ */
+- (void) microphone:(EZMicrophone *)microphone
+ hasBufferList:(AudioBufferList *)bufferList
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels;
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZMicrophone
+//------------------------------------------------------------------------------
+
+/**
+ The EZMicrophone provides a component to get audio data from the default device microphone. On OSX this is the input device selected in the system preferences, while on iOS this defaults to using the RemoteIO audio unit. The microphone data is converted to a float buffer array and returned back to the caller via the EZMicrophoneDelegate protocol.
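+
+ A minimal setup sketch, assuming the calling class adopts EZMicrophoneDelegate and keeps a strong reference to the microphone:
+
+     // Create the microphone with a delegate and start pulling audio data.
+     self.microphone = [EZMicrophone microphoneWithDelegate:self];
+     [self.microphone startFetchingAudio];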
+ */
+@interface EZMicrophone : NSObject
+
+//------------------------------------------------------------------------------
+
+/**
+ The EZMicrophoneDelegate for which to handle the microphone callbacks
+ */
+@property (nonatomic, weak) id<EZMicrophoneDelegate> delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ The EZAudioDevice being used to pull the microphone data.
+ - On iOS this can be any of the available microphones on the iPhone/iPad devices (usually there are 3). Defaults to the first microphone found (bottom mic)
+ - On OSX this can be any of the plugged in devices that Core Audio can detect (see kAudioUnitSubType_HALOutput for more information, or System Preferences -> Sound for the available inputs)
+ */
+@property (nonatomic, strong) EZAudioDevice *device;
+
+//------------------------------------------------------------------------------
+
+/**
+ A BOOL describing whether the microphone is on and passing back audio data to its delegate.
+ */
+@property (nonatomic, assign) BOOL microphoneOn;
+
+//------------------------------------------------------------------------------
+
+/**
+ An EZOutput to use for porting the microphone input out (passthrough).
+ */
+@property (nonatomic, strong) EZOutput *output;
+
+//------------------------------------------------------------------------------
+#pragma mark - Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Initializers
+///-----------------------------------------------------------
+
+/**
+ Creates an instance of the EZMicrophone with a delegate to respond to the audioReceived callback. This will not start fetching the audio until startFetchingAudio has been called. Use initWithMicrophoneDelegate:startsImmediately: to instantiate this class and immediately start fetching audio data.
+ @param delegate An EZMicrophoneDelegate delegate that will receive the audioReceived callback.
+ @return An instance of the EZMicrophone class. This should be strongly retained.
+ */
+- (EZMicrophone *)initWithMicrophoneDelegate:(id<EZMicrophoneDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an instance of the EZMicrophone with a custom AudioStreamBasicDescription and allows the caller to specify a delegate to respond to the audioReceived callback. This will not start fetching the audio until startFetchingAudio has been called. Use initWithMicrophoneDelegate:startsImmediately: to instantiate this class and immediately start fetching audio data.
+ @param delegate An EZMicrophoneDelegate delegate that will receive the audioReceived callback.
+ @param audioStreamBasicDescription A custom AudioStreamBasicDescription for the microphone input.
+ @return An instance of the EZMicrophone class. This should be strongly retained.
+ */
+-(EZMicrophone *)initWithMicrophoneDelegate:(id<EZMicrophoneDelegate>)delegate
+ withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an instance of the EZMicrophone with a delegate to respond to the audioReceived callback and allows the caller to specify whether they'd immediately like to start fetching the audio data.
+ @param delegate An EZMicrophoneDelegate delegate that will receive the audioReceived callback.
+ @param startsImmediately A boolean indicating whether to start fetching the data immediately. If YES, the delegate's audioReceived callback will immediately start getting called.
+ @return An instance of the EZMicrophone class. This should be strongly retained.
+ */
+- (EZMicrophone *)initWithMicrophoneDelegate:(id<EZMicrophoneDelegate>)delegate
+ startsImmediately:(BOOL)startsImmediately;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an instance of the EZMicrophone with a custom AudioStreamBasicDescription and provides the caller with a delegate to respond to the audioReceived callback and allows the caller to specify whether they'd immediately like to start fetching the audio data.
+ @param delegate An EZMicrophoneDelegate delegate that will receive the audioReceived callback.
+ @param audioStreamBasicDescription A custom AudioStreamBasicDescription for the microphone input.
+ @param startsImmediately A boolean indicating whether to start fetching the data immediately. If YES, the delegate's audioReceived callback will immediately start getting called.
+ @return An instance of the EZMicrophone class. This should be strongly retained.
+ */
+- (EZMicrophone *)initWithMicrophoneDelegate:(id<EZMicrophoneDelegate>)delegate
+ withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription
+ startsImmediately:(BOOL)startsImmediately;
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Class Initializers
+///-----------------------------------------------------------
+
+/**
+ Creates an instance of the EZMicrophone with a delegate to respond to the audioReceived callback. This will not start fetching the audio until startFetchingAudio has been called. Use microphoneWithDelegate:startsImmediately: to instantiate this class and immediately start fetching audio data.
+ @param delegate An EZMicrophoneDelegate delegate that will receive the audioReceived callback.
+ @return An instance of the EZMicrophone class. This should be declared as a strong property!
+ */
++ (EZMicrophone *)microphoneWithDelegate:(id<EZMicrophoneDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an instance of the EZMicrophone with a delegate to respond to the audioReceived callback. This will not start fetching the audio until startFetchingAudio has been called. Use microphoneWithDelegate:startsImmediately: to instantiate this class and immediately start fetching audio data.
+ @param delegate An EZMicrophoneDelegate delegate that will receive the audioReceived callback.
+ @param audioStreamBasicDescription A custom AudioStreamBasicDescription for the microphone input.
+ @return An instance of the EZMicrophone class. This should be declared as a strong property!
+ */
++ (EZMicrophone *)microphoneWithDelegate:(id<EZMicrophoneDelegate>)delegate
+ withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an instance of the EZMicrophone with a delegate to respond to the audioReceived callback and allows the caller to specify whether they'd immediately like to start fetching the audio data.
+
+ @param delegate An EZMicrophoneDelegate delegate that will receive the audioReceived callback.
+ @param startsImmediately A boolean indicating whether to start fetching the data immediately. If YES, the delegate's audioReceived callback will immediately start getting called.
+ @return An instance of the EZMicrophone class. This should be strongly retained.
+ */
++ (EZMicrophone *)microphoneWithDelegate:(id<EZMicrophoneDelegate>)delegate
+ startsImmediately:(BOOL)startsImmediately;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an instance of the EZMicrophone with a delegate to respond to the audioReceived callback and allows the caller to specify whether they'd immediately like to start fetching the audio data.
+
+ @param delegate An EZMicrophoneDelegate delegate that will receive the audioReceived callback.
+ @param audioStreamBasicDescription A custom AudioStreamBasicDescription for the microphone input.
+ @param startsImmediately A boolean indicating whether to start fetching the data immediately. If YES, the delegate's audioReceived callback will immediately start getting called.
+ @return An instance of the EZMicrophone class. This should be strongly retained.
+ */
++ (EZMicrophone *)microphoneWithDelegate:(id<EZMicrophoneDelegate>)delegate
+ withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription
+ startsImmediately:(BOOL)startsImmediately;
+
+//------------------------------------------------------------------------------
+#pragma mark - Shared Instance
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Shared Instance
+///-----------------------------------------------------------
+
+/**
+ A shared instance of the microphone component. Most applications will only need to use one instance of the microphone component across multiple views. Make sure to call the `startFetchingAudio` method to receive the audio data in the microphone delegate.
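+
+ For example (a minimal sketch, assuming the caller adopts EZMicrophoneDelegate):
+
+     [EZMicrophone sharedMicrophone].delegate = self;
+     [[EZMicrophone sharedMicrophone] startFetchingAudio];
+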
+ @return A shared instance of the `EZMicrophone` component.
+ */
++ (EZMicrophone *)sharedMicrophone;
+
+//------------------------------------------------------------------------------
+#pragma mark - Events
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Starting/Stopping The Microphone
+///-----------------------------------------------------------
+
+/**
+ Starts fetching audio from the default microphone. Will notify delegate with audioReceived callback.
+ */
+- (void)startFetchingAudio;
+
+//------------------------------------------------------------------------------
+
+/**
+ Stops fetching audio. Will stop notifying the delegate's audioReceived callback.
+ */
+- (void)stopFetchingAudio;
+
+//------------------------------------------------------------------------------
+#pragma mark - Getters
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Getting The Microphone's Audio Format
+///-----------------------------------------------------------
+
+/**
+ Provides the AudioStreamBasicDescription structure containing the format of the microphone's audio.
+ @return An AudioStreamBasicDescription structure describing the format of the microphone's audio.
+ */
+- (AudioStreamBasicDescription)audioStreamBasicDescription;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the underlying Audio Unit that is being used to fetch the audio.
+ @return The AudioUnit used for the microphone
+ */
+- (AudioUnit *)audioUnit;
+
+//------------------------------------------------------------------------------
+#pragma mark - Setters
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Customizing The Microphone Stream Format
+///-----------------------------------------------------------
+
+/**
+ Sets the AudioStreamBasicDescription on the microphone input. Must be linear PCM and must be the same sample rate as the stream format coming in (check the current `audioStreamBasicDescription` before setting).
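+
+ For example, to request stereo non-interleaved float data at the current sample rate (a rough sketch using the EZAudioUtilities helper; `microphone` is an assumed EZMicrophone instance, and this should be done before startFetchingAudio per the warning below):
+
+     AudioStreamBasicDescription asbd = [EZAudioUtilities stereoFloatNonInterleavedFormatWithSampleRate:microphone.audioStreamBasicDescription.mSampleRate];
+     [microphone setAudioStreamBasicDescription:asbd];
+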
+ @warning Do not set this while fetching audio (startFetchingAudio)
+ @param asbd The new AudioStreamBasicDescription to use in place of the current audio format description.
+ */
+- (void)setAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd;
+
+///-----------------------------------------------------------
+/// @name Setting The Microphone's Hardware Device
+///-----------------------------------------------------------
+
+/**
+ Sets the EZAudioDevice being used to pull the microphone data.
+ - On iOS this can be any of the available microphones on the iPhone/iPad devices (usually there are 3). Defaults to the first microphone found (bottom mic)
+ - On OSX this can be any of the plugged in devices that Core Audio can detect (see kAudioUnitSubType_HALOutput for more information, or System Preferences -> Sound for the available inputs)
+ @param device An EZAudioDevice instance that should be used to fetch the microphone data.
+ */
+- (void)setDevice:(EZAudioDevice *)device;
+
+//------------------------------------------------------------------------------
+#pragma mark - Direct Output
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Setting The Microphone's Output (Direct Out)
+///-----------------------------------------------------------
+
+/**
+ When set this will pipe out the contents of the microphone into an EZOutput. This is known as a passthrough or direct out that will simply pipe the microphone input to an output.
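+
+ For example (a minimal sketch, assuming `self.output` holds an EZOutput instance created elsewhere):
+
+     // Pipe the microphone straight to the output so the input is heard through the speaker.
+     self.microphone.output = self.output;
+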
+ @param output An EZOutput instance that the microphone will use to output its audio data to the speaker.
+ */
+- (void)setOutput:(EZOutput *)output;
+
+//------------------------------------------------------------------------------
+#pragma mark - Subclass Methods
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Subclass
+///-----------------------------------------------------------
+
+/**
+ The default AudioStreamBasicDescription set as the stream format of the microphone if no custom description is set. Defaults to a non-interleaved float format with the number of channels specified by the `numberOfChannels` method.
+ @return An AudioStreamBasicDescription that will be used as the default stream format.
+ */
+- (AudioStreamBasicDescription)defaultStreamFormat;
+
+//------------------------------------------------------------------------------
+
+/**
+ The number of channels the input microphone is expected to have. Defaults to 1 (assumes microphone is mono).
+ @return A UInt32 representing the number of channels expected for the microphone.
+ */
+- (UInt32)numberOfChannels;
+
+//------------------------------------------------------------------------------
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZMicrophone.m b/Pods/EZAudio/EZAudio/EZMicrophone.m
new file mode 100644
index 0000000..dfed3dd
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZMicrophone.m
@@ -0,0 +1,637 @@
+//
+// EZMicrophone.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 9/2/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZMicrophone.h"
+#import "EZAudioFloatConverter.h"
+#import "EZAudioUtilities.h"
+#import "EZAudioDevice.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Data Structures
+//------------------------------------------------------------------------------
+
+typedef struct EZMicrophoneInfo
+{
+ AudioUnit audioUnit;
+ AudioBufferList *audioBufferList;
+ float **floatData;
+ AudioStreamBasicDescription inputFormat;
+ AudioStreamBasicDescription streamFormat;
+} EZMicrophoneInfo;
+
+//------------------------------------------------------------------------------
+#pragma mark - Callbacks
+//------------------------------------------------------------------------------
+
+static OSStatus EZAudioMicrophoneCallback(void *inRefCon,
+ AudioUnitRenderActionFlags *ioActionFlags,
+ const AudioTimeStamp *inTimeStamp,
+ UInt32 inBusNumber,
+ UInt32 inNumberFrames,
+ AudioBufferList *ioData);
+
+//------------------------------------------------------------------------------
+#pragma mark - EZMicrophone (Interface Extension)
+//------------------------------------------------------------------------------
+
+@interface EZMicrophone ()
+@property (nonatomic, strong) EZAudioFloatConverter *floatConverter;
+@property (nonatomic, assign) EZMicrophoneInfo *info;
+@end
+
+@implementation EZMicrophone
+
+//------------------------------------------------------------------------------
+#pragma mark - Dealloc
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+ [[NSNotificationCenter defaultCenter] removeObserver:self];
+ [EZAudioUtilities checkResult:AudioUnitUninitialize(self.info->audioUnit)
+ operation:"Failed to unintialize audio unit for microphone"];
+ [EZAudioUtilities freeBufferList:self.info->audioBufferList];
+ [EZAudioUtilities freeFloatBuffers:self.info->floatData
+ numberOfChannels:self.info->streamFormat.mChannelsPerFrame];
+ free(self.info);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initialization
+//------------------------------------------------------------------------------
+
+- (id)init
+{
+ self = [super init];
+ if(self)
+ {
+ self.info = (EZMicrophoneInfo *)malloc(sizeof(EZMicrophoneInfo));
+ memset(self.info, 0, sizeof(EZMicrophoneInfo));
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (EZMicrophone *)initWithMicrophoneDelegate:(id<EZMicrophoneDelegate>)delegate
+{
+ self = [super init];
+ if(self)
+ {
+ self.info = (EZMicrophoneInfo *)malloc(sizeof(EZMicrophoneInfo));
+ memset(self.info, 0, sizeof(EZMicrophoneInfo));
+ _delegate = delegate;
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+-(EZMicrophone *)initWithMicrophoneDelegate:(id<EZMicrophoneDelegate>)delegate
+ withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription
+{
+ self = [self initWithMicrophoneDelegate:delegate];
+ if(self)
+ {
+ [self setAudioStreamBasicDescription:audioStreamBasicDescription];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (EZMicrophone *)initWithMicrophoneDelegate:(id<EZMicrophoneDelegate>)delegate
+ startsImmediately:(BOOL)startsImmediately
+{
+ self = [self initWithMicrophoneDelegate:delegate];
+ if(self)
+ {
+ if (startsImmediately)
+ {
+ [self startFetchingAudio];
+ }
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+-(EZMicrophone *)initWithMicrophoneDelegate:(id<EZMicrophoneDelegate>)delegate
+ withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription
+ startsImmediately:(BOOL)startsImmediately
+{
+ self = [self initWithMicrophoneDelegate:delegate
+ withAudioStreamBasicDescription:audioStreamBasicDescription];
+ if(self)
+ {
+ if (startsImmediately)
+ {
+ [self startFetchingAudio];
+ }
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
++ (EZMicrophone *)microphoneWithDelegate:(id<EZMicrophoneDelegate>)delegate
+{
+ return [[EZMicrophone alloc] initWithMicrophoneDelegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+
++ (EZMicrophone *)microphoneWithDelegate:(id<EZMicrophoneDelegate>)delegate
+ withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription
+{
+ return [[EZMicrophone alloc] initWithMicrophoneDelegate:delegate
+ withAudioStreamBasicDescription:audioStreamBasicDescription];
+}
+
+//------------------------------------------------------------------------------
+
++ (EZMicrophone *)microphoneWithDelegate:(id<EZMicrophoneDelegate>)delegate
+ startsImmediately:(BOOL)startsImmediately
+{
+ return [[EZMicrophone alloc] initWithMicrophoneDelegate:delegate
+ startsImmediately:startsImmediately];
+}
+
+//------------------------------------------------------------------------------
+
++ (EZMicrophone *)microphoneWithDelegate:(id<EZMicrophoneDelegate>)delegate
+ withAudioStreamBasicDescription:(AudioStreamBasicDescription)audioStreamBasicDescription
+ startsImmediately:(BOOL)startsImmediately
+{
+ return [[EZMicrophone alloc] initWithMicrophoneDelegate:delegate
+ withAudioStreamBasicDescription:audioStreamBasicDescription
+ startsImmediately:startsImmediately];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Singleton
+//------------------------------------------------------------------------------
+
++ (EZMicrophone *)sharedMicrophone
+{
+ static EZMicrophone *_sharedMicrophone = nil;
+ static dispatch_once_t onceToken;
+ dispatch_once(&onceToken, ^{
+ _sharedMicrophone = [[EZMicrophone alloc] init];
+ });
+ return _sharedMicrophone;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setup
+//------------------------------------------------------------------------------
+
+- (void)setup
+{
+ // Create an input component description for mic input
+ AudioComponentDescription inputComponentDescription;
+ inputComponentDescription.componentType = kAudioUnitType_Output;
+ inputComponentDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
+#if TARGET_OS_IPHONE
+ inputComponentDescription.componentSubType = kAudioUnitSubType_RemoteIO;
+#elif TARGET_OS_MAC
+ inputComponentDescription.componentSubType = kAudioUnitSubType_HALOutput;
+#endif
+
+ // get the first matching component
+ AudioComponent inputComponent = AudioComponentFindNext( NULL , &inputComponentDescription);
+ NSAssert(inputComponent, @"Couldn't get input component unit!");
+
+ // create new instance of component
+ [EZAudioUtilities checkResult:AudioComponentInstanceNew(inputComponent, &self.info->audioUnit)
+ operation:"Failed to get audio component instance"];
+
+#if TARGET_OS_IPHONE
+ // must enable input scope for remote IO unit
+ UInt32 flag = 1;
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->audioUnit,
+ kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Input,
+ 1,
+ &flag,
+ sizeof(flag))
+ operation:"Couldn't enable input on remote IO unit."];
+#endif
+ [self setDevice:[EZAudioDevice currentInputDevice]];
+
+ UInt32 propSize = sizeof(self.info->inputFormat);
+ [EZAudioUtilities checkResult:AudioUnitGetProperty(self.info->audioUnit,
+ kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Input,
+ 1,
+ &self.info->inputFormat,
+ &propSize)
+ operation:"Failed to get stream format of microphone input scope"];
+#if TARGET_OS_IPHONE
+ self.info->inputFormat.mSampleRate = [[AVAudioSession sharedInstance] sampleRate];
+ NSAssert(self.info->inputFormat.mSampleRate, @"Expected AVAudioSession sample rate to be greater than 0.0. Did you setup the audio session?");
+#elif TARGET_OS_MAC
+#endif
+ [self setAudioStreamBasicDescription:[self defaultStreamFormat]];
+
+ // render callback
+ AURenderCallbackStruct renderCallbackStruct;
+ renderCallbackStruct.inputProc = EZAudioMicrophoneCallback;
+ renderCallbackStruct.inputProcRefCon = (__bridge void *)(self);
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->audioUnit,
+ kAudioOutputUnitProperty_SetInputCallback,
+ kAudioUnitScope_Global,
+ 1,
+ &renderCallbackStruct,
+ sizeof(renderCallbackStruct))
+ operation:"Failed to set render callback"];
+
+ [EZAudioUtilities checkResult:AudioUnitInitialize(self.info->audioUnit)
+ operation:"Failed to initialize input unit"];
+
+ // setup notifications
+ [self setupNotifications];
+}
+
+- (void)setupNotifications
+{
+#if TARGET_OS_IPHONE
+ [[NSNotificationCenter defaultCenter] addObserver:self
+ selector:@selector(microphoneWasInterrupted:)
+ name:AVAudioSessionInterruptionNotification
+ object:nil];
+ [[NSNotificationCenter defaultCenter] addObserver:self
+ selector:@selector(microphoneRouteChanged:)
+ name:AVAudioSessionRouteChangeNotification
+ object:nil];
+#elif TARGET_OS_MAC
+#endif
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Notifications
+//------------------------------------------------------------------------------
+
+#if TARGET_OS_IPHONE
+
+- (void)microphoneWasInterrupted:(NSNotification *)notification
+{
+ AVAudioSessionInterruptionType type = [notification.userInfo[AVAudioSessionInterruptionTypeKey] unsignedIntegerValue];
+ switch (type)
+ {
+ case AVAudioSessionInterruptionTypeBegan:
+ {
+ [self stopFetchingAudio];
+ break;
+ }
+ case AVAudioSessionInterruptionTypeEnded:
+ {
+ AVAudioSessionInterruptionOptions option = [notification.userInfo[AVAudioSessionInterruptionOptionKey] unsignedIntegerValue];
+ if (option == AVAudioSessionInterruptionOptionShouldResume)
+ {
+ [self startFetchingAudio];
+ }
+ break;
+ }
+ default:
+ {
+ break;
+ }
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)microphoneRouteChanged:(NSNotification *)notification
+{
+ EZAudioDevice *device = [EZAudioDevice currentInputDevice];
+ [self setDevice:device];
+}
+
+#elif TARGET_OS_MAC
+#endif
+
+//------------------------------------------------------------------------------
+#pragma mark - Events
+//------------------------------------------------------------------------------
+
+-(void)startFetchingAudio
+{
+ //
+ // Start output unit
+ //
+ [EZAudioUtilities checkResult:AudioOutputUnitStart(self.info->audioUnit)
+ operation:"Failed to start microphone audio unit"];
+
+ //
+ // Notify delegate
+ //
+ if ([self.delegate respondsToSelector:@selector(microphone:changedPlayingState:)])
+ {
+ [self.delegate microphone:self changedPlayingState:YES];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+-(void)stopFetchingAudio
+{
+ //
+ // Stop output unit
+ //
+ [EZAudioUtilities checkResult:AudioOutputUnitStop(self.info->audioUnit)
+ operation:"Failed to stop microphone audio unit"];
+
+ //
+ // Notify delegate
+ //
+ if ([self.delegate respondsToSelector:@selector(microphone:changedPlayingState:)])
+ {
+ [self.delegate microphone:self changedPlayingState:NO];
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Getters
+//------------------------------------------------------------------------------
+
+-(AudioStreamBasicDescription)audioStreamBasicDescription
+{
+ return self.info->streamFormat;
+}
+
+//------------------------------------------------------------------------------
+
+-(AudioUnit *)audioUnit
+{
+ return &self.info->audioUnit;
+}
+
+//------------------------------------------------------------------------------
+
+- (UInt32)maximumBufferSize
+{
+ UInt32 maximumBufferSize;
+ UInt32 propSize = sizeof(maximumBufferSize);
+ [EZAudioUtilities checkResult:AudioUnitGetProperty(self.info->audioUnit,
+ kAudioUnitProperty_MaximumFramesPerSlice,
+ kAudioUnitScope_Global,
+ 0,
+ &maximumBufferSize,
+ &propSize)
+ operation:"Failed to get maximum number of frames per slice"];
+ return maximumBufferSize;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setters
+//------------------------------------------------------------------------------
+
+- (void)setMicrophoneOn:(BOOL)microphoneOn
+{
+ _microphoneOn = microphoneOn;
+ if (microphoneOn)
+ {
+ [self startFetchingAudio];
+ }
+ else {
+ [self stopFetchingAudio];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setAudioStreamBasicDescription:(AudioStreamBasicDescription)asbd
+{
+ if (self.floatConverter)
+ {
+ [EZAudioUtilities freeBufferList:self.info->audioBufferList];
+ [EZAudioUtilities freeFloatBuffers:self.info->floatData
+ numberOfChannels:self.info->streamFormat.mChannelsPerFrame];
+ }
+
+ // set new stream format
+ self.info->streamFormat = asbd;
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->audioUnit,
+ kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Input,
+ 0,
+ &asbd,
+ sizeof(asbd))
+ operation:"Failed to set stream format on input scope"];
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->audioUnit,
+ kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Output,
+ 1,
+ &asbd,
+ sizeof(asbd))
+ operation:"Failed to set stream format on output scope"];
+
+ // allocate float buffers
+ UInt32 maximumBufferSize = [self maximumBufferSize];
+ BOOL isInterleaved = [EZAudioUtilities isInterleaved:asbd];
+ UInt32 channels = asbd.mChannelsPerFrame;
+ self.floatConverter = [[EZAudioFloatConverter alloc] initWithInputFormat:asbd];
+ self.info->floatData = [EZAudioUtilities floatBuffersWithNumberOfFrames:maximumBufferSize
+ numberOfChannels:channels];
+ self.info->audioBufferList = [EZAudioUtilities audioBufferListWithNumberOfFrames:maximumBufferSize
+ numberOfChannels:channels
+ interleaved:isInterleaved];
+
+ // notify delegate
+ if ([self.delegate respondsToSelector:@selector(microphone:hasAudioStreamBasicDescription:)])
+ {
+ [self.delegate microphone:self hasAudioStreamBasicDescription:asbd];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setDevice:(EZAudioDevice *)device
+{
+#if TARGET_OS_IPHONE
+
+ // if the devices are equal then ignore
+ if ([device isEqual:self.device])
+ {
+ return;
+ }
+
+ NSError *error;
+ [[AVAudioSession sharedInstance] setPreferredInput:device.port error:&error];
+ if (error)
+ {
+ NSLog(@"Error setting input device port (%@), reason: %@",
+ device.port,
+ error.localizedDescription);
+ }
+ else
+ {
+ if (device.dataSource)
+ {
+ [[AVAudioSession sharedInstance] setInputDataSource:device.dataSource error:&error];
+ if (error)
+ {
+ NSLog(@"Error setting input data source (%@), reason: %@",
+ device.dataSource,
+ error.localizedDescription);
+ }
+ }
+ }
+
+#elif TARGET_OS_MAC
+ UInt32 inputEnabled = device.inputChannelCount > 0;
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->audioUnit,
+ kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Input,
+ 1,
+ &inputEnabled,
+ sizeof(inputEnabled))
+ operation:"Failed to set flag on device input"];
+
+ UInt32 outputEnabled = device.outputChannelCount > 0;
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->audioUnit,
+ kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Output,
+ 0,
+ &outputEnabled,
+ sizeof(outputEnabled))
+ operation:"Failed to set flag on device output"];
+
+ AudioDeviceID deviceId = device.deviceID;
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->audioUnit,
+ kAudioOutputUnitProperty_CurrentDevice,
+ kAudioUnitScope_Global,
+ 0,
+ &deviceId,
+ sizeof(AudioDeviceID))
+ operation:"Couldn't set default device on I/O unit"];
+#endif
+
+ // store device
+ _device = device;
+
+ // notify delegate
+ if ([self.delegate respondsToSelector:@selector(microphone:changedDevice:)])
+ {
+ [self.delegate microphone:self changedDevice:device];
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Output
+//------------------------------------------------------------------------------
+
+- (void)setOutput:(EZOutput *)output
+{
+ _output = output;
+ _output.inputFormat = self.audioStreamBasicDescription;
+ _output.dataSource = self;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - EZOutputDataSource
+//------------------------------------------------------------------------------
+
+- (OSStatus) output:(EZOutput *)output
+ shouldFillAudioBufferList:(AudioBufferList *)audioBufferList
+ withNumberOfFrames:(UInt32)frames
+ timestamp:(const AudioTimeStamp *)timestamp
+{
+ memcpy(audioBufferList,
+ self.info->audioBufferList,
+ sizeof(AudioBufferList) + (self.info->audioBufferList->mNumberBuffers - 1)*sizeof(AudioBuffer));
+ return noErr;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Subclass
+//------------------------------------------------------------------------------
+
+- (AudioStreamBasicDescription)defaultStreamFormat
+{
+ return [EZAudioUtilities floatFormatWithNumberOfChannels:[self numberOfChannels]
+ sampleRate:self.info->inputFormat.mSampleRate];
+}
+
+//------------------------------------------------------------------------------
+
+- (UInt32)numberOfChannels
+{
+#if TARGET_OS_IPHONE
+ return 1;
+#elif TARGET_OS_MAC
+ return (UInt32)self.device.inputChannelCount;
+#endif
+}
+
+//------------------------------------------------------------------------------
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - Callbacks
+//------------------------------------------------------------------------------
+
+static OSStatus EZAudioMicrophoneCallback(void *inRefCon,
+ AudioUnitRenderActionFlags *ioActionFlags,
+ const AudioTimeStamp *inTimeStamp,
+ UInt32 inBusNumber,
+ UInt32 inNumberFrames,
+ AudioBufferList *ioData)
+{
+ EZMicrophone *microphone = (__bridge EZMicrophone *)inRefCon;
+ EZMicrophoneInfo *info = (EZMicrophoneInfo *)microphone.info;
+
+ // render audio into buffer
+ OSStatus result = AudioUnitRender(info->audioUnit,
+ ioActionFlags,
+ inTimeStamp,
+ inBusNumber,
+ inNumberFrames,
+ info->audioBufferList);
+
+ // notify delegate of new buffer list to process
+ if ([microphone.delegate respondsToSelector:@selector(microphone:hasBufferList:withBufferSize:withNumberOfChannels:)])
+ {
+ [microphone.delegate microphone:microphone
+ hasBufferList:info->audioBufferList
+ withBufferSize:inNumberFrames
+ withNumberOfChannels:info->streamFormat.mChannelsPerFrame];
+ }
+
+ // notify delegate of new float data processed
+ if ([microphone.delegate respondsToSelector:@selector(microphone:hasAudioReceived:withBufferSize:withNumberOfChannels:)])
+ {
+ // convert to float
+ [microphone.floatConverter convertDataFromAudioBufferList:info->audioBufferList
+ withNumberOfFrames:inNumberFrames
+ toFloatBuffers:info->floatData];
+ [microphone.delegate microphone:microphone
+ hasAudioReceived:info->floatData
+ withBufferSize:inNumberFrames
+ withNumberOfChannels:info->streamFormat.mChannelsPerFrame];
+ }
+
+ return result;
+}
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZOutput.h b/Pods/EZAudio/EZAudio/EZOutput.h
new file mode 100644
index 0000000..701894e
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZOutput.h
@@ -0,0 +1,376 @@
+//
+// EZOutput.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 12/2/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import <AudioToolbox/AudioToolbox.h>
+#if TARGET_OS_IPHONE
+#elif TARGET_OS_MAC
+#import <CoreAudio/CoreAudio.h>
+#endif
+
+@class EZAudioDevice;
+@class EZOutput;
+
+//------------------------------------------------------------------------------
+#pragma mark - Constants
+//------------------------------------------------------------------------------
+
+FOUNDATION_EXPORT UInt32 const EZOutputMaximumFramesPerSlice;
+FOUNDATION_EXPORT Float64 const EZOutputDefaultSampleRate;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZOutputDataSource
+//------------------------------------------------------------------------------
+
+/**
+ The EZOutputDataSource specifies a receiver to provide audio data when the EZOutput is started. Since the 0.4.0 release this has been simplified to only one data source method.
+ */
+@protocol EZOutputDataSource <NSObject>
+
+@optional
+///-----------------------------------------------------------
+/// @name Providing Audio Data
+///-----------------------------------------------------------
+
+@required
+
+/**
+ Called anytime the EZOutput needs audio data to play. The output passes in an already allocated AudioBufferList to be filled with audio data. The audio data provided here is expected to be in the format specified by the EZOutput `inputFormat` property and will be converted into the client format specified by the EZOutput `clientFormat` property. A hedged example implementation follows this declaration.
+ @param output The instance of the EZOutput that asked for the data.
+ @param audioBufferList The AudioBufferList structure pointer that needs to be filled with audio data.
+ @param frames The number of frames, as a UInt32, the output needs to properly fill its output buffer.
+ @param timestamp An AudioTimeStamp pointer to use if you need the current host time.
+ @return An OSStatus code. If there was no error, return the noErr status code.
+ */
+- (OSStatus) output:(EZOutput *)output
+ shouldFillAudioBufferList:(AudioBufferList *)audioBufferList
+ withNumberOfFrames:(UInt32)frames
+ timestamp:(const AudioTimeStamp *)timestamp;
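+
+// As a hedged illustration (not part of EZAudio itself), a conforming data source
+// could fill the buffer list with a 440 Hz sine wave. The `theta` property used to
+// track the oscillator phase is a hypothetical ivar on the data source, and the
+// buffer layout assumes the default non-interleaved float `inputFormat`:
+//
+//    - (OSStatus)        output:(EZOutput *)output
+//     shouldFillAudioBufferList:(AudioBufferList *)audioBufferList
+//            withNumberOfFrames:(UInt32)frames
+//                     timestamp:(const AudioTimeStamp *)timestamp
+//    {
+//        double sampleRate = [output inputFormat].mSampleRate;
+//        double theta = self.theta;
+//        for (UInt32 frame = 0; frame < frames; frame++)
+//        {
+//            float sample = (float)sin(theta);
+//            // one buffer per channel for non-interleaved audio
+//            for (UInt32 channel = 0; channel < audioBufferList->mNumberBuffers; channel++)
+//            {
+//                float *channelData = (float *)audioBufferList->mBuffers[channel].mData;
+//                channelData[frame] = sample;
+//            }
+//            theta += 2.0 * M_PI * 440.0 / sampleRate;
+//        }
+//        self.theta = fmod(theta, 2.0 * M_PI);
+//        return noErr;
+//    }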
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZOutputDelegate
+//------------------------------------------------------------------------------
+
+/**
+ The EZOutputDelegate for the EZOutput component provides a receiver to handle play state, device, and audio data change events. This is very similar to the EZMicrophoneDelegate for the EZMicrophone and the EZAudioFileDelegate for the EZAudioFile.
+ */
+@protocol EZOutputDelegate <NSObject>
+
+@optional
+
+/**
+ Called anytime the EZOutput starts or stops.
+ @param output The instance of the EZOutput that triggered the event.
+ @param isPlaying A BOOL indicating whether the EZOutput instance is playing or not.
+ */
+- (void)output:(EZOutput *)output changedPlayingState:(BOOL)isPlaying;
+
+//------------------------------------------------------------------------------
+
+/**
+ Called anytime the `device` changes on an EZOutput instance.
+ @param output The instance of the EZOutput that triggered the event.
+ @param device The instance of the new EZAudioDevice the output is using to play audio data.
+ */
+- (void)output:(EZOutput *)output changedDevice:(EZAudioDevice *)device;
+
+//------------------------------------------------------------------------------
+
+/**
+ Like the EZMicrophoneDelegate, this method provides an array of float arrays of the audio the EZOutput just played, each float array representing a channel of audio data. This occurs on a background thread, so any drawing code must explicitly perform its functions on the main thread (see the sketch following this declaration).
+ @param output The instance of the EZOutput that triggered the event.
+ @param buffer The audio data as an array of float arrays. In a stereo signal buffer[0] represents the left channel while buffer[1] would represent the right channel.
+ @param bufferSize A UInt32 representing the size of each of the buffers (the length of each float array).
+ @param numberOfChannels A UInt32 representing the number of channels (you can use this to know how many float arrays are in the `buffer` parameter).
+ @warning This function executes on a background thread to avoid blocking any audio operations. If operations should be performed on any other thread (like the main thread) it should be performed within a dispatch block like so: dispatch_async(dispatch_get_main_queue(), ^{ ...Your Code... })
+ */
+- (void) output:(EZOutput *)output
+ playedAudio:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels;
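+
+// A minimal sketch of a delegate implementation that pushes the played audio to a
+// waveform view. `audioPlot` is a hypothetical EZAudioPlot property on the delegate,
+// not something EZOutput provides; the dispatch to the main queue is the important part:
+//
+//    - (void)        output:(EZOutput *)output
+//               playedAudio:(float **)buffer
+//            withBufferSize:(UInt32)bufferSize
+//      withNumberOfChannels:(UInt32)numberOfChannels
+//    {
+//        __weak typeof (self) weakSelf = self;
+//        dispatch_async(dispatch_get_main_queue(), ^{
+//            // only touch UIKit/AppKit objects on the main thread
+//            [weakSelf.audioPlot updateBuffer:buffer[0] withBufferSize:bufferSize];
+//        });
+//    }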
+
+//------------------------------------------------------------------------------
+
+@end
+
+/**
+ The EZOutput component provides a generic output to glue all the other EZAudio components together and push whatever sound you've created to the default output device (think opposite of the microphone). The EZOutputDataSource provides the required AudioBufferList needed to populate the output buffer while the EZOutputDelegate provides the same kind of mechanism as the EZMicrophoneDelegate or EZAudioFileDelegate in that you will receive a callback that provides non-interleaved, float data for visualizing the output (done using an internal float converter). As of 0.4.0 the EZOutput has been simplified to a single EZOutputDataSource method and now uses an AUGraph to provide format conversion from the `inputFormat` to the playback graph's `clientFormat` linear PCM formats, mixer controls for setting volume and pan settings, hooks to add in any number of effect audio units (see the `connectOutputOfSourceNode:sourceNodeOutputBus:toDestinationNode:destinationNodeInputBus:inGraph:` subclass method), and hardware device toggling (via EZAudioDevice).
+ */
+@interface EZOutput : NSObject
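+
+// A hedged usage sketch (names like `self.output` are the caller's own): create an
+// output backed by a data source, optionally adjust the mixer controls, then start
+// pulling audio. `self` is assumed to conform to EZOutputDataSource and EZOutputDelegate.
+//
+//    self.output = [EZOutput outputWithDataSource:self];
+//    self.output.delegate = self;
+//    self.output.volume = 0.75f;  // mixer gain, 0.0 - 1.0
+//    self.output.pan = 0.5f;      // centered
+//    [self.output startPlayback];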
+
+//------------------------------------------------------------------------------
+#pragma mark - Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Initializers
+///-----------------------------------------------------------
+
+/**
+ Creates a new instance of the EZOutput and allows the caller to specify an EZOutputDataSource.
+ @param dataSource The EZOutputDataSource that will be used to pull the audio data for the output callback.
+ @return A newly created instance of the EZOutput class.
+ */
+- (instancetype)initWithDataSource:(id<EZOutputDataSource>)dataSource;
+
+/**
+ Creates a new instance of the EZOutput and allows the caller to specify an EZOutputDataSource.
+ @param dataSource The EZOutputDataSource that will be used to pull the audio data for the output callback.
+ @param inputFormat The AudioStreamBasicDescription of the EZOutput.
+ @warning AudioStreamBasicDescription input formats must be linear PCM!
+ @return A newly created instance of the EZOutput class.
+ */
+- (instancetype)initWithDataSource:(id<EZOutputDataSource>)dataSource
+ inputFormat:(AudioStreamBasicDescription)inputFormat;
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Class Initializers
+///-----------------------------------------------------------
+
+/**
+ Class method to create a new instance of the EZOutput
+ @return A newly created instance of the EZOutput class.
+ */
++ (instancetype)output;
+
+/**
+ Class method to create a new instance of the EZOutput and allows the caller to specify an EZOutputDataSource.
+ @param dataSource The EZOutputDataSource that will be used to pull the audio data for the output callback.
+ @return A newly created instance of the EZOutput class.
+ */
++ (instancetype)outputWithDataSource:(id<EZOutputDataSource>)dataSource;
+
+/**
+ Class method to create a new instance of the EZOutput and allows the caller to specify an EZOutputDataSource.
+ @param dataSource The EZOutputDataSource that will be used to pull the audio data for the output callback.
+ @param inputFormat The AudioStreamBasicDescription of the EZOutput.
+ @warning AudioStreamBasicDescriptions that are invalid will cause the EZOutput to fail to initialize
+ @return A newly created instance of the EZOutput class.
+ */
++ (instancetype)outputWithDataSource:(id<EZOutputDataSource>)dataSource
+ inputFormat:(AudioStreamBasicDescription)inputFormat;
+
+//------------------------------------------------------------------------------
+#pragma mark - Singleton
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Shared Instance
+///-----------------------------------------------------------
+
+/**
+ Creates a shared instance of the EZOutput (one app will usually only need one output and share the role of the EZOutputDataSource).
+ @return The shared instance of the EZOutput class.
+ */
++ (instancetype)sharedOutput;
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Setting/Getting The Stream Formats
+///-----------------------------------------------------------
+
+/**
+ Provides the AudioStreamBasicDescription structure used at the beginning of the playback graph which is then converted into the `clientFormat` using the AUConverter audio unit.
+ @warning The AudioStreamBasicDescription set here must be linear PCM. Compressed formats are not supported; the EZAudioFile's clientFormat performs the audio conversion on the fly from compressed to linear PCM, so there is no additional work to be done there.
+ @return An AudioStreamBasicDescription structure describing the format of the audio data entering the playback graph.
+ */
+@property (nonatomic, readwrite) AudioStreamBasicDescription inputFormat;
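+
+// For instance, when the audio is coming from an EZAudioFile the output's `inputFormat`
+// is typically set to the file's `clientFormat` so the converter knows what to expect
+// (a sketch; `audioFile` is assumed to be an existing EZAudioFile instance):
+//
+//    self.output.inputFormat = self.audioFile.clientFormat;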
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the AudioStreamBasicDescription structure that serves as the common format used throughout the playback graph (similar to how the EZAudioFile has a clientFormat that is linear PCM to be shared amongst other components). The `inputFormat` is converted into this format at the beginning of the playback graph using an AUConverter audio unit. Defaults to whatever the `defaultClientFormat` method returns if a custom one isn't explicitly set.
+ @warning The AudioStreamBasicDescription set here must be linear PCM. Compressed formats are not supported by Audio Units.
+ @return An AudioStreamBasicDescription structure describing the common client format for the playback graph.
+ */
+@property (nonatomic, readwrite) AudioStreamBasicDescription clientFormat;
+
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Setting/Getting The Data Source and Delegate
+///-----------------------------------------------------------
+
+/**
+ The EZOutputDataSource that provides the audio data in the `inputFormat` for the EZOutput to play. If an EZOutputDataSource is not specified then the EZOutput will just output silence.
+ */
+@property (nonatomic, weak) id<EZOutputDataSource> dataSource;
+
+//------------------------------------------------------------------------------
+
+/**
+ The EZOutputDelegate that handles the output callbacks.
+ */
+@property (nonatomic, weak) id<EZOutputDelegate> delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides a flag indicating whether the EZOutput is pulling audio data from the EZOutputDataSource for playback.
+ @return YES if the EZOutput is running, NO if it is stopped
+ */
+@property (readonly) BOOL isPlaying;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current pan from the audio player's mixer audio unit in the playback graph. Setting the pan adjusts the direction of the audio signal from left (0) to right (1). Default is 0.5 (middle).
+ */
+@property (nonatomic, assign) float pan;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current volume from the audio player's mixer audio unit in the playback graph. Setting the volume adjusts the gain of the output between 0 and 1. Default is 1.
+ */
+@property (nonatomic, assign) float volume;
+
+//------------------------------------------------------------------------------
+#pragma mark - Core Audio Properties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Core Audio Properties
+///-----------------------------------------------------------
+
+/**
+ The AUGraph used to chain together the converter, mixer, and output audio units.
+ */
+@property (readonly) AUGraph graph;
+
+//------------------------------------------------------------------------------
+
+/**
+ The AudioUnit that is being used to convert the audio data coming into the output's playback graph.
+ */
+@property (readonly) AudioUnit converterAudioUnit;
+
+//------------------------------------------------------------------------------
+
+/**
+ The AudioUnit that is being used as the mixer to adjust the volume on the output's playback graph.
+ */
+@property (readonly) AudioUnit mixerAudioUnit;
+
+//------------------------------------------------------------------------------
+
+/**
+ The AudioUnit that is being used as the hardware output for the output's playback graph.
+ */
+@property (readonly) AudioUnit outputAudioUnit;
+
+//------------------------------------------------------------------------------
+#pragma mark - Setters
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Getting/Setting The Output's Hardware Device
+///-----------------------------------------------------------
+
+/**
+ An EZAudioDevice instance that is used to route the audio data out to the speaker. To find a list of available output devices see the EZAudioDevice `outputDevices` method.
+ */
+@property (nonatomic, strong, readwrite) EZAudioDevice *device;
+
+//------------------------------------------------------------------------------
+#pragma mark - Actions
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Starting/Stopping The Output
+///-----------------------------------------------------------
+
+/**
+ Starts pulling audio data from the EZOutputDataSource to the default device output.
+ */
+- (void)startPlayback;
+
+///-----------------------------------------------------------
+
+/**
+ Stops pulling audio data from the EZOutputDataSource to the default device output.
+ */
+- (void)stopPlayback;
+
+//------------------------------------------------------------------------------
+#pragma mark - Subclass
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Subclass
+///-----------------------------------------------------------
+
+/**
+ This method handles connecting the converter node to the mixer node within the AUGraph that is being used as the playback graph. Subclasses can override this method and insert their custom nodes to perform effects processing on the audio data being rendered.
+
+ This was inspired by Daniel Kennett's blog post on how to add a custom equalizer to a CocoaLibSpotify SPCoreAudioController's AUGraph. For more information see Daniel's post and example code here: http://ikennd.ac/blog/2012/04/augraph-basics-in-cocoalibspotify/.
+ @param sourceNode An AUNode representing the node the audio data is coming from.
+ @param sourceNodeOutputBus A UInt32 representing the output bus from the source node that should be connected into the next node's input bus.
+ @param destinationNode An AUNode representing the node the audio data should be connected to.
+ @param destinationNodeInputBus A UInt32 representing the input bus the source node's output bus should be connecting to.
+ @param graph The AUGraph that is being used to hold the playback graph. Same as from the `graph` property.
+ @return An OSStatus code. For no error return back `noErr`.
+ */
+- (OSStatus)connectOutputOfSourceNode:(AUNode)sourceNode
+ sourceNodeOutputBus:(UInt32)sourceNodeOutputBus
+ toDestinationNode:(AUNode)destinationNode
+ destinationNodeInputBus:(UInt32)destinationNodeInputBus
+ inGraph:(AUGraph)graph;
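+
+// A hedged sketch of a subclass override that splices an effect unit between the
+// converter and mixer nodes. The delay effect (kAudioUnitSubType_Delay) is only an
+// illustration; any effect audio unit could be inserted the same way:
+//
+//    - (OSStatus)connectOutputOfSourceNode:(AUNode)sourceNode
+//                      sourceNodeOutputBus:(UInt32)sourceNodeOutputBus
+//                        toDestinationNode:(AUNode)destinationNode
+//                  destinationNodeInputBus:(UInt32)destinationNodeInputBus
+//                                  inGraph:(AUGraph)graph
+//    {
+//        AudioComponentDescription delayDescription;
+//        delayDescription.componentType = kAudioUnitType_Effect;
+//        delayDescription.componentSubType = kAudioUnitSubType_Delay;
+//        delayDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
+//        delayDescription.componentFlags = 0;
+//        delayDescription.componentFlagsMask = 0;
+//
+//        AUNode delayNode;
+//        AUGraphAddNode(graph, &delayDescription, &delayNode);
+//
+//        // converter -> delay -> mixer instead of converter -> mixer
+//        AUGraphConnectNodeInput(graph, sourceNode, sourceNodeOutputBus, delayNode, 0);
+//        AUGraphConnectNodeInput(graph, delayNode, 0, destinationNode, destinationNodeInputBus);
+//        return noErr;
+//    }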
+
+//------------------------------------------------------------------------------
+
+/**
+ The default AudioStreamBasicDescription set as the client format of the output if no custom `clientFormat` is set. Defaults to a 44.1 kHz stereo, non-interleaved, float format.
+ @return An AudioStreamBasicDescription that will be used as the default stream format.
+ */
+- (AudioStreamBasicDescription)defaultClientFormat;
+
+//------------------------------------------------------------------------------
+
+/**
+ The default AudioStreamBasicDescription set as the `inputFormat` of the output if no custom `inputFormat` is set. Defaults to a 44.1 kHz stereo, non-interleaved, float format.
+ @return An AudioStreamBasicDescription that will be used as the default stream format.
+ */
+- (AudioStreamBasicDescription)defaultInputFormat;
+
+//------------------------------------------------------------------------------
+
+/**
+ The default value used as the AudioUnit subtype when creating the hardware output component. By default this is kAudioUnitSubType_RemoteIO for iOS and kAudioUnitSubType_HALOutput for OSX.
+ @warning If you change this to anything other than kAudioUnitSubType_HALOutput for OSX you will get a failed assertion because devices can only be set when using the HAL audio unit.
+ @return An OSType that represents the AudioUnit subtype for the hardware output component.
+ */
+- (OSType)outputAudioUnitSubType;
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZOutput.m b/Pods/EZAudio/EZAudio/EZOutput.m
new file mode 100644
index 0000000..0d660d6
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZOutput.m
@@ -0,0 +1,753 @@
+//
+// EZOutput.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 12/2/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZOutput.h"
+#import "EZAudioDevice.h"
+#import "EZAudioFloatConverter.h"
+#import "EZAudioUtilities.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Constants
+//------------------------------------------------------------------------------
+
+UInt32 const EZOutputMaximumFramesPerSlice = 4096;
+Float64 const EZOutputDefaultSampleRate = 44100.0f;
+
+//------------------------------------------------------------------------------
+#pragma mark - Data Structures
+//------------------------------------------------------------------------------
+
+typedef struct
+{
+ // stream format params
+ AudioStreamBasicDescription inputFormat;
+ AudioStreamBasicDescription clientFormat;
+
+ // float converted data
+ float **floatData;
+
+ // nodes
+ EZAudioNodeInfo converterNodeInfo;
+ EZAudioNodeInfo mixerNodeInfo;
+ EZAudioNodeInfo outputNodeInfo;
+
+ // audio graph
+ AUGraph graph;
+} EZOutputInfo;
+
+//------------------------------------------------------------------------------
+#pragma mark - Callbacks (Declaration)
+//------------------------------------------------------------------------------
+
+OSStatus EZOutputConverterInputCallback(void *inRefCon,
+ AudioUnitRenderActionFlags *ioActionFlags,
+ const AudioTimeStamp *inTimeStamp,
+ UInt32 inBusNumber,
+ UInt32 inNumberFrames,
+ AudioBufferList *ioData);
+
+//------------------------------------------------------------------------------
+
+OSStatus EZOutputGraphRenderCallback(void *inRefCon,
+ AudioUnitRenderActionFlags *ioActionFlags,
+ const AudioTimeStamp *inTimeStamp,
+ UInt32 inBusNumber,
+ UInt32 inNumberFrames,
+ AudioBufferList *ioData);
+
+//------------------------------------------------------------------------------
+#pragma mark - EZOutput (Interface Extension)
+//------------------------------------------------------------------------------
+
+@interface EZOutput ()
+@property (nonatomic, strong) EZAudioFloatConverter *floatConverter;
+@property (nonatomic, assign) EZOutputInfo *info;
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZOutput (Implementation)
+//------------------------------------------------------------------------------
+
+@implementation EZOutput
+
+//------------------------------------------------------------------------------
+#pragma mark - Dealloc
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+ if (self.floatConverter)
+ {
+ self.floatConverter = nil;
+ [EZAudioUtilities freeFloatBuffers:self.info->floatData
+ numberOfChannels:self.info->clientFormat.mChannelsPerFrame];
+ }
+ [EZAudioUtilities checkResult:AUGraphStop(self.info->graph)
+ operation:"Failed to stop graph"];
+ [EZAudioUtilities checkResult:AUGraphClose(self.info->graph)
+ operation:"Failed to close graph"];
+ [EZAudioUtilities checkResult:DisposeAUGraph(self.info->graph)
+ operation:"Failed to dispose of graph"];
+ free(self.info);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initialization
+//------------------------------------------------------------------------------
+
+- (instancetype) init
+{
+ self = [super init];
+ if (self)
+ {
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithDataSource:(id<EZOutputDataSource>)dataSource
+{
+ self = [self init];
+ if (self)
+ {
+ self.dataSource = dataSource;
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithDataSource:(id<EZOutputDataSource>)dataSource
+ inputFormat:(AudioStreamBasicDescription)inputFormat
+{
+ self = [self initWithDataSource:dataSource];
+ if (self)
+ {
+ self.inputFormat = inputFormat;
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
++ (instancetype)output
+{
+ return [[self alloc] init];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)outputWithDataSource:(id<EZOutputDataSource>)dataSource
+{
+ return [[self alloc] initWithDataSource:dataSource];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)outputWithDataSource:(id<EZOutputDataSource>)dataSource
+ inputFormat:(AudioStreamBasicDescription)inputFormat
+{
+ return [[self alloc] initWithDataSource:dataSource
+ inputFormat:inputFormat];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Singleton
+//------------------------------------------------------------------------------
+
++ (instancetype)sharedOutput
+{
+ static EZOutput *output;
+ static dispatch_once_t onceToken;
+ dispatch_once(&onceToken, ^
+ {
+ output = [[self alloc] init];
+ });
+ return output;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setup
+//------------------------------------------------------------------------------
+
+- (void)setup
+{
+ //
+ // Create structure to hold state data
+ //
+ self.info = (EZOutputInfo *)malloc(sizeof(EZOutputInfo));
+ memset(self.info, 0, sizeof(EZOutputInfo));
+
+ //
+ // Setup the audio graph
+ //
+ [EZAudioUtilities checkResult:NewAUGraph(&self.info->graph)
+ operation:"Failed to create graph"];
+
+ //
+ // Add converter node
+ //
+ AudioComponentDescription converterDescription;
+ converterDescription.componentType = kAudioUnitType_FormatConverter;
+ converterDescription.componentSubType = kAudioUnitSubType_AUConverter;
+ converterDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
+ [EZAudioUtilities checkResult:AUGraphAddNode(self.info->graph,
+ &converterDescription,
+ &self.info->converterNodeInfo.node)
+ operation:"Failed to add converter node to audio graph"];
+
+ //
+ // Add mixer node
+ //
+ AudioComponentDescription mixerDescription;
+ mixerDescription.componentType = kAudioUnitType_Mixer;
+#if TARGET_OS_IPHONE
+ mixerDescription.componentSubType = kAudioUnitSubType_MultiChannelMixer;
+#elif TARGET_OS_MAC
+ mixerDescription.componentSubType = kAudioUnitSubType_StereoMixer;
+#endif
+ mixerDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
+ [EZAudioUtilities checkResult:AUGraphAddNode(self.info->graph,
+ &mixerDescription,
+ &self.info->mixerNodeInfo.node)
+ operation:"Failed to add mixer node to audio graph"];
+
+ //
+ // Add output node
+ //
+ AudioComponentDescription outputDescription;
+ outputDescription.componentType = kAudioUnitType_Output;
+ outputDescription.componentSubType = [self outputAudioUnitSubType];
+ outputDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
+ [EZAudioUtilities checkResult:AUGraphAddNode(self.info->graph,
+ &outputDescription,
+ &self.info->outputNodeInfo.node)
+ operation:"Failed to add output node to audio graph"];
+
+ //
+ // Open the graph
+ //
+ [EZAudioUtilities checkResult:AUGraphOpen(self.info->graph)
+ operation:"Failed to open graph"];
+
+ //
+ // Make node connections
+ //
+ OSStatus status = [self connectOutputOfSourceNode:self.info->converterNodeInfo.node
+ sourceNodeOutputBus:0
+ toDestinationNode:self.info->mixerNodeInfo.node
+ destinationNodeInputBus:0
+ inGraph:self.info->graph];
+ [EZAudioUtilities checkResult:status
+ operation:"Failed to connect output of source node to destination node in graph"];
+
+ //
+ // Connect mixer to output
+ //
+ [EZAudioUtilities checkResult:AUGraphConnectNodeInput(self.info->graph,
+ self.info->mixerNodeInfo.node,
+ 0,
+ self.info->outputNodeInfo.node,
+ 0)
+ operation:"Failed to connect mixer node to output node"];
+
+ //
+ // Get the audio units
+ //
+ [EZAudioUtilities checkResult:AUGraphNodeInfo(self.info->graph,
+ self.info->converterNodeInfo.node,
+ &converterDescription,
+ &self.info->converterNodeInfo.audioUnit)
+ operation:"Failed to get converter audio unit"];
+ [EZAudioUtilities checkResult:AUGraphNodeInfo(self.info->graph,
+ self.info->mixerNodeInfo.node,
+ &mixerDescription,
+ &self.info->mixerNodeInfo.audioUnit)
+ operation:"Failed to get mixer audio unit"];
+ [EZAudioUtilities checkResult:AUGraphNodeInfo(self.info->graph,
+ self.info->outputNodeInfo.node,
+ &outputDescription,
+ &self.info->outputNodeInfo.audioUnit)
+ operation:"Failed to get output audio unit"];
+
+ //
+ // Add a node input callback for the converter node
+ //
+ AURenderCallbackStruct converterCallback;
+ converterCallback.inputProc = EZOutputConverterInputCallback;
+ converterCallback.inputProcRefCon = (__bridge void *)(self);
+ [EZAudioUtilities checkResult:AUGraphSetNodeInputCallback(self.info->graph,
+ self.info->converterNodeInfo.node,
+ 0,
+ &converterCallback)
+ operation:"Failed to set render callback on converter node"];
+
+ //
+ // Set stream formats
+ //
+ [self setClientFormat:[self defaultClientFormat]];
+ [self setInputFormat:[self defaultInputFormat]];
+
+ //
+ // Use the default device
+ //
+ EZAudioDevice *currentOutputDevice = [EZAudioDevice currentOutputDevice];
+ [self setDevice:currentOutputDevice];
+
+ //
+ // Set maximum frames per slice to 4096 to allow playback during
+ // lock screen (iOS only?)
+ //
+ UInt32 maximumFramesPerSlice = EZOutputMaximumFramesPerSlice;
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->mixerNodeInfo.audioUnit,
+ kAudioUnitProperty_MaximumFramesPerSlice,
+ kAudioUnitScope_Global,
+ 0,
+ &maximumFramesPerSlice,
+ sizeof(maximumFramesPerSlice))
+ operation:"Failed to set maximum frames per slice on mixer node"];
+
+ //
+ // Initialize all the audio units in the graph
+ //
+ [EZAudioUtilities checkResult:AUGraphInitialize(self.info->graph)
+ operation:"Failed to initialize graph"];
+
+ //
+ // Add render callback
+ //
+ [EZAudioUtilities checkResult:AudioUnitAddRenderNotify(self.info->mixerNodeInfo.audioUnit,
+ EZOutputGraphRenderCallback,
+ (__bridge void *)(self))
+ operation:"Failed to add render callback"];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Actions
+//------------------------------------------------------------------------------
+
+- (void)startPlayback
+{
+ //
+ // Start the AUGraph
+ //
+ [EZAudioUtilities checkResult:AUGraphStart(self.info->graph)
+ operation:"Failed to start graph"];
+
+ //
+ // Notify delegate
+ //
+ if ([self.delegate respondsToSelector:@selector(output:changedPlayingState:)])
+ {
+ [self.delegate output:self changedPlayingState:[self isPlaying]];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)stopPlayback
+{
+ //
+ // Stop the AUGraph
+ //
+ [EZAudioUtilities checkResult:AUGraphStop(self.info->graph)
+ operation:"Failed to stop graph"];
+
+ //
+ // Notify delegate
+ //
+ if ([self.delegate respondsToSelector:@selector(output:changedPlayingState:)])
+ {
+ [self.delegate output:self changedPlayingState:[self isPlaying]];
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Getters
+//------------------------------------------------------------------------------
+
+- (AudioStreamBasicDescription)clientFormat
+{
+ return self.info->clientFormat;
+}
+
+//------------------------------------------------------------------------------
+
+- (AudioStreamBasicDescription)inputFormat
+{
+ return self.info->inputFormat;
+}
+
+//------------------------------------------------------------------------------
+
+- (BOOL)isPlaying
+{
+ Boolean isPlaying;
+ [EZAudioUtilities checkResult:AUGraphIsRunning(self.info->graph,
+ &isPlaying)
+ operation:"Failed to check if graph is running"];
+ return isPlaying;
+}
+
+//------------------------------------------------------------------------------
+
+- (float)pan
+{
+ AudioUnitParameterID param;
+#if TARGET_OS_IPHONE
+ param = kMultiChannelMixerParam_Pan;
+#elif TARGET_OS_MAC
+ param = kStereoMixerParam_Pan;
+#endif
+ AudioUnitParameterValue pan;
+ [EZAudioUtilities checkResult:AudioUnitGetParameter(self.info->mixerNodeInfo.audioUnit,
+ param,
+ kAudioUnitScope_Input,
+ 0,
+ &pan) operation:"Failed to get pan from mixer unit"];
+ return pan;
+}
+
+//------------------------------------------------------------------------------
+
+- (float)volume
+{
+ AudioUnitParameterID param;
+#if TARGET_OS_IPHONE
+ param = kMultiChannelMixerParam_Volume;
+#elif TARGET_OS_MAC
+ param = kStereoMixerParam_Volume;
+#endif
+ AudioUnitParameterValue volume;
+ [EZAudioUtilities checkResult:AudioUnitGetParameter(self.info->mixerNodeInfo.audioUnit,
+ param,
+ kAudioUnitScope_Input,
+ 0,
+ &volume)
+ operation:"Failed to get volume from mixer unit"];
+ return volume;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setters
+//------------------------------------------------------------------------------
+
+- (void)setClientFormat:(AudioStreamBasicDescription)clientFormat
+{
+ if (self.floatConverter)
+ {
+ self.floatConverter = nil;
+ [EZAudioUtilities freeFloatBuffers:self.info->floatData
+ numberOfChannels:self.clientFormat.mChannelsPerFrame];
+ }
+
+ self.info->clientFormat = clientFormat;
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->converterNodeInfo.audioUnit,
+ kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Output,
+ 0,
+ &self.info->clientFormat,
+ sizeof(self.info->clientFormat))
+ operation:"Failed to set output client format on converter audio unit"];
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->mixerNodeInfo.audioUnit,
+ kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Input,
+ 0,
+ &self.info->clientFormat,
+ sizeof(self.info->clientFormat))
+ operation:"Failed to set input client format on mixer audio unit"];
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->mixerNodeInfo.audioUnit,
+ kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Output,
+ 0,
+ &self.info->clientFormat,
+ sizeof(self.info->clientFormat))
+ operation:"Failed to set output client format on mixer audio unit"];
+
+ self.floatConverter = [[EZAudioFloatConverter alloc] initWithInputFormat:clientFormat];
+ self.info->floatData = [EZAudioUtilities floatBuffersWithNumberOfFrames:EZOutputMaximumFramesPerSlice
+ numberOfChannels:clientFormat.mChannelsPerFrame];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setDevice:(EZAudioDevice *)device
+{
+#if TARGET_OS_IPHONE
+
+ // if the devices are equal then ignore
+ if ([device isEqual:self.device])
+ {
+ return;
+ }
+
+ NSError *error;
+ [[AVAudioSession sharedInstance] setOutputDataSource:device.dataSource error:&error];
+ if (error)
+ {
+ NSLog(@"Error setting output device data source (%@), reason: %@",
+ device.dataSource,
+ error.localizedDescription);
+ }
+
+#elif TARGET_OS_MAC
+ UInt32 outputEnabled = device.outputChannelCount > 0;
+ NSAssert(outputEnabled, @"Selected EZAudioDevice does not have any output channels");
+ NSAssert([self outputAudioUnitSubType] == kAudioUnitSubType_HALOutput,
+ @"Audio device selection on OSX is only available when using the kAudioUnitSubType_HALOutput output unit subtype");
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->outputNodeInfo.audioUnit,
+ kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Output,
+ 0,
+ &outputEnabled,
+ sizeof(outputEnabled))
+ operation:"Failed to set flag on device output"];
+
+ AudioDeviceID deviceId = device.deviceID;
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->outputNodeInfo.audioUnit,
+ kAudioOutputUnitProperty_CurrentDevice,
+ kAudioUnitScope_Global,
+ 0,
+ &deviceId,
+ sizeof(AudioDeviceID))
+ operation:"Couldn't set default device on I/O unit"];
+#endif
+
+ // store device
+ _device = device;
+
+ // notify delegate
+ if ([self.delegate respondsToSelector:@selector(output:changedDevice:)])
+ {
+ [self.delegate output:self changedDevice:device];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setInputFormat:(AudioStreamBasicDescription)inputFormat
+{
+ self.info->inputFormat = inputFormat;
+ [EZAudioUtilities checkResult:AudioUnitSetProperty(self.info->converterNodeInfo.audioUnit,
+ kAudioUnitProperty_StreamFormat,
+ kAudioUnitScope_Input,
+ 0,
+ &inputFormat,
+ sizeof(inputFormat))
+ operation:"Failed to set input format on converter audio unit"];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setPan:(float)pan
+{
+ AudioUnitParameterID param;
+#if TARGET_OS_IPHONE
+ param = kMultiChannelMixerParam_Pan;
+#elif TARGET_OS_MAC
+ param = kStereoMixerParam_Pan;
+#endif
+ [EZAudioUtilities checkResult:AudioUnitSetParameter(self.info->mixerNodeInfo.audioUnit,
+ param,
+ kAudioUnitScope_Input,
+ 0,
+ pan,
+ 0)
+ operation:"Failed to set volume on mixer unit"];
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setVolume:(float)volume
+{
+ AudioUnitParameterID param;
+#if TARGET_OS_IPHONE
+ param = kMultiChannelMixerParam_Volume;
+#elif TARGET_OS_MAC
+ param = kStereoMixerParam_Volume;
+#endif
+ [EZAudioUtilities checkResult:AudioUnitSetParameter(self.info->mixerNodeInfo.audioUnit,
+ param,
+ kAudioUnitScope_Input,
+ 0,
+ volume,
+ 0)
+ operation:"Failed to set volume on mixer unit"];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Core Audio Properties
+//------------------------------------------------------------------------------
+
+- (AUGraph)graph
+{
+ return self.info->graph;
+}
+
+//------------------------------------------------------------------------------
+
+- (AudioUnit)converterAudioUnit
+{
+ return self.info->converterNodeInfo.audioUnit;
+}
+
+//------------------------------------------------------------------------------
+
+- (AudioUnit)mixerAudioUnit
+{
+ return self.info->mixerNodeInfo.audioUnit;
+}
+
+//------------------------------------------------------------------------------
+
+- (AudioUnit)outputAudioUnit
+{
+ return self.info->outputNodeInfo.audioUnit;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Subclass
+//------------------------------------------------------------------------------
+
+- (OSStatus)connectOutputOfSourceNode:(AUNode)sourceNode
+ sourceNodeOutputBus:(UInt32)sourceNodeOutputBus
+ toDestinationNode:(AUNode)destinationNode
+ destinationNodeInputBus:(UInt32)destinationNodeInputBus
+ inGraph:(AUGraph)graph
+{
+ //
+ // Default implementation is to just connect the source to destination
+ //
+ [EZAudioUtilities checkResult:AUGraphConnectNodeInput(graph,
+ sourceNode,
+ sourceNodeOutputBus,
+ destinationNode,
+ destinationNodeInputBus)
+ operation:"Failed to connect converter node to mixer node"];
+ return noErr;
+}
+
+//------------------------------------------------------------------------------
+
+- (AudioStreamBasicDescription)defaultClientFormat
+{
+ return [EZAudioUtilities stereoFloatNonInterleavedFormatWithSampleRate:EZOutputDefaultSampleRate];
+}
+
+//------------------------------------------------------------------------------
+
+- (AudioStreamBasicDescription)defaultInputFormat
+{
+ return [EZAudioUtilities stereoFloatNonInterleavedFormatWithSampleRate:EZOutputDefaultSampleRate];
+}
+
+//------------------------------------------------------------------------------
+
+- (OSType)outputAudioUnitSubType
+{
+#if TARGET_OS_IPHONE
+ return kAudioUnitSubType_RemoteIO;
+#elif TARGET_OS_MAC
+ return kAudioUnitSubType_HALOutput;
+#endif
+}
+
+//------------------------------------------------------------------------------
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - Callbacks (Implementation)
+//------------------------------------------------------------------------------
+
+OSStatus EZOutputConverterInputCallback(void *inRefCon,
+ AudioUnitRenderActionFlags *ioActionFlags,
+ const AudioTimeStamp *inTimeStamp,
+ UInt32 inBusNumber,
+ UInt32 inNumberFrames,
+ AudioBufferList *ioData)
+{
+ EZOutput *output = (__bridge EZOutput *)inRefCon;
+
+ //
+ // Try to ask the data source for audio data to fill out the output's
+ // buffer list
+ //
+ if ([output.dataSource respondsToSelector:@selector(output:shouldFillAudioBufferList:withNumberOfFrames:timestamp:)])
+ {
+ return [output.dataSource output:output
+ shouldFillAudioBufferList:ioData
+ withNumberOfFrames:inNumberFrames
+ timestamp:inTimeStamp];
+ }
+ else
+ {
+ //
+ // Silence if there is nothing to output
+ //
+ for (int i = 0; i < ioData->mNumberBuffers; i++)
+ {
+ memset(ioData->mBuffers[i].mData,
+ 0,
+ ioData->mBuffers[i].mDataByteSize);
+ }
+ }
+ return noErr;
+}
+
+//------------------------------------------------------------------------------
+
+OSStatus EZOutputGraphRenderCallback(void *inRefCon,
+ AudioUnitRenderActionFlags *ioActionFlags,
+ const AudioTimeStamp *inTimeStamp,
+ UInt32 inBusNumber,
+ UInt32 inNumberFrames,
+ AudioBufferList *ioData)
+{
+ EZOutput *output = (__bridge EZOutput *)inRefCon;
+
+ //
+ // provide the audio received delegate callback
+ //
+ if (*ioActionFlags & kAudioUnitRenderAction_PostRender)
+ {
+ if ([output.delegate respondsToSelector:@selector(output:playedAudio:withBufferSize:withNumberOfChannels:)])
+ {
+ UInt32 frames = ioData->mBuffers[0].mDataByteSize / output.info->clientFormat.mBytesPerFrame;
+ [output.floatConverter convertDataFromAudioBufferList:ioData
+ withNumberOfFrames:frames
+ toFloatBuffers:output.info->floatData];
+ [output.delegate output:output
+ playedAudio:output.info->floatData
+ withBufferSize:inNumberFrames
+ withNumberOfChannels:output.info->clientFormat.mChannelsPerFrame];
+ }
+ }
+ return noErr;
+}
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZPlot.h b/Pods/EZAudio/EZAudio/EZPlot.h
new file mode 100644
index 0000000..bbde1f5
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZPlot.h
@@ -0,0 +1,142 @@
+//
+// EZPlot.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 11/24/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import "EZAudioUtilities.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Enumerations
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Plot Types
+///-----------------------------------------------------------
+
+/**
+ The types of plots that can be displayed in the view using the data.
+ */
+typedef NS_ENUM(NSInteger, EZPlotType)
+{
+ /**
+ Plot that displays only the samples of the current buffer
+ */
+ EZPlotTypeBuffer,
+
+ /**
+ Plot that displays a rolling history of values using the RMS calculated for each incoming buffer
+ */
+ EZPlotTypeRolling
+};
+
+/**
+ EZPlot is a cross-platform (iOS and OSX) class used to subclass the default view type (either UIView or NSView, respectively).
+
+ ## Subclassing Notes
+
+ This class isn't meant to be used directly in practice, but instead establishes the default properties and behaviors subclasses should obey to provide consistent behavior across multiple types of graphs (i.e. background color, plot type, should fill, etc.). Subclasses should make use of the inherited properties from this class so that all child plots benefit from the same set of customization options.
+ */
+#if TARGET_OS_IPHONE
+#import <UIKit/UIKit.h>
+@interface EZPlot : UIView
+#elif TARGET_OS_MAC
+#import <Cocoa/Cocoa.h>
+@interface EZPlot : NSView
+#endif
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Customizing The Plot's Appearance
+///-----------------------------------------------------------
+/**
+ The default background color of the plot. For iOS the color is specified as a UIColor while for OSX the color is an NSColor. The default value on both platforms is black.
+ */
+#if TARGET_OS_IPHONE
+@property (nonatomic, strong) IBInspectable UIColor *backgroundColor;
+#elif TARGET_OS_MAC
+@property (nonatomic, strong) IBInspectable NSColor *backgroundColor;
+#endif
+
+/**
+ The default color of the plot's data (i.e. waveform, y-axis values). For iOS the color is specified as a UIColor while for OSX the color is an NSColor. The default value on both platforms is red.
+ */
+#if TARGET_OS_IPHONE
+@property (nonatomic, strong) IBInspectable UIColor *color;
+#elif TARGET_OS_MAC
+@property (nonatomic, strong) IBInspectable NSColor *color;
+#endif
+
+/**
+ The plot's gain value, which controls the scale of the y-axis values. The default value of the gain is 1.0f and should always be greater than 0.0f.
+ */
+@property (nonatomic, assign) IBInspectable float gain;
+
+/**
+ The type of plot as specified by the `EZPlotType` enumeration (i.e. a buffer or rolling plot type).
+ */
+@property (nonatomic, assign) IBInspectable EZPlotType plotType;
+
+/**
+ A boolean indicating whether or not to fill in the graph. A value of YES will make a filled graph (filling in the space between the x-axis and the y-value), while a value of NO will create a stroked graph (connecting the points along the y-axis).
+ */
+@property (nonatomic, assign) IBInspectable BOOL shouldFill;
+
+/**
+ A boolean indicating whether the graph should be rotated along the x-axis to give a mirrored reflection. This is typical for audio plots to produce the classic waveform look. A value of YES will produce a mirrored reflection of the y-values about the x-axis, while a value of NO will only plot the y-values.
+ */
+@property (nonatomic, assign) IBInspectable BOOL shouldMirror;
+
+//------------------------------------------------------------------------------
+#pragma mark - Clearing
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Clearing The Plot
+///-----------------------------------------------------------
+
+/**
+ Clears all data from the audio plot (includes both EZPlotTypeBuffer and EZPlotTypeRolling)
+ */
+-(void)clear;
+
+//------------------------------------------------------------------------------
+#pragma mark - Get Samples
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Updating The Plot
+///-----------------------------------------------------------
+
+/**
+ Updates the plot with the new buffer data and tells the view to redraw itself. The caller provides a float array with the values they expect to see on the y-axis. The plot will internally handle mapping the x-axis and y-axis to the current viewport, any interpolation for fill effects, and mirroring.
+ @param buffer A float array of values to map to the y-axis.
+ @param bufferSize The size of the float array that will be mapped to the y-axis.
+ @warning The bufferSize is expected to be the same, constant value once initially triggered. For plots using OpenGL a vertex buffer object will be allocated with a maximum buffer size of (2 * the initially given buffer size) to account for any interpolation necessary for filling in the graph. Updates use the glBufferSubData(...) function, which will crash if the buffer size exceeds the initial maximum allocated size.
+ */
+-(void)updateBuffer:(float *)buffer withBufferSize:(UInt32)bufferSize;
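+
+// A typical usage sketch (illustrative only, not part of the library): feeding
+// samples from the EZMicrophoneDelegate callback into a plot (here a hypothetical
+// `self.plot` property) on the main thread.
+//
+// - (void)microphone:(EZMicrophone *)microphone
+// hasAudioReceived:(float **)buffer
+// withBufferSize:(UInt32)bufferSize
+// withNumberOfChannels:(UInt32)numberOfChannels
+// {
+// __weak typeof (self) weakSelf = self;
+// dispatch_async(dispatch_get_main_queue(), ^{
+// [weakSelf.plot updateBuffer:buffer[0] withBufferSize:bufferSize];
+// });
+// }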
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZPlot.m b/Pods/EZAudio/EZAudio/EZPlot.m
new file mode 100644
index 0000000..f8ac3b0
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZPlot.m
@@ -0,0 +1,43 @@
+//
+// EZPlot.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 11/24/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZPlot.h"
+
+@implementation EZPlot
+
+#pragma mark - Clearing
+-(void)clear
+{
+ // Override in subclass
+}
+
+#pragma mark - Get Samples
+-(void)updateBuffer:(float *)buffer
+ withBufferSize:(UInt32)bufferSize
+{
+ // Override in subclass
+}
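+
+// What a concrete subclass (a hypothetical `MyWaveformPlot`, not part of
+// EZAudio) would typically do in these overrides:
+//
+// - (void)updateBuffer:(float *)buffer withBufferSize:(UInt32)bufferSize
+// {
+// // copy or downsample the incoming samples, apply the inherited gain,
+// // mirror, and fill settings, then trigger a redraw of the view
+// }
+//
+// - (void)clear
+// {
+// // reset any cached sample data and redraw an empty plot
+// }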
+
+@end
diff --git a/Pods/EZAudio/EZAudio/EZRecorder.h b/Pods/EZAudio/EZAudio/EZRecorder.h
new file mode 100644
index 0000000..95d5c05
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZRecorder.h
@@ -0,0 +1,364 @@
+//
+// EZRecorder.h
+// EZAudio
+//
+// Created by Syed Haris Ali on 12/1/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import <Foundation/Foundation.h>
+#import <AudioToolbox/AudioToolbox.h>
+
+@class EZRecorder;
+
+//------------------------------------------------------------------------------
+#pragma mark - Data Structures
+//------------------------------------------------------------------------------
+
+/**
+ To ensure valid recording formats are used when recording to a file the EZRecorderFileType describes the most common file types that a file can be encoded in. Each of these types can be used to output recordings as such:
+
+ EZRecorderFileTypeAIFF - .aif, .aiff, .aifc, .aac
+ EZRecorderFileTypeM4A - .m4a, .mp4
+ EZRecorderFileTypeWAV - .wav
+
+ */
+typedef NS_ENUM(NSInteger, EZRecorderFileType)
+{
+ /**
+ Recording format that describes AIFF file types. These are uncompressed, LPCM files that are completely lossless, but are large in file size.
+ */
+ EZRecorderFileTypeAIFF,
+ /**
+ Recording format that describes M4A file types. These are compressed, but yield great results especially when file size is an issue.
+ */
+ EZRecorderFileTypeM4A,
+ /**
+ Recording format that describes WAV file types. These are uncompressed, LPCM files that are completely lossless, but are large in file size.
+ */
+ EZRecorderFileTypeWAV
+};
+
+//------------------------------------------------------------------------------
+#pragma mark - EZRecorderDelegate
+//------------------------------------------------------------------------------
+
+/**
+ The EZRecorderDelegate for the EZRecorder provides a receiver for write events, `recorderUpdatedCurrentTime:`, and the close event, `recorderDidClose:`.
+ */
+@protocol EZRecorderDelegate <NSObject>
+
+@optional
+
+/**
+ Triggers when the EZRecorder is explicitly closed with the `closeAudioFile` method.
+ @param recorder The EZRecorder instance that triggered the action
+ */
+- (void)recorderDidClose:(EZRecorder *)recorder;
+
+/**
+ Triggers after the EZRecorder has successfully written audio data from the `appendDataFromBufferList:withBufferSize:` method.
+ @param recorder The EZRecorder instance that triggered the action
+ */
+- (void)recorderUpdatedCurrentTime:(EZRecorder *)recorder;
+
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZRecorder
+//------------------------------------------------------------------------------
+
+/**
+ The EZRecorder provides a flexible way to create an audio file and append raw audio data to it. The EZRecorder will convert the incoming audio on the fly to the destination format so no conversion is needed between this and any other component. The supported output file types are described by the `EZRecorderFileType` enumeration (AIFF, M4A, and WAV). Each output file should have its own EZRecorder instance (think 1 EZRecorder = 1 audio file).
+ */
+@interface EZRecorder : NSObject
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+/**
+ An EZRecorderDelegate to listen for the write and close events.
+ */
+@property (nonatomic, weak) id<EZRecorderDelegate> delegate;
+
+//------------------------------------------------------------------------------
+#pragma mark - Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Initializers
+///-----------------------------------------------------------
+
+/**
+ Creates an instance of the EZRecorder with a file path URL to write out the file to, a client format describing the in-application common format (see `clientFormat` for more info), and a file type (see `EZRecorderFileType`) that will automatically create an internal `fileFormat` and audio file type hint.
+ @param url An NSURL representing the file path the output file should be written
+ @param clientFormat An AudioStreamBasicDescription describing the in-application common format (always linear PCM)
+ @param fileType A constant described by the EZRecorderFileType that corresponds to the type of destination file that should be written. For instance, an AAC file written using an '.m4a' extension would correspond to EZRecorderFileTypeM4A. See EZRecorderFileType for all the constants and mapping combinations.
+ @return A newly created EZRecorder instance.
+ */
+- (instancetype)initWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileType:(EZRecorderFileType)fileType;
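+
+// A typical lifecycle sketch (illustrative only; the `fileURL`, `self.recorder`,
+// and `self.microphone` names are assumptions of the example, not part of the
+// library):
+//
+// self.recorder = [EZRecorder recorderWithURL:fileURL
+// clientFormat:[self.microphone audioStreamBasicDescription]
+// fileType:EZRecorderFileTypeM4A
+// delegate:self];
+//
+// // append audio as it arrives, e.g. from EZMicrophoneDelegate's
+// // microphone:hasBufferList:withBufferSize:withNumberOfChannels: callback
+// [self.recorder appendDataFromBufferList:bufferList withBufferSize:bufferSize];
+//
+// // finish writes and close the file when done
+// [self.recorder closeAudioFile];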
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an instance of the EZRecorder with a file path URL to write out the file to, a client format describing the in-application common format (see `clientFormat` for more info), and a file type (see `EZRecorderFileType`) that will automatically create an internal `fileFormat` and audio file type hint, as well as a delegate to respond to the recorder's write and close events.
+ @param url An NSURL representing the file path the output file should be written
+ @param clientFormat An AudioStreamBasicDescription describing the in-application common format (always linear PCM)
+ @param fileType A constant described by the EZRecorderFileType that corresponds to the type of destination file that should be written. For instance, an AAC file written using an '.m4a' extension would correspond to EZRecorderFileTypeM4A. See EZRecorderFileType for all the constants and mapping combinations.
+ @param delegate An EZRecorderDelegate to listen for the recorder's write and close events.
+ @return A newly created EZRecorder instance.
+ */
+- (instancetype)initWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileType:(EZRecorderFileType)fileType
+ delegate:(id<EZRecorderDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an instance of the EZRecorder with a file path URL to write out the file to, a client format describing the in-application common format (see `clientFormat` for more info), a file format describing the destination format on disk (see `fileFormat` for more info), and an audio file type (an AudioFileTypeID for Core Audio, not a EZRecorderFileType).
+ @param url An NSURL representing the file path the output file should be written
+ @param clientFormat An AudioStreamBasicDescription describing the in-application common format (always linear PCM)
+ @param fileFormat An AudioStreamBasicDescription describing the format of the audio being written to disk (MP3, AAC, WAV, etc)
+ @param audioFileTypeID An AudioFileTypeID that matches your fileFormat (i.e. kAudioFileM4AType for an M4A format)
+ @return A newly created EZRecorder instance.
+ */
+- (instancetype)initWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileFormat:(AudioStreamBasicDescription)fileFormat
+ audioFileTypeID:(AudioFileTypeID)audioFileTypeID;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates an instance of the EZRecorder with a file path URL to write out the file to, a client format describing the in-application common format (see `clientFormat` for more info), a file format describing the destination format on disk (see `fileFormat` for more info), an audio file type (an AudioFileTypeID for Core Audio, not a EZRecorderFileType), and delegate to respond to the recorder's write and close events.
+ @param url An NSURL representing the file path the output file should be written
+ @param clientFormat An AudioStreamBasicDescription describing the in-application common format (always linear PCM)
+ @param fileFormat An AudioStreamBasicDescription describing the format of the audio being written to disk (MP3, AAC, WAV, etc)
+ @param audioFileTypeID An AudioFileTypeID that matches your fileFormat (i.e. kAudioFileM4AType for an M4A format)
+ @param delegate An EZRecorderDelegate to listen for the recorder's write and close events.
+ @return A newly created EZRecorder instance.
+ */
+- (instancetype)initWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileFormat:(AudioStreamBasicDescription)fileFormat
+ audioFileTypeID:(AudioFileTypeID)audioFileTypeID
+ delegate:(id<EZRecorderDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Creates a new instance of an EZRecorder using a destination file path URL and the source format of the incoming audio.
+ @param url An NSURL specifying the file path location of where the audio file should be written to.
+ @param sourceFormat The AudioStreamBasicDescription for the incoming audio that will be written to the file.
+ @param destinationFileType A constant described by the EZRecorderFileType that corresponds to the type of destination file that should be written. For instance, an AAC file written using an '.m4a' extension would correspond to EZRecorderFileTypeM4A. See EZRecorderFileType for all the constants and mapping combinations.
+ @deprecated This initializer is deprecated starting in version 0.8.0.
+ @note Please use the `initWithURL:clientFormat:fileType:` initializer instead.
+ @return The newly created EZRecorder instance.
+ */
+- (instancetype)initWithDestinationURL:(NSURL*)url
+ sourceFormat:(AudioStreamBasicDescription)sourceFormat
+ destinationFileType:(EZRecorderFileType)destinationFileType __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Class Initializers
+///-----------------------------------------------------------
+
+/**
+ Class method to create an instance of the EZRecorder with a file path URL to write out the file to, a client format describing the in-application common format (see `clientFormat` for more info), and a file type (see `EZRecorderFileType`) that will automatically create an internal `fileFormat` and audio file type hint.
+ @param url An NSURL representing the file path the output file should be written
+ @param clientFormat An AudioStreamBasicDescription describing the in-application common format (always linear PCM)
+ @param fileType A constant described by the EZRecorderFileType that corresponds to the type of destination file that should be written. For instance, an AAC file written using an '.m4a' extension would correspond to EZRecorderFileTypeM4A. See EZRecorderFileType for all the constants and mapping combinations.
+ @return A newly created EZRecorder instance.
+ */
++ (instancetype)recorderWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileType:(EZRecorderFileType)fileType;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class method to create an instance of the EZRecorder with a file path URL to write out the file to, a client format describing the in-application common format (see `clientFormat` for more info), and a file type (see `EZRecorderFileType`) that will automatically create an internal `fileFormat` and audio file type hint, as well as a delegate to respond to the recorder's write and close events.
+ @param url An NSURL representing the file path the output file should be written
+ @param clientFormat An AudioStreamBasicDescription describing the in-application common format (always linear PCM)
+ @param fileType A constant described by the EZRecorderFileType that corresponds to the type of destination file that should be written. For instance, an AAC file written using an '.m4a' extension would correspond to EZRecorderFileTypeM4A. See EZRecorderFileType for all the constants and mapping combinations.
+ @param delegate An EZRecorderDelegate to listen for the recorder's write and close events.
+ @return A newly created EZRecorder instance.
+ */
++ (instancetype)recorderWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileType:(EZRecorderFileType)fileType
+ delegate:(id<EZRecorderDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class method to create an instance of the EZRecorder with a file path URL to write out the file to, a client format describing the in-application common format (see `clientFormat` for more info), a file format describing the destination format on disk (see `fileFormat` for more info), and an audio file type (an AudioFileTypeID for Core Audio, not a EZRecorderFileType).
+ @param url An NSURL representing the file path the output file should be written
+ @param clientFormat An AudioStreamBasicDescription describing the in-application common format (always linear PCM)
+ @param fileFormat An AudioStreamBasicDescription describing the format of the audio being written to disk (MP3, AAC, WAV, etc)
+ @param audioFileTypeID An AudioFileTypeID that matches your fileFormat (i.e. kAudioFileM4AType for an M4A format)
+ @return A newly created EZRecorder instance.
+ */
++ (instancetype)recorderWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileFormat:(AudioStreamBasicDescription)fileFormat
+ audioFileTypeID:(AudioFileTypeID)audioFileTypeID;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class method to create an instance of the EZRecorder with a file path URL to write out the file to, a client format describing the in-application common format (see `clientFormat` for more info), a file format describing the destination format on disk (see `fileFormat` for more info), an audio file type (an AudioFileTypeID for Core Audio, not a EZRecorderFileType), and delegate to respond to the recorder's write and close events.
+ @param url An NSURL representing the file path the output file should be written
+ @param clientFormat An AudioStreamBasicDescription describing the in-application common format (always linear PCM)
+ @param fileFormat An AudioStreamBasicDescription describing the format of the audio being written to disk (MP3, AAC, WAV, etc)
+ @param audioFileTypeID An AudioFileTypeID that matches your fileFormat (i.e. kAudioFileM4AType for an M4A format)
+ @param delegate An EZRecorderDelegate to listen for the recorder's write and close events.
+ @return A newly created EZRecorder instance.
+ */
++ (instancetype)recorderWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileFormat:(AudioStreamBasicDescription)fileFormat
+ audioFileTypeID:(AudioFileTypeID)audioFileTypeID
+ delegate:(id<EZRecorderDelegate>)delegate;
+
+//------------------------------------------------------------------------------
+
+/**
+ Class method to create a new instance of an EZRecorder using a destination file path URL and the source format of the incoming audio.
+ @param url An NSURL specifying the file path location of where the audio file should be written to.
+ @param sourceFormat The AudioStreamBasicDescription for the incoming audio that will be written to the file (also called the `clientFormat`).
+ @param destinationFileType A constant described by the EZRecorderFileType that corresponds to the type of destination file that should be written. For instance, an AAC file written using an '.m4a' extension would correspond to EZRecorderFileTypeM4A. See EZRecorderFileType for all the constants and mapping combinations.
+ @return The newly created EZRecorder instance.
+ */
++ (instancetype)recorderWithDestinationURL:(NSURL*)url
+ sourceFormat:(AudioStreamBasicDescription)sourceFormat
+ destinationFileType:(EZRecorderFileType)destinationFileType __attribute__((deprecated));
+
+//------------------------------------------------------------------------------
+#pragma mark - Properties
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Getting The Recorder's Properties
+///-----------------------------------------------------------
+
+/**
+ Provides the common AudioStreamBasicDescription that will be used for in-app interaction. The recorder's format will be converted from this format to the `fileFormat`. For instance, the file on disk could be a 22.05 kHz, float format, but we might have an audio processing graph that has a 44.1 kHz, signed integer format that we'd like to interact with. The client format lets us set that 44.1 kHz format on the recorder to properly write samples from the graph out to the file in the desired destination format.
+ @warning This must be a linear PCM format!
+ @return An AudioStreamBasicDescription structure describing the format of the audio file.
+ */
+@property (readwrite) AudioStreamBasicDescription clientFormat;
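+
+// For illustration (values are hypothetical): a recorder writing a compressed
+// M4A file while the rest of the audio graph works in a stereo, interleaved
+// float format could set
+//
+// recorder.clientFormat = [EZAudioUtilities stereoFloatInterleavedFormatWithSampleRate:44100.0f];
+//
+// and the recorder converts from that linear PCM client format to the
+// `fileFormat` on disk as data is appended.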
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current write offset in the audio file as an NSTimeInterval (i.e. in seconds).
+ This value is read-only on the recorder and advances as audio data is appended to the file (see `frameIndex`).
+ */
+@property (readonly) NSTimeInterval currentTime;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the duration of the audio file in seconds.
+ */
+@property (readonly) NSTimeInterval duration;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the AudioStreamBasicDescription structure containing the format of the recorder's audio file.
+ @return An AudioStreamBasicDescription structure describing the format of the audio file.
+ */
+@property (readonly) AudioStreamBasicDescription fileFormat;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the current time as an NSString with the time format MM:SS.
+ */
+@property (readonly) NSString *formattedCurrentTime;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the duration as an NSString with the time format MM:SS.
+ */
+@property (readonly) NSString *formattedDuration;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the frame index (a.k.a. the write position) within the audio file as a SInt64. This can be helpful when tracking how much audio has been written to the file.
+ @return The current frame index within the audio file as a SInt64.
+ */
+@property (readonly) SInt64 frameIndex;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the total frame count of the recorder's audio file in the file format.
+ @return The total number of frames in the recorder in the AudioStreamBasicDescription representing the file format as a SInt64.
+ */
+@property (readonly) SInt64 totalFrames;
+
+//------------------------------------------------------------------------------
+
+/**
+ Provides the file path that's currently being used by the recorder.
+ @return The NSURL representing the file path of the recorder path being used for recording.
+ */
+- (NSURL *)url;
+
+//------------------------------------------------------------------------------
+#pragma mark - Events
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Appending Data To The Recorder
+///-----------------------------------------------------------
+
+/**
+ Appends audio data to the tail of the output file from an AudioBufferList.
+ @param bufferList The AudioBufferList holding the audio data to append
+ @param bufferSize The size of each of the buffers in the buffer list.
+ */
+- (void)appendDataFromBufferList:(AudioBufferList *)bufferList
+ withBufferSize:(UInt32)bufferSize;
+
+//------------------------------------------------------------------------------
+
+///-----------------------------------------------------------
+/// @name Closing The Recorder
+///-----------------------------------------------------------
+
+/**
+ Finishes writes to the recorder's audio file and closes it.
+ */
+- (void)closeAudioFile;
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/EZRecorder.m b/Pods/EZAudio/EZAudio/EZRecorder.m
new file mode 100644
index 0000000..0b61b98
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/EZRecorder.m
@@ -0,0 +1,456 @@
+//
+// EZRecorder.m
+// EZAudio
+//
+// Created by Syed Haris Ali on 12/1/13.
+// Copyright (c) 2015 Syed Haris Ali. All rights reserved.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and associated documentation files (the "Software"), to deal
+// in the Software without restriction, including without limitation the rights
+// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+// copies of the Software, and to permit persons to whom the Software is
+// furnished to do so, subject to the following conditions:
+//
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+// THE SOFTWARE.
+
+#import "EZRecorder.h"
+#import "EZAudioUtilities.h"
+
+//------------------------------------------------------------------------------
+#pragma mark - Data Structures
+//------------------------------------------------------------------------------
+
+typedef struct
+{
+ AudioFileTypeID audioFileTypeID;
+ ExtAudioFileRef extAudioFileRef;
+ AudioStreamBasicDescription clientFormat;
+ BOOL closed;
+ CFURLRef fileURL;
+ AudioStreamBasicDescription fileFormat;
+} EZRecorderInfo;
+
+//------------------------------------------------------------------------------
+#pragma mark - EZRecorder (Interface Extension)
+//------------------------------------------------------------------------------
+
+@interface EZRecorder ()
+@property (nonatomic, assign) EZRecorderInfo *info;
+@end
+
+//------------------------------------------------------------------------------
+#pragma mark - EZRecorder (Implementation)
+//------------------------------------------------------------------------------
+
+@implementation EZRecorder
+
+//------------------------------------------------------------------------------
+#pragma mark - Dealloc
+//------------------------------------------------------------------------------
+
+- (void)dealloc
+{
+ if (!self.info->closed)
+ {
+ [self closeAudioFile];
+ }
+ free(self.info);
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Initializers
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileType:(EZRecorderFileType)fileType
+{
+ return [self initWithURL:url
+ clientFormat:clientFormat
+ fileType:fileType
+ delegate:nil];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileType:(EZRecorderFileType)fileType
+ delegate:(id<EZRecorderDelegate>)delegate
+{
+ AudioStreamBasicDescription fileFormat = [EZRecorder formatForFileType:fileType
+ withSourceFormat:clientFormat];
+ AudioFileTypeID audioFileTypeID = [EZRecorder fileTypeIdForFileType:fileType
+ withSourceFormat:clientFormat];
+ return [self initWithURL:url
+ clientFormat:clientFormat
+ fileFormat:fileFormat
+ audioFileTypeID:audioFileTypeID
+ delegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileFormat:(AudioStreamBasicDescription)fileFormat
+ audioFileTypeID:(AudioFileTypeID)audioFileTypeID
+{
+ return [self initWithURL:url
+ clientFormat:clientFormat
+ fileFormat:fileFormat
+ audioFileTypeID:audioFileTypeID
+ delegate:nil];
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileFormat:(AudioStreamBasicDescription)fileFormat
+ audioFileTypeID:(AudioFileTypeID)audioFileTypeID
+ delegate:(id<EZRecorderDelegate>)delegate
+{
+
+ self = [super init];
+ if (self)
+ {
+ // Set defaults
+ self.info = (EZRecorderInfo *)calloc(1, sizeof(EZRecorderInfo));
+ self.info->audioFileTypeID = audioFileTypeID;
+ self.info->fileURL = (__bridge CFURLRef)url;
+ self.info->clientFormat = clientFormat;
+ self.info->fileFormat = fileFormat;
+ self.delegate = delegate;
+ [self setup];
+ }
+ return self;
+}
+
+//------------------------------------------------------------------------------
+
+- (instancetype)initWithDestinationURL:(NSURL*)url
+ sourceFormat:(AudioStreamBasicDescription)sourceFormat
+ destinationFileType:(EZRecorderFileType)destinationFileType
+{
+ return [self initWithURL:url
+ clientFormat:sourceFormat
+ fileType:destinationFileType];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Initializers
+//------------------------------------------------------------------------------
+
++ (instancetype)recorderWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileType:(EZRecorderFileType)fileType
+{
+ return [[self alloc] initWithURL:url
+ clientFormat:clientFormat
+ fileType:fileType];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)recorderWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileType:(EZRecorderFileType)fileType
+ delegate:(id<EZRecorderDelegate>)delegate
+{
+ return [[self alloc] initWithURL:url
+ clientFormat:clientFormat
+ fileType:fileType
+ delegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)recorderWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileFormat:(AudioStreamBasicDescription)fileFormat
+ audioFileTypeID:(AudioFileTypeID)audioFileTypeID
+{
+ return [[self alloc] initWithURL:url
+ clientFormat:clientFormat
+ fileFormat:fileFormat
+ audioFileTypeID:audioFileTypeID];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)recorderWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileFormat:(AudioStreamBasicDescription)fileFormat
+ audioFileTypeID:(AudioFileTypeID)audioFileTypeID
+ delegate:(id<EZRecorderDelegate>)delegate
+{
+ return [[self alloc] initWithURL:url
+ clientFormat:clientFormat
+ fileFormat:fileFormat
+ audioFileTypeID:audioFileTypeID
+ delegate:delegate];
+}
+
+//------------------------------------------------------------------------------
+
++ (instancetype)recorderWithDestinationURL:(NSURL*)url
+ sourceFormat:(AudioStreamBasicDescription)sourceFormat
+ destinationFileType:(EZRecorderFileType)destinationFileType
+{
+ return [[EZRecorder alloc] initWithDestinationURL:url
+ sourceFormat:sourceFormat
+ destinationFileType:destinationFileType];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Class Methods
+//------------------------------------------------------------------------------
+
++ (AudioStreamBasicDescription)formatForFileType:(EZRecorderFileType)fileType
+ withSourceFormat:(AudioStreamBasicDescription)sourceFormat
+{
+ AudioStreamBasicDescription asbd;
+ switch (fileType)
+ {
+ case EZRecorderFileTypeAIFF:
+ asbd = [EZAudioUtilities AIFFFormatWithNumberOfChannels:sourceFormat.mChannelsPerFrame
+ sampleRate:sourceFormat.mSampleRate];
+ break;
+ case EZRecorderFileTypeM4A:
+ asbd = [EZAudioUtilities M4AFormatWithNumberOfChannels:sourceFormat.mChannelsPerFrame
+ sampleRate:sourceFormat.mSampleRate];
+ break;
+
+ case EZRecorderFileTypeWAV:
+ asbd = [EZAudioUtilities stereoFloatInterleavedFormatWithSampleRate:sourceFormat.mSampleRate];
+ break;
+
+ default:
+ asbd = [EZAudioUtilities stereoCanonicalNonInterleavedFormatWithSampleRate:sourceFormat.mSampleRate];
+ break;
+ }
+ return asbd;
+}
+
+//------------------------------------------------------------------------------
+
++ (AudioFileTypeID)fileTypeIdForFileType:(EZRecorderFileType)fileType
+ withSourceFormat:(AudioStreamBasicDescription)sourceFormat
+{
+ AudioFileTypeID audioFileTypeID;
+ switch (fileType)
+ {
+ case EZRecorderFileTypeAIFF:
+ audioFileTypeID = kAudioFileAIFFType;
+ break;
+
+ case EZRecorderFileTypeM4A:
+ audioFileTypeID = kAudioFileM4AType;
+ break;
+
+ case EZRecorderFileTypeWAV:
+ audioFileTypeID = kAudioFileWAVEType;
+ break;
+
+ default:
+ audioFileTypeID = kAudioFileWAVEType;
+ break;
+ }
+ return audioFileTypeID;
+}
+
+//------------------------------------------------------------------------------
+
+- (void)setup
+{
+ // Finish filling out the destination format description
+ UInt32 propSize = sizeof(self.info->fileFormat);
+ [EZAudioUtilities checkResult:AudioFormatGetProperty(kAudioFormatProperty_FormatInfo,
+ 0,
+ NULL,
+ &propSize,
+ &self.info->fileFormat)
+ operation:"Failed to fill out rest of destination format"];
+
+ //
+ // Create the audio file
+ //
+ [EZAudioUtilities checkResult:ExtAudioFileCreateWithURL(self.info->fileURL,
+ self.info->audioFileTypeID,
+ &self.info->fileFormat,
+ NULL,
+ kAudioFileFlags_EraseFile,
+ &self.info->extAudioFileRef)
+ operation:"Failed to create audio file"];
+
+ //
+ // Set the client format
+ //
+ [self setClientFormat:self.info->clientFormat];
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Events
+//------------------------------------------------------------------------------
+
+- (void)appendDataFromBufferList:(AudioBufferList *)bufferList
+ withBufferSize:(UInt32)bufferSize
+{
+ //
+ // Make sure the audio file is not closed
+ //
+ NSAssert(!self.info->closed, @"Cannot append data when EZRecorder has been closed. You must create a new instance.");
+
+ //
+ // Perform the write
+ //
+ [EZAudioUtilities checkResult:ExtAudioFileWrite(self.info->extAudioFileRef,
+ bufferSize,
+ bufferList)
+ operation:"Failed to write audio data to recorded audio file"];
+
+ //
+ // Notify delegate
+ //
+ if ([self.delegate respondsToSelector:@selector(recorderUpdatedCurrentTime:)])
+ {
+ [self.delegate recorderUpdatedCurrentTime:self];
+ }
+}
+
+//------------------------------------------------------------------------------
+
+- (void)closeAudioFile
+{
+ if (!self.info->closed)
+ {
+ //
+ // Close; the audio file can no longer be written to
+ //
+ [EZAudioUtilities checkResult:ExtAudioFileDispose(self.info->extAudioFileRef)
+ operation:"Failed to close audio file"];
+ self.info->closed = YES;
+
+ //
+ // Notify delegate
+ //
+ if ([self.delegate respondsToSelector:@selector(recorderDidClose:)])
+ {
+ [self.delegate recorderDidClose:self];
+ }
+ }
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Getters
+//------------------------------------------------------------------------------
+
+- (AudioStreamBasicDescription)clientFormat
+{
+ return self.info->clientFormat;
+}
+
+//-----------------------------------------------------------------------------
+
+- (NSTimeInterval)currentTime
+{
+ NSTimeInterval currentTime = 0.0;
+ NSTimeInterval duration = [self duration];
+ if (duration != 0.0)
+ {
+ currentTime = (NSTimeInterval)[EZAudioUtilities MAP:(float)[self frameIndex]
+ leftMin:0.0f
+ leftMax:(float)[self totalFrames]
+ rightMin:0.0f
+ rightMax:duration];
+ }
+ return currentTime;
+}
+
+//------------------------------------------------------------------------------
+
+- (NSTimeInterval)duration
+{
+ NSTimeInterval frames = (NSTimeInterval)[self totalFrames];
+ return (NSTimeInterval) frames / self.info->fileFormat.mSampleRate;
+}
+
+//------------------------------------------------------------------------------
+
+- (AudioStreamBasicDescription)fileFormat
+{
+ return self.info->fileFormat;
+}
+
+//------------------------------------------------------------------------------
+
+- (NSString *)formattedCurrentTime
+{
+ return [EZAudioUtilities displayTimeStringFromSeconds:[self currentTime]];
+}
+
+//------------------------------------------------------------------------------
+
+- (NSString *)formattedDuration
+{
+ return [EZAudioUtilities displayTimeStringFromSeconds:[self duration]];
+}
+
+//------------------------------------------------------------------------------
+
+- (SInt64)frameIndex
+{
+ SInt64 frameIndex;
+ [EZAudioUtilities checkResult:ExtAudioFileTell(self.info->extAudioFileRef,
+ &frameIndex)
+ operation:"Failed to get frame index"];
+ return frameIndex;
+}
+
+//------------------------------------------------------------------------------
+
+- (SInt64)totalFrames
+{
+ SInt64 totalFrames;
+ UInt32 propSize = sizeof(SInt64);
+ [EZAudioUtilities checkResult:ExtAudioFileGetProperty(self.info->extAudioFileRef,
+ kExtAudioFileProperty_FileLengthFrames,
+ &propSize,
+ &totalFrames)
+ operation:"Recorder failed to get total frames."];
+ return totalFrames;
+}
+
+//------------------------------------------------------------------------------
+
+- (NSURL *)url
+{
+ return (__bridge NSURL*)self.info->fileURL;
+}
+
+//------------------------------------------------------------------------------
+#pragma mark - Setters
+//------------------------------------------------------------------------------
+
+- (void)setClientFormat:(AudioStreamBasicDescription)clientFormat
+{
+ [EZAudioUtilities checkResult:ExtAudioFileSetProperty(self.info->extAudioFileRef,
+ kExtAudioFileProperty_ClientDataFormat,
+ sizeof(clientFormat),
+ &clientFormat)
+ operation:"Failed to set client format on recorded audio file"];
+ self.info->clientFormat = clientFormat;
+}
+
+@end
\ No newline at end of file
diff --git a/Pods/EZAudio/EZAudio/TPCircularBuffer.c b/Pods/EZAudio/EZAudio/TPCircularBuffer.c
new file mode 100644
index 0000000..2493158
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/TPCircularBuffer.c
@@ -0,0 +1,136 @@
+//
+// TPCircularBuffer.c
+// Circular/Ring buffer implementation
+//
+// https://github.com/michaeltyson/TPCircularBuffer
+//
+// Created by Michael Tyson on 10/12/2011.
+//
+// Copyright (C) 2012-2013 A Tasty Pixel
+//
+// This software is provided 'as-is', without any express or implied
+// warranty. In no event will the authors be held liable for any damages
+// arising from the use of this software.
+//
+// Permission is granted to anyone to use this software for any purpose,
+// including commercial applications, and to alter it and redistribute it
+// freely, subject to the following restrictions:
+//
+// 1. The origin of this software must not be misrepresented; you must not
+// claim that you wrote the original software. If you use this software
+// in a product, an acknowledgment in the product documentation would be
+// appreciated but is not required.
+//
+// 2. Altered source versions must be plainly marked as such, and must not be
+// misrepresented as being the original software.
+//
+// 3. This notice may not be removed or altered from any source distribution.
+//
+
+#include "TPCircularBuffer.h"
+#include <mach/mach.h>
+#include <stdio.h>
+
+#define reportResult(result,operation) (_reportResult((result),(operation),strrchr(__FILE__, '/')+1,__LINE__))
+static inline bool _reportResult(kern_return_t result, const char *operation, const char* file, int line) {
+ if (result != ERR_SUCCESS) {
+ printf("%s:%d: %s: %s\n", file, line, operation, mach_error_string(result));
+ return false;
+ }
+ return true;
+}
+
+bool TPCircularBufferInit(TPCircularBuffer *buffer, int length) {
+
+ // Keep trying until we get our buffer, needed to handle race conditions
+ int retries = 3;
+ while ( true) {
+
+ buffer->length = (int32_t)round_page(length); // We need whole page sizes
+
+ // Temporarily allocate twice the length, so we have the contiguous address space to
+ // support a second instance of the buffer directly after
+ vm_address_t bufferAddress;
+ kern_return_t result = vm_allocate(mach_task_self(),
+ &bufferAddress,
+ buffer->length * 2,
+ VM_FLAGS_ANYWHERE); // allocate anywhere it'll fit
+ if (result != ERR_SUCCESS) {
+ if (retries-- == 0) {
+ reportResult(result, "Buffer allocation");
+ return false;
+ }
+ // Try again if we fail
+ continue;
+ }
+
+ // Now replace the second half of the allocation with a virtual copy of the first half. Deallocate the second half...
+ result = vm_deallocate(mach_task_self(),
+ bufferAddress + buffer->length,
+ buffer->length);
+ if (result != ERR_SUCCESS) {
+ if (retries-- == 0) {
+ reportResult(result, "Buffer deallocation");
+ return false;
+ }
+ // If this fails somehow, deallocate the whole region and try again
+ vm_deallocate(mach_task_self(), bufferAddress, buffer->length);
+ continue;
+ }
+
+ // Re-map the buffer to the address space immediately after the buffer
+ vm_address_t virtualAddress = bufferAddress + buffer->length;
+ vm_prot_t cur_prot, max_prot;
+ result = vm_remap(mach_task_self(),
+ &virtualAddress, // mirror target
+ buffer->length, // size of mirror
+ 0, // auto alignment
+ 0, // force remapping to virtualAddress
+ mach_task_self(), // same task
+ bufferAddress, // mirror source
+ 0, // MAP READ-WRITE, NOT COPY
+ &cur_prot, // unused protection struct
+ &max_prot, // unused protection struct
+ VM_INHERIT_DEFAULT);
+ if (result != ERR_SUCCESS) {
+ if (retries-- == 0) {
+ reportResult(result, "Remap buffer memory");
+ return false;
+ }
+ // If this remap failed, we hit a race condition, so deallocate and try again
+ vm_deallocate(mach_task_self(), bufferAddress, buffer->length);
+ continue;
+ }
+
+ if (virtualAddress != bufferAddress+buffer->length) {
+ // If the memory is not contiguous, clean up both allocated buffers and try again
+ if (retries-- == 0) {
+ printf("Couldn't map buffer memory to end of buffer\n");
+ return false;
+ }
+
+ vm_deallocate(mach_task_self(), virtualAddress, buffer->length);
+ vm_deallocate(mach_task_self(), bufferAddress, buffer->length);
+ continue;
+ }
+
+ buffer->buffer = (void*)bufferAddress;
+ buffer->fillCount = 0;
+ buffer->head = buffer->tail = 0;
+
+ return true;
+ }
+ return false;
+}
+
+void TPCircularBufferCleanup(TPCircularBuffer *buffer) {
+ vm_deallocate(mach_task_self(), (vm_address_t)buffer->buffer, buffer->length * 2);
+ memset(buffer, 0, sizeof(TPCircularBuffer));
+}
+
+void TPCircularBufferClear(TPCircularBuffer *buffer) {
+ int32_t fillCount;
+ if (TPCircularBufferTail(buffer, &fillCount)) {
+ TPCircularBufferConsume(buffer, fillCount);
+ }
+}
diff --git a/Pods/EZAudio/EZAudio/TPCircularBuffer.h b/Pods/EZAudio/EZAudio/TPCircularBuffer.h
new file mode 100644
index 0000000..d2cf019
--- /dev/null
+++ b/Pods/EZAudio/EZAudio/TPCircularBuffer.h
@@ -0,0 +1,195 @@
+//
+// TPCircularBuffer.h
+// Circular/Ring buffer implementation
+//
+// https://github.com/michaeltyson/TPCircularBuffer
+//
+// Created by Michael Tyson on 10/12/2011.
+//
+//
+// This implementation makes use of a virtual memory mapping technique that inserts a virtual copy
+// of the buffer memory directly after the buffer's end, negating the need for any buffer wrap-around
+// logic. Clients can simply use the returned memory address as if it were contiguous space.
+//
+// The implementation is thread-safe in the case of a single producer and single consumer.
+//
+// Virtual memory technique originally proposed by Philip Howard (http://vrb.slashusr.org/), and
+// adapted to Darwin by Kurt Revis (http://www.snoize.com,
+// http://www.snoize.com/Code/PlayBufferedSoundFile.tar.gz)
+//
+//
+// Copyright (C) 2012-2013 A Tasty Pixel
+//
+// This software is provided 'as-is', without any express or implied
+// warranty. In no event will the authors be held liable for any damages
+// arising from the use of this software.
+//
+// Permission is granted to anyone to use this software for any purpose,
+// including commercial applications, and to alter it and redistribute it
+// freely, subject to the following restrictions:
+//
+// 1. The origin of this software must not be misrepresented; you must not
+// claim that you wrote the original software. If you use this software
+// in a product, an acknowledgment in the product documentation would be
+// appreciated but is not required.
+//
+// 2. Altered source versions must be plainly marked as such, and must not be
+// misrepresented as being the original software.
+//
+// 3. This notice may not be removed or altered from any source distribution.
+//
+
+#ifndef TPCircularBuffer_h
+#define TPCircularBuffer_h
+
+#include <libkern/OSAtomic.h>
+#include <string.h>
+#include <assert.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+ void *buffer;
+ int32_t length;
+ int32_t tail;
+ int32_t head;
+ volatile int32_t fillCount;
+} TPCircularBuffer;
+
+/*!
+ * Initialise buffer
+ *
+ * Note that the length is advisory only: Because of the way the
+ * memory mirroring technique works, the true buffer length will
+ * be multiples of the device page size (e.g. 4096 bytes)
+ *
+ * @param buffer Circular buffer
+ * @param length Length of buffer
+ */
+bool TPCircularBufferInit(TPCircularBuffer *buffer, int32_t length);
+
+/*!
+ * Cleanup buffer
+ *
+ * Releases buffer resources.
+ */
+void TPCircularBufferCleanup(TPCircularBuffer *buffer);
+
+/*!
+ * Clear buffer
+ *
+ * Resets buffer to original, empty state.
+ *
+ * This is safe for use by consumer while producer is accessing
+ * buffer.
+ */
+void TPCircularBufferClear(TPCircularBuffer *buffer);
+
+// Reading (consuming)
+
+/*!
+ * Access end of buffer
+ *
+ * This gives you a pointer to the end of the buffer, ready
+ * for reading, and the number of available bytes to read.
+ *
+ * @param buffer Circular buffer
+ * @param availableBytes On output, the number of bytes ready for reading
+ * @return Pointer to the first bytes ready for reading, or NULL if buffer is empty
+ */
+static __inline__ __attribute__((always_inline)) void* TPCircularBufferTail(TPCircularBuffer *buffer, int32_t* availableBytes) {
+ *availableBytes = buffer->fillCount;
+ if (*availableBytes == 0) return NULL;
+ return (void*)((char*)buffer->buffer + buffer->tail);
+}
+
+/*!
+ * Consume bytes in buffer
+ *
+ * This frees up the just-read bytes, ready for writing again.
+ *
+ * @param buffer Circular buffer
+ * @param amount Number of bytes to consume
+ */
+static __inline__ __attribute__((always_inline)) void TPCircularBufferConsume(TPCircularBuffer *buffer, int32_t amount) {
+ buffer->tail = (buffer->tail + amount) % buffer->length;
+ OSAtomicAdd32Barrier(-amount, &buffer->fillCount);
+ assert(buffer->fillCount >= 0);
+}
+
+/*!
+ * Version of TPCircularBufferConsume without the memory barrier, for more optimal use in single-threaded contexts
+ */
+static __inline__ __attribute__((always_inline)) void TPCircularBufferConsumeNoBarrier(TPCircularBuffer *buffer, int32_t amount) {
+ buffer->tail = (buffer->tail + amount) % buffer->length;
+ buffer->fillCount -= amount;
+ assert(buffer->fillCount >= 0);
+}
+
+/*!
+ * Access front of buffer
+ *
+ * This gives you a pointer to the front of the buffer, ready
+ * for writing, and the number of available bytes to write.
+ *
+ * @param buffer Circular buffer
+ * @param availableBytes On output, the number of bytes ready for writing
+ * @return Pointer to the first bytes ready for writing, or NULL if buffer is full
+ */
+static __inline__ __attribute__((always_inline)) void* TPCircularBufferHead(TPCircularBuffer *buffer, int32_t* availableBytes) {
+ *availableBytes = (buffer->length - buffer->fillCount);
+ if (*availableBytes == 0) return NULL;
+ return (void*)((char*)buffer->buffer + buffer->head);
+}
+
+// Writing (producing)
+
+/*!
+ * Produce bytes in buffer
+ *
+ * This marks the given section of the buffer ready for reading.
+ *
+ * @param buffer Circular buffer
+ * @param amount Number of bytes to produce
+ */
+static __inline__ __attribute__((always_inline)) void TPCircularBufferProduce(TPCircularBuffer *buffer, int amount) {
+ buffer->head = (buffer->head + amount) % buffer->length;
+ OSAtomicAdd32Barrier(amount, &buffer->fillCount);
+ assert(buffer->fillCount <= buffer->length);
+}
+
+/*!
+ * Version of TPCircularBufferProduce without the memory barrier, for more optimal use in single-threaded contexts
+ */
+static __inline__ __attribute__((always_inline)) void TPCircularBufferProduceNoBarrier(TPCircularBuffer *buffer, int amount) {
+ buffer->head = (buffer->head + amount) % buffer->length;
+ buffer->fillCount += amount;
+ assert(buffer->fillCount <= buffer->length);
+}
+
+/*!
+ * Helper routine to copy bytes to buffer
+ *
+ * This copies the given bytes to the buffer, and marks them ready for writing.
+ *
+ * @param buffer Circular buffer
+ * @param src Source buffer
+ * @param len Number of bytes in source buffer
+ * @return true if bytes copied, false if there was insufficient space
+ */
+static __inline__ __attribute__((always_inline)) bool TPCircularBufferProduceBytes(TPCircularBuffer *buffer, const void* src, int32_t len) {
+ int32_t space;
+ void *ptr = TPCircularBufferHead(buffer, &space);
+ if (space < len) return false;
+ memcpy(ptr, src, len);
+ TPCircularBufferProduce(buffer, len);
+ return true;
+}
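+
+/*!
+ * Usage sketch (illustrative only; `sourceBytes`, `sourceLength` and the
+ * processing step are assumptions of the example): a single producer thread
+ * copies bytes in and a single consumer thread drains them, as described in
+ * the header comment above.
+ *
+ * TPCircularBuffer buffer;
+ * TPCircularBufferInit(&buffer, 16384); // rounded up to whole memory pages
+ *
+ * // producer thread
+ * TPCircularBufferProduceBytes(&buffer, sourceBytes, sourceLength);
+ *
+ * // consumer thread
+ * int32_t availableBytes;
+ * void *tail = TPCircularBufferTail(&buffer, &availableBytes);
+ * if (tail != NULL)
+ * {
+ * // ... process availableBytes bytes starting at tail ...
+ * TPCircularBufferConsume(&buffer, availableBytes);
+ * }
+ *
+ * TPCircularBufferCleanup(&buffer);
+ */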
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/Pods/EZAudio/LICENSE b/Pods/EZAudio/LICENSE
new file mode 100644
index 0000000..34eac18
--- /dev/null
+++ b/Pods/EZAudio/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+EZAudio
+Copyright (c) 2013 Syed Haris Ali
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/Pods/EZAudio/README.md b/Pods/EZAudio/README.md
new file mode 100644
index 0000000..d79cff9
--- /dev/null
+++ b/Pods/EZAudio/README.md
@@ -0,0 +1,1114 @@
+![alt text](https://s3-us-west-1.amazonaws.com/ezaudio-media/EZAudioJumbo-Alt.png "EZAudioLogo")
+
+#EZAudio
+A simple, intuitive audio framework for iOS and OSX.
+
+## Welcome to 1.0.0!
+Thank you guys for being so patient over the last year - I've been working like crazy the last few weeks rewriting and extending the EZAudio core and interface components and squashing bugs. Finally, EZAudio is now at its 1.0.0 release with all new updated components, examples, and documentation. Happy coding!
+
+## Apps Using EZAudio
+I'd really like to start creating a list of projects made using EZAudio. If you've used EZAudio to make something cool, whether it's an app or open source visualization or whatever, please email me at syedhali07[at]gmail.com and I'll add it to our wall of fame!
+To start it off:
+- [Detour](https://www.detour.com/) - Gorgeous location-aware audio walks
+
+##Features
+
+**Awesome Components**
+
+I've designed six audio components and two interface components to allow you to immediately get your hands dirty recording, playing, and visualizing audio data. These components simply plug into each other and build on top of the high-performance, low-latency AudioUnits API, and they give you an easy-to-use API written in Objective-C instead of pure C.
+
+[EZAudioDevice](#EZAudioDevice)
+
+A useful class for getting all the current and available inputs/output on any Apple device. The `EZMicrophone` and `EZOutput` use this to direct sound in/out from different hardware components.
+
+[EZMicrophone](#EZMicrophone)
+
+A microphone class that provides its delegate audio data from the default device microphone with one line of code.
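+
+A minimal sketch (assuming a controller that adopts `EZMicrophoneDelegate`; see the Getting Started sections below for complete examples):
+
+```objectivec
+self.microphone = [EZMicrophone microphoneWithDelegate:self];
+[self.microphone startFetchingAudio];
+```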
+
+[EZOutput](#EZOutput)
+
+An output class that will playback any audio it is provided by its datasource.
+
+[EZAudioFile](#EZAudioFile)
+
+An audio file class that reads/seeks through audio files and provides useful delegate callbacks.
+
+[EZAudioPlayer](#EZAudioPlayer)
+
+A replacement for `AVAudioPlayer` that combines an `EZAudioFile` and a `EZOutput` to perform robust playback of any file on any piece of hardware.
+
+[EZRecorder](#EZRecorder)
+
+A recorder class that provides a quick and easy way to write audio files from any datasource.
+
+[EZAudioPlot](#EZAudioPlot)
+
+A Core Graphics-based audio waveform plot capable of visualizing any float array as a buffer or rolling plot.
+
+[EZAudioPlotGL](#EZAudioPlotGL)
+
+An OpenGL-based, GPU-accelerated audio waveform plot capable of visualizing any float array as a buffer or rolling plot.
+
+**Cross Platform**
+
+`EZAudio` was designed to work transparently across all iOS and OSX devices. This means one universal API whether you're building for Mac or iOS. For instance, under the hood an `EZAudioPlot` knows that it will subclass a UIView for iOS or an NSView for OSX and the `EZMicrophone` knows to build on top of the RemoteIO AudioUnit for iOS, but defaults to the system's default input and output devices on OSX.
+
+##Examples & Docs
+
+Within this repo you'll find the examples for iOS and OSX to get you up to speed using each component and plugging them into each other. With just a few lines of code you'll be recording from the microphone, generating audio waveforms, and playing audio files like a boss. See the full Getting Started guide for an interactive look into each of the components.
+
+### Example Projects
+
+**_EZAudioCoreGraphicsWaveformExample_**
+
+![CoreGraphicsWaveformExampleGif](https://cloud.githubusercontent.com/assets/1275640/8516226/1eb885ec-2366-11e5-8d76-3a4b4d982eb0.gif)
+
+Shows how to use the `EZMicrophone` and `EZAudioPlot` to visualize the audio data from the microphone in real-time. The waveform can be displayed as a buffer or a rolling waveform plot (traditional waveform look).
+
+**_EZAudioOpenGLWaveformExample_**
+
+![OpenGLWaveformExampleGif](https://cloud.githubusercontent.com/assets/1275640/8516234/499f6fd2-2366-11e5-9771-7d0afae59391.gif)
+
+Shows how to use the `EZMicrophone` and `EZAudioPlotGL` to visualize the audio data from the microphone in real-time. The drawing uses OpenGL, so performance is much better for plots needing a lot of points.
+
+**_EZAudioPlayFileExample_**
+
+![PlayFileExample](https://cloud.githubusercontent.com/assets/1275640/8516245/711ca232-2366-11e5-8d20-2538164f3307.gif)
+
+Shows how to use the `EZAudioPlayer` and `EZAudioPlotGL` to playback, pause, and seek through an audio file while displaying its waveform as a buffer or a rolling waveform plot.
+
+**_EZAudioRecordWaveformExample_**
+
+![RecordWaveformExample](https://cloud.githubusercontent.com/assets/1275640/8516310/86da80f2-2367-11e5-84aa-aea25a439a76.gif)
+
+Shows how to use the `EZMicrophone`, `EZRecorder`, and `EZAudioPlotGL` to record the audio from the microphone input to a file while displaying the audio waveform of the incoming data. You can then playback the newly recorded audio file using AVFoundation and keep adding more audio data to the tail of the file.
+
+**_EZAudioWaveformFromFileExample_**
+
+![WaveformExample](https://cloud.githubusercontent.com/assets/1275640/8516597/f27240ea-236a-11e5-8ecd-68cf05b7ce40.gif)
+
+Shows how to use the `EZAudioFile` and `EZAudioPlot` to animate in an audio waveform for an entire audio file.
+
+**_EZAudioPassThroughExample_**
+
+![PassthroughExample](https://cloud.githubusercontent.com/assets/1275640/8516692/7abfbe36-236c-11e5-9d69-4f82956177b3.gif)
+
+Shows how to use the `EZMicrophone`, `EZOutput`, and the `EZAudioPlotGL` to pass the microphone input to the output for playback while displaying the audio waveform (as a buffer or rolling plot) in real-time.
+
+**_EZAudioFFTExample_**
+
+![FFTExample](https://cloud.githubusercontent.com/assets/1275640/8662077/5621705a-2971-11e5-88ed-9a865e422ade.gif)
+
+Shows how to calculate the real-time FFT of the audio data coming from the `EZMicrophone` using the Accelerate framework. The audio data is plotted using two `EZAudioPlots` for the time and frequency displays.
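+
+If you just want the gist of how that example is wired up, here's a minimal sketch (assuming the `EZAudioFFTRolling` API as of 1.1.2; the `fft`, `microphone`, and `frequencyPlot` property names are just illustrative):
+```objectivec
+// Declare a rolling FFT that windows the incoming microphone samples
+@property (nonatomic, strong) EZAudioFFTRolling *fft;
+
+...
+
+// Create the FFT with a window size and the microphone's sample rate
+self.fft = [EZAudioFFTRolling fftWithWindowSize:4096
+                                     sampleRate:self.microphone.audioStreamBasicDescription.mSampleRate
+                                       delegate:self];
+
+// Feed the FFT from the microphone's float buffers (this happens on the audio thread)
+- (void)   microphone:(EZMicrophone *)microphone
+     hasAudioReceived:(float **)buffer
+       withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels
+{
+    [self.fft computeFFTWithBuffer:buffer[0] withBufferSize:bufferSize];
+}
+
+// Receive the frequency-domain data and plot it on the main thread
+- (void)        fft:(EZAudioFFT *)fft
+ updatedWithFFTData:(float *)fftData
+         bufferSize:(vDSP_Length)bufferSize
+{
+    __weak typeof (self) weakSelf = self;
+    dispatch_async(dispatch_get_main_queue(), ^{
+        [weakSelf.frequencyPlot updateBuffer:fftData withBufferSize:(UInt32)bufferSize];
+    });
+}
+```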
+
+### Documentation
+The official documentation for EZAudio can be found here: http://cocoadocs.org/docsets/EZAudio/1.1.2/
+
+You can also generate the docset yourself using appledoc by running appledoc on the EZAudio source folder.
+
+##Getting Started
+To begin using `EZAudio` you must first make sure you have the proper build requirements and frameworks. Below you'll find explanations of each component and code snippets to show how to use each to perform common tasks like getting microphone data, updating audio waveform plots, reading/seeking through audio files, and performing playback.
+
+###Build Requirements
+**iOS**
+- 6.0+
+
+**OSX**
+- 10.8+
+
+###Frameworks
+**iOS**
+- Accelerate
+- AudioToolbox
+- AVFoundation
+- GLKit
+
+**OSX**
+- Accelerate
+- AudioToolbox
+- AudioUnit
+- CoreAudio
+- QuartzCore
+- OpenGL
+- GLKit
+
+###Adding To Project
+You can add EZAudio to your project in a few ways:
+1.) The easiest way to use EZAudio is via CocoaPods. Simply add EZAudio to your Podfile like so:
+
+`
+pod 'EZAudio', '~> 1.1.2'
+`
+
+####Using EZAudio & The Amazing Audio Engine
+If you're also using the Amazing Audio Engine then use the `EZAudio/Core` subspec like so:
+
+`
+pod 'EZAudio/Core', '~> 1.1.2'
+`
+
+2.) Alternatively, you could clone or fork this repo and just drag and drop the source into your project.
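+
+Once the pod (or the source) is added, importing the single umbrella header pulls in all of the components listed above (a minimal sketch; with CocoaPods the angle-bracket form `#import <EZAudio/EZAudio.h>` may be needed depending on your header search path setup):
+```objectivec
+// Import all EZAudio components (EZMicrophone, EZOutput, EZAudioFile, EZAudioPlot, etc.)
+#import "EZAudio.h"
+```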
+
+##Core Components
+
+`EZAudio` currently offers six audio components that encompass a wide range of functionality. In addition to the functional aspects of these components such as pulling audio data, reading/writing from files, and performing playback they also take special care to hook into the interface components to allow developers to display visual feedback (see the [Interface Components](#InterfaceComponents) below).
+
+###EZAudioDevice
+Provides a simple interface for obtaining the current and all available inputs and outputs for any Apple device. For instance, the iPhone 6 has three microphones available for input, while on OSX you can choose the Built-In Microphone or any available HAL device on your system. Similarly, for iOS you can choose between a connected pair of headphones or the speaker, while on OSX you can choose from the Built-In Output, any available HAL device, or Airplay.
+
+![EZAudioDeviceInputsExample](https://cloud.githubusercontent.com/assets/1275640/8535722/51e8f702-23fd-11e5-9f1c-8c45e80d19ef.gif)
+
+####Getting Input Devices
+To get all the available input devices use the `inputDevices` class method:
+```objectivec
+NSArray *inputDevices = [EZAudioDevice inputDevices];
+```
+
+or to just get the currently selected input device use the `currentInputDevice` method:
+```objectivec
+// On iOS this will default to the headset device or bottom microphone, while on OSX this will
+// be your selected input device from the Sound preferences
+EZAudioDevice *currentInputDevice = [EZAudioDevice currentInputDevice];
+```
+
+####Getting Output Devices
+Similarly, to get all the available output devices use the `outputDevices` class method:
+```objectivec
+NSArray *outputDevices = [EZAudioDevice outputDevices];
+```
+
+or to just get the currently selected output device use the `currentOutputDevice` method:
+```objectivec
+// On iOS this will default to the headset speaker, while on OSX this will be your selected
+// output device from the Sound preferences
+EZAudioDevice *currentOutputDevice = [EZAudioDevice currentOutputDevice];
+```
+
+###EZMicrophone
+Provides access to the default device microphone in one line of code and provides delegate callbacks to receive the audio data as an AudioBufferList and float arrays.
+
+**_Relevant Example Projects_**
+- EZAudioCoreGraphicsWaveformExample (iOS)
+- EZAudioCoreGraphicsWaveformExample (OSX)
+- EZAudioOpenGLWaveformExample (iOS)
+- EZAudioOpenGLWaveformExample (OSX)
+- EZAudioRecordExample (iOS)
+- EZAudioRecordExample (OSX)
+- EZAudioPassThroughExample (iOS)
+- EZAudioPassThroughExample (OSX)
+- EZAudioFFTExample (iOS)
+- EZAudioFFTExample (OSX)
+
+####Creating A Microphone
+
+Create an `EZMicrophone` instance by declaring a property and initializing it like so:
+
+```objectivec
+// Declare the EZMicrophone as a strong property
+@property (nonatomic, strong) EZMicrophone *microphone;
+
+...
+
+// Initialize the microphone instance and assign it a delegate to receive the audio data
+// callbacks
+self.microphone = [EZMicrophone microphoneWithDelegate:self];
+```
+Alternatively, you could also use the shared `EZMicrophone` instance and just assign its `EZMicrophoneDelegate`.
+```objectivec
+// Assign a delegate to the shared instance of the microphone to receive the audio data
+// callbacks
+[EZMicrophone sharedMicrophone].delegate = self;
+```
+
+####Setting The Device
+The `EZMicrophone` uses an `EZAudioDevice` instance to select what specific hardware source it will use to pull audio data. You'd use this if you wanted to change the input device like in the EZAudioCoreGraphicsWaveformExample for [iOS](https://github.com/syedhali/EZAudio/tree/master/EZAudioExamples/iOS/EZAudioCoreGraphicsWaveformExample) or [OSX](https://github.com/syedhali/EZAudio/tree/master/EZAudioExamples/OSX/EZAudioCoreGraphicsWaveformExample). At any time you can change which input device is used by setting the device property:
+```objectivec
+NSArray *inputs = [EZAudioDevice inputDevices];
+[self.microphone setDevice:[inputs lastObject]];
+```
+
+Anytime the `EZMicrophone` changes its device it will trigger the `EZMicrophoneDelegate` event:
+```objectivec
+
+- (void)microphone:(EZMicrophone *)microphone changedDevice:(EZAudioDevice *)device
+{
+ // This is not always guaranteed to occur on the main thread so make sure you
+ // wrap it in a GCD block
+ dispatch_async(dispatch_get_main_queue(), ^{
+ // Update UI here
+ NSLog(@"Changed input device: %@", device);
+ });
+}
+```
+**Note: For iOS this can happen automatically if the AVAudioSession changes the current device.**
+
+####Getting Microphone Data
+
+To tell the microphone to start fetching audio use the `startFetchingAudio` function.
+
+```objectivec
+// Starts fetching audio from the default device microphone and sends data to EZMicrophoneDelegate
+[self.microphone startFetchingAudio];
+```
+Once the `EZMicrophone` has started it will send the `EZMicrophoneDelegate` the audio back in a few ways.
+An array of float arrays:
+```objectivec
+/**
+ The microphone data represented as non-interleaved float arrays useful for:
+ - Creating real-time waveforms using EZAudioPlot or EZAudioPlotGL
+ - Creating any number of custom visualizations that utilize audio!
+ */
+-(void) microphone:(EZMicrophone *)microphone
+ hasAudioReceived:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+withNumberOfChannels:(UInt32)numberOfChannels
+{
+ __weak typeof (self) weakSelf = self;
+ // Getting audio data as an array of float buffer arrays that can be fed into the
+ // EZAudioPlot, EZAudioPlotGL, or whatever visualization you would like to do with
+ // the microphone data.
+ dispatch_async(dispatch_get_main_queue(),^{
+ // Visualize this data brah, buffer[0] = left channel, buffer[1] = right channel
+ [weakSelf.audioPlot updateBuffer:buffer[0] withBufferSize:bufferSize];
+ });
+}
+```
+or the AudioBufferList representation:
+```objectivec
+/**
+ The microphone data represented as CoreAudio's AudioBufferList useful for:
+ - Appending data to an audio file via the EZRecorder
+ - Playback via the EZOutput
+
+ */
+-(void) microphone:(EZMicrophone *)microphone
+ hasBufferList:(AudioBufferList *)bufferList
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels
+{
+ // Getting audio data as an AudioBufferList that can be directly fed into the EZRecorder
+ // or EZOutput. Say whattt...
+}
+```
+####Pausing/Resuming The Microphone
+
+Pause or resume fetching audio at any time like so:
+```objectivec
+// Stop fetching audio
+[self.microphone stopFetchingAudio];
+
+// Resume fetching audio
+[self.microphone startFetchingAudio];
+```
+
+Alternatively, you could also toggle the `microphoneOn` property (safe to use with Cocoa Bindings)
+```objectivec
+// Stop fetching audio
+self.microphone.microphoneOn = NO;
+
+// Start fetching audio
+self.microphone.microphoneOn = YES;
+```
+
+###EZOutput
+Provides flexible playback to the default output device by asking the `EZOutputDataSource` for audio data to play. Doesn't care where the buffers come from (microphone, audio file, streaming audio, etc). As of 1.0.0 the `EZOutputDataSource` has been simplified to have only one method to provide audio data to your `EZOutput` instance.
+```objectivec
+// The EZOutputDataSource should fill out the audioBufferList with the given frame count.
+// The timestamp is provided for sample accurate calculation, but for basic use cases can
+// be ignored.
+- (OSStatus) output:(EZOutput *)output
+ shouldFillAudioBufferList:(AudioBufferList *)audioBufferList
+ withNumberOfFrames:(UInt32)frames
+ timestamp:(const AudioTimeStamp *)timestamp;
+```
+
+**_Relevant Example Projects_**
+- EZAudioPlayFileExample (iOS)
+- EZAudioPlayFileExample (OSX)
+- EZAudioPassThroughExample (iOS)
+- EZAudioPassThroughExample (OSX)
+
+####Creating An Output
+
+Create an `EZOutput` by declaring a property and initializing it like so:
+
+```objectivec
+// Declare the EZOutput as a strong property
+@property (nonatomic, strong) EZOutput *output;
+...
+
+// Initialize the EZOutput instance and assign it a delegate to provide the output audio data
+self.output = [EZOutput outputWithDataSource:self];
+```
+Alternatively, you could also use the shared output instance and just assign it an `EZOutputDataSource` if you will only have one `EZOutput` instance for your application.
+```objectivec
+// Assign a data source to the shared instance of the output to provide the output audio data
+[EZOutput sharedOutput].dataSource = self;
+```
+####Setting The Device
+The `EZOutput` uses an `EZAudioDevice` instance to select what specific hardware destination it will output audio to. You'd use this if you wanted to change the output device like in the [EZAudioPlayFileExample](https://github.com/syedhali/EZAudio/tree/master/EZAudioExamples/OSX/EZAudioPlayFileExample) for OSX. At any time you can change which output device is used by setting the `device` property:
+```objectivec
+// By default the EZOutput uses the default output device, but you can change this at any time
+EZAudioDevice *currentOutputDevice = [EZAudioDevice currentOutputDevice];
+[self.output setDevice:currentOutputDevice];
+```
+
+Anytime the `EZOutput` changes its device it will trigger the `EZOutputDelegate` event:
+```objectivec
+- (void)output:(EZOutput *)output changedDevice:(EZAudioDevice *)device
+{
+ NSLog(@"Change output device to: %@", device);
+}
+```
+
+####Playing Audio
+
+#####Setting The Input Format
+
+When providing audio data the `EZOutputDataSource` will expect you to fill out the AudioBufferList provided with whatever `inputFormat` is set on the `EZOutput`. By default the input format is a stereo, non-interleaved, float format (see [defaultInputFormat](http://cocoadocs.org/docsets/EZAudio/1.1.2/Classes/EZOutput.html#//api/name/defaultInputFormat) for more information). If you're dealing with a different input format (which is typically the case), just set the `inputFormat` property. For instance:
+```objectivec
+// Set a mono, float format with a sample rate of 44.1 kHz
+AudioStreamBasicDescription monoFloatFormat = [EZAudioUtilities monoFloatFormatWithSampleRate:44100.0f];
+[self.output setInputFormat:monoFloatFormat];
+```
+#####Implementing the EZOutputDataSource
+
+The `EZOutputDataSource` is implemented internally by the `EZAudioPlayer`, which uses an `EZAudioFile` to read audio from an audio file on disk like so:
+```objectivec
+- (OSStatus) output:(EZOutput *)output
+ shouldFillAudioBufferList:(AudioBufferList *)audioBufferList
+ withNumberOfFrames:(UInt32)frames
+ timestamp:(const AudioTimeStamp *)timestamp
+{
+ if (self.audioFile)
+ {
+ UInt32 bufferSize; // amount of frames actually read
+ BOOL eof; // end of file
+ [self.audioFile readFrames:frames
+ audioBufferList:audioBufferList
+ bufferSize:&bufferSize
+ eof:&eof];
+ if (eof && [self.delegate respondsToSelector:@selector(audioPlayer:reachedEndOfAudioFile:)])
+ {
+ [self.delegate audioPlayer:self reachedEndOfAudioFile:self.audioFile];
+ }
+ if (eof && self.shouldLoop)
+ {
+ [self seekToFrame:0];
+ }
+ else if (eof)
+ {
+ [self pause];
+ [self seekToFrame:0];
+ [[NSNotificationCenter defaultCenter] postNotificationName:EZAudioPlayerDidReachEndOfFileNotification
+ object:self];
+ }
+ }
+ return noErr;
+}
+```
+
+I created a sample project that uses the `EZOutput` to act as a signal generator to play sine, square, triangle, sawtooth, and noise waveforms. **Here's a snippet of code to generate a sine tone**:
+```objectivec
+...
+double const SAMPLE_RATE = 44100.0;
+
+- (void)awakeFromNib
+{
+ //
+ // Create EZOutput to play audio data with mono format (EZOutput will convert
+ // this mono, float "inputFormat" to a clientFormat, i.e. the stereo output format).
+ //
+ AudioStreamBasicDescription inputFormat = [EZAudioUtilities monoFloatFormatWithSampleRate:SAMPLE_RATE];
+ self.output = [EZOutput outputWithDataSource:self inputFormat:inputFormat];
+ [self.output setDelegate:self];
+ self.frequency = 200.0;
+ self.sampleRate = SAMPLE_RATE;
+ self.amplitude = 0.80;
+}
+
+- (OSStatus) output:(EZOutput *)output
+ shouldFillAudioBufferList:(AudioBufferList *)audioBufferList
+ withNumberOfFrames:(UInt32)frames
+ timestamp:(const AudioTimeStamp *)timestamp
+{
+ Float32 *buffer = (Float32 *)audioBufferList->mBuffers[0].mData;
+ size_t bufferByteSize = (size_t)audioBufferList->mBuffers[0].mDataByteSize;
+ double theta = self.theta;
+ double frequency = self.frequency;
+ double thetaIncrement = 2.0 * M_PI * frequency / SAMPLE_RATE;
+ if (self.type == GeneratorTypeSine)
+ {
+ for (UInt32 frame = 0; frame < frames; frame++)
+ {
+ buffer[frame] = self.amplitude * sin(theta);
+ theta += thetaIncrement;
+ if (theta > 2.0 * M_PI)
+ {
+ theta -= 2.0 * M_PI;
+ }
+ }
+ self.theta = theta;
+ }
+    // ... square, triangle, sawtooth, and noise shapes are handled in the full source
+    return noErr;
+}
+```
+
+For the full implementation of the square, triangle, sawtooth, and noise functions, see: https://github.com/syedhali/SineExample/blob/master/SineExample/GeneratorViewController.m#L220-L305
+
+Once the `EZOutput` has started it will send the `EZOutputDelegate` the audio back as float arrays for visualizing. These are converted inside the `EZOutput` component from whatever input format you may have provided. For instance, if you provide an interleaved, signed integer AudioStreamBasicDescription for the `inputFormat` property then that will be automatically converted to a stereo, non-interleaved, float format when sent back in the delegate `playedAudio:...` method below:
+```objectivec
+/**
+ The output data represented as non-interleaved float arrays useful for:
+ - Creating real-time waveforms using EZAudioPlot or EZAudioPlotGL
+ - Creating any number of custom visualizations that utilize audio!
+ */
+- (void) output:(EZOutput *)output
+ playedAudio:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels
+{
+ __weak typeof (self) weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ // Update plot, buffer[0] = left channel, buffer[1] = right channel
+ });
+}
+```
+
+####Pausing/Resuming The Output
+Pause or resume the output component at any time like so:
+```objectivec
+// Stop playback
+[self.output stopPlayback];
+
+// Resume playback
+[self.output startPlayback];
+```
+
+####Chaining Audio Unit Effects
+Internally the `EZOutput` is using an AUGraph to chain together a converter, mixer, and output audio units. You can hook into this graph by subclassing `EZOutput` and implementing the method:
+```objectivec
+// By default this method connects the AUNode representing the input format converter to
+// the mixer node. In subclasses you can add effects in the chain between the converter
+// and mixer by creating additional AUNodes, adding them to the AUGraph provided below,
+// and then connecting them together.
+- (OSStatus)connectOutputOfSourceNode:(AUNode)sourceNode
+ sourceNodeOutputBus:(UInt32)sourceNodeOutputBus
+ toDestinationNode:(AUNode)destinationNode
+ destinationNodeInputBus:(UInt32)destinationNodeInputBus
+ inGraph:(AUGraph)graph;
+```
+
+This was inspired by the audio processing graph from CocoaLibSpotify (Daniel Kennett of Spotify has [an excellent blog post](http://ikennd.ac/blog/2012/04/augraph-basics-in-cocoalibspotify/) explaining how to add an EQ to the CocoaLibSpotify AUGraph).
+
+Here's an example of how to add a delay audio unit (`kAudioUnitSubType_Delay`):
+```objectivec
+// In interface, declare delay node info property
+@property (nonatomic, assign) EZAudioNodeInfo *delayNodeInfo;
+
+// In implementation, overwrite the connection method
+- (OSStatus)connectOutputOfSourceNode:(AUNode)sourceNode
+ sourceNodeOutputBus:(UInt32)sourceNodeOutputBus
+ toDestinationNode:(AUNode)destinationNode
+ destinationNodeInputBus:(UInt32)destinationNodeInputBus
+ inGraph:(AUGraph)graph
+{
+ self.delayNodeInfo = (EZAudioNodeInfo *)malloc(sizeof(EZAudioNodeInfo));
+
+ // A description for the time/pitch shifter Device
+ AudioComponentDescription delayComponentDescription;
+ delayComponentDescription.componentType = kAudioUnitType_Effect;
+ delayComponentDescription.componentSubType = kAudioUnitSubType_Delay;
+ delayComponentDescription.componentManufacturer = kAudioUnitManufacturer_Apple;
+ delayComponentDescription.componentFlags = 0;
+ delayComponentDescription.componentFlagsMask = 0;
+
+ [EZAudioUtilities checkResult:AUGraphAddNode(graph,
+ &delayComponentDescription,
+ &self.delayNodeInfo->node)
+ operation:"Failed to add node for time shift"];
+
+ // Get the time/pitch shifter Audio Unit from the node
+ [EZAudioUtilities checkResult:AUGraphNodeInfo(graph,
+ self.delayNodeInfo->node,
+ NULL,
+ &self.delayNodeInfo->audioUnit)
+ operation:"Failed to get audio unit for delay node"];
+
+ // connect the output of the input source node to the input of the time/pitch shifter node
+ [EZAudioUtilities checkResult:AUGraphConnectNodeInput(graph,
+ sourceNode,
+ sourceNodeOutputBus,
+ self.delayNodeInfo->node,
+ 0)
+ operation:"Failed to connect source node into delay node"];
+
+ // connect the output of the time/pitch shifter node to the input of the destination node, thus completing the chain.
+ [EZAudioUtilities checkResult:AUGraphConnectNodeInput(graph,
+ self.delayNodeInfo->node,
+ 0,
+ destinationNode,
+ destinationNodeInputBus)
+ operation:"Failed to connect delay to destination node"];
+ return noErr;
+}
+
+// Clean up
+- (void)dealloc
+{
+ free(self.delayNodeInfo);
+}
+```
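+
+Once the delay node is wired into the graph you can tweak its effect at runtime with the standard Audio Unit parameter API (a sketch; `kDelayParam_DelayTime` comes from AudioUnit's AudioUnitParameters.h and the value below is just illustrative):
+```objectivec
+// Set a 350 ms delay time on the delay audio unit created above
+AudioUnitSetParameter(self.delayNodeInfo->audioUnit,
+                      kDelayParam_DelayTime,
+                      kAudioUnitScope_Global,
+                      0,
+                      0.35,
+                      0);
+```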
+
+###EZAudioFile
+Provides simple read/seek operations, pulls waveform amplitude data, and provides the `EZAudioFileDelegate` to notify of any read/seek action occurring on the `EZAudioFile`. This can be thought of as the NSImage/UIImage equivalent of the audio world.
+
+**_Relevant Example Projects_**
+- EZAudioWaveformFromFileExample (iOS)
+- EZAudioWaveformFromFileExample (OSX)
+
+####Opening An Audio File
+To open an audio file create a new instance of the `EZAudioFile` class.
+```objectivec
+// Declare the EZAudioFile as a strong property
+@property (nonatomic, strong) EZAudioFile *audioFile;
+
+...
+
+// Initialize the EZAudioFile instance and assign it a delegate to receive the read/seek callbacks
+self.audioFile = [EZAudioFile audioFileWithURL:[NSURL fileURLWithPath:@"/path/to/your/file"] delegate:self];
+```
+
+####Getting Waveform Data
+
+The `EZAudioFile` allows you to quickly fetch waveform data from an audio file with as much or as little detail as you'd like.
+```objectivec
+__weak typeof (self) weakSelf = self;
+// Get a waveform with 1024 points of data. We can adjust the number of points to whatever level
+// of detail is needed by the application
+[self.audioFile getWaveformDataWithNumberOfPoints:1024
+ completionBlock:^(float **waveformData,
+ int length)
+{
+ [weakSelf.audioPlot updateBuffer:waveformData[0]
+ withBufferSize:length];
+}];
+```
+
+####Reading From An Audio File
+
+Reading audio data from a file requires you to create an AudioBufferList to hold the data. The `EZAudioUtilities` function `audioBufferListWithNumberOfFrames:numberOfChannels:interleaved:` provides a convenient way to get an allocated AudioBufferList to use. There is also a utility function, `freeBufferList:`, to free (or release) the AudioBufferList when you are done using that audio data.
+
+**Note: You have to free the AudioBufferList, even in ARC.**
+```objectivec
+// Allocate an AudioBufferList to hold the audio data (the client format is the non-compressed
+// in-app format that is used for reading, it's different than the file format which is usually
+// something compressed like an mp3 or m4a)
+AudioStreamBasicDescription clientFormat = [self.audioFile clientFormat];
+UInt32 numberOfFramesToRead = 512;
+UInt32 channels = clientFormat.mChannelsPerFrame;
+BOOL isInterleaved = [EZAudioUtilities isInterleaved:clientFormat];
+AudioBufferList *bufferList = [EZAudioUtilities audioBufferListWithNumberOfFrames:numberOfFramesToRead
+ numberOfChannels:channels
+ interleaved:isInterleaved];
+
+// Read the frames from the EZAudioFile into the AudioBufferList
+UInt32 framesRead;
+BOOL isEndOfFile;
+[self.audioFile readFrames:numberOfFramesToRead
+ audioBufferList:bufferList
+ bufferSize:&framesRead
+ eof:&isEndOfFile];
+```
+
+When a read occurs the `EZAudioFileDelegate` receives two events.
+
+An event notifying the delegate of the read audio data as float arrays:
+```objectivec
+-(void) audioFile:(EZAudioFile *)audioFile
+ readAudio:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels
+{
+ __weak typeof (self) weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [weakSelf.audioPlot updateBuffer:buffer[0]
+ withBufferSize:bufferSize];
+ });
+}
+```
+and an event notifying the delegate of the new frame position within the `EZAudioFile`:
+```objectivec
+-(void)audioFile:(EZAudioFile *)audioFile updatedPosition:(SInt64)framePosition
+{
+ __weak typeof (self) weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ // Update UI
+ });
+}
+```
+
+####Seeking Through An Audio File
+
+You can seek very easily through an audio file using the `EZAudioFile`'s `seekToFrame:` method. The `EZAudioFile` also provides a `totalFrames` method that gives you the total number of frames in an audio file so you can calculate a proper offset.
+```objectivec
+// Get the total number of frames for the audio file
+SInt64 totalFrames = [self.audioFile totalFrames];
+
+// Seeks halfway through the audio file
+[self.audioFile seekToFrame:(totalFrames/2)];
+
+// Alternatively, you can seek using seconds
+NSTimeInterval duration = [self.audioFile duration];
+[self.audioFile setCurrentTime:duration/2.0];
+```
+When a seek occurs the `EZAudioFileDelegate` receives the seek event:
+```objectivec
+-(void)audioFile:(EZAudioFile *)audioFile updatedPosition:(SInt64)framePosition
+{
+ __weak typeof (self) weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ // Update UI
+ });
+}
+```
+
+###EZAudioPlayer
+Provides a class that combines the `EZAudioFile` and `EZOutput` for file playback of all Core Audio supported formats to any hardware device. Because the `EZAudioPlayer` internally hooks into the `EZAudioFileDelegate` and `EZOutputDelegate`, you should implement the `EZAudioPlayerDelegate` to receive the `playedAudio:...` and `updatedPosition:` events. The EZAudioPlayFileExample projects for [iOS](https://github.com/syedhali/EZAudio/tree/master/EZAudioExamples/iOS/EZAudioPlayFileExample) and [OSX](https://github.com/syedhali/EZAudio/tree/master/EZAudioExamples/OSX/EZAudioPlayFileExample) show how to use the `EZAudioPlayer` to play audio files, visualize the samples with an audio plot, adjust the volume, and change the output device using the `EZAudioDevice` class. The `EZAudioPlayer` primarily uses `NSNotificationCenter` to post notifications because you often have one audio player and multiple UI elements that need to listen for player events to properly update.
+
+####Creating An Audio Player
+```objectivec
+// Declare the EZAudioPlayer as a strong property
+@property (nonatomic, strong) EZAudioPlayer *player;
+
+...
+
+// Create an EZAudioPlayer with a delegate that conforms to EZAudioPlayerDelegate
+self.player = [EZAudioPlayer audioPlayerWithDelegate:self];
+```
+
+####Playing An Audio File
+The `EZAudioPlayer` uses an internal `EZAudioFile` to provide data to its `EZOutput` for output via the `EZOutputDataSource`. You can provide an `EZAudioFile` by just setting the `audioFile` property on the `EZAudioPlayer`, which will make a copy of the `EZAudioFile` at that file path URL for its own use.
+```objectivec
+// Set the EZAudioFile for playback by setting the `audioFile` property
+EZAudioFile *audioFile = [EZAudioFile audioFileWithURL:[NSURL fileURLWithPath:@"/path/to/your/file"]];
+[self.player setAudioFile:audioFile];
+
+// This, however, will not pause playback if a file is currently playing. If you're
+// swapping in a new audio file while playback is already running, use `playAudioFile:`
+// instead:
+[self.player playAudioFile:audioFile];
+```
+
+As audio is played the `EZAudioPlayerDelegate` will receive the `playedAudio:...`, `updatedPosition:...`, and, if the audio file reaches the end of the file, the `reachedEndOfAudioFile:` events. A typical implementation of the `EZAudioPlayerDelegate` would be something like:
+```objectivec
+- (void) audioPlayer:(EZAudioPlayer *)audioPlayer
+ playedAudio:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels
+ inAudioFile:(EZAudioFile *)audioFile
+{
+ __weak typeof (self) weakSelf = self;
+ // Update an EZAudioPlot or EZAudioPlotGL to reflect the audio data coming out
+ // of the EZAudioPlayer (post volume and pan)
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [weakSelf.audioPlot updateBuffer:buffer[0]
+ withBufferSize:bufferSize];
+ });
+}
+
+//------------------------------------------------------------------------------
+
+- (void)audioPlayer:(EZAudioPlayer *)audioPlayer
+ updatedPosition:(SInt64)framePosition
+ inAudioFile:(EZAudioFile *)audioFile
+{
+ __weak typeof (self) weakSelf = self;
+ // Update any UI controls including sliders and labels
+ // display current time/duration
+ dispatch_async(dispatch_get_main_queue(), ^{
+ if (!weakSelf.positionSlider.highlighted)
+ {
+ weakSelf.positionSlider.floatValue = (float)framePosition;
+ weakSelf.positionLabel.integerValue = framePosition;
+ }
+ });
+}
+```
+
+####Seeking
+You can seek through the audio file in a similar fashion as with the `EZAudioFile`. That is, using the `seekToFrame:` or `currentTime` property.
+```objectivec
+// Get the total number of frames and seek halfway
+SInt64 totalFrames = [self.player totalFrames];
+[self.player seekToFrame:(totalFrames/2)];
+
+// Alternatively, you can seek using seconds
+NSTimeInterval duration = [self.player duration];
+[self.player setCurrentTime:duration/2.0];
+```
+
+####Setting Playback Parameters
+Because the `EZAudioPlayer` wraps the `EZOutput` you can adjust the volume and pan parameters for playback.
+```objectivec
+// Make it half as loud, 0 = silence, 1 = full volume. Default is 1.
+[self.player setVolume:0.5];
+
+// Make it only play on the left, -1 = left, 1 = right. Default is 0.0 (center)
+[self.player setPan:-1.0];
+```
+
+####Getting Audio File Parameters
+The `EZAudioPlayer` wraps the `EZAudioFile` and provides a high level interface for pulling values like current time, duration, the frame index, total frames, etc.
+```objectivec
+NSTimeInterval currentTime = [self.player currentTime];
+NSTimeInterval duration = [self.player duration];
+NSString *formattedCurrentTime = [self.player formattedCurrentTime]; // MM:SS formatted
+NSString *formattedDuration = [self.player formattedDuration]; // MM:SS formatted
+SInt64 frameIndex = [self.player frameIndex];
+SInt64 totalFrames = [self.player totalFrames];
+```
+
+In addition, the `EZOutput` properties are also offered at a high level as well:
+```objectivec
+EZAudioDevice *outputDevice = [self.player device];
+BOOL isPlaying = [self.player isPlaying];
+float pan = [self.player pan];
+float volume = [self.player volume];
+```
+
+####Notifications
+The `EZAudioPlayer` provides the following notifications (as of 1.1.2):
+```objectivec
+/**
+ Notification that occurs whenever the EZAudioPlayer changes its `audioFile` property. Check the new value using the EZAudioPlayer's `audioFile` property.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidChangeAudioFileNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer changes its `device` property. Check the new value using the EZAudioPlayer's `device` property.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidChangeOutputDeviceNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer changes its `output` component's `pan` property. Check the new value using the EZAudioPlayer's `pan` property.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidChangePanNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer changes its `output` component's play state. Check the new value using the EZAudioPlayer's `isPlaying` property.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidChangePlayStateNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer changes its `output` component's `volume` property. Check the new value using the EZAudioPlayer's `volume` property.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidChangeVolumeNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer has reached the end of a file and its `shouldLoop` property has been set to NO.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidReachEndOfFileNotification;
+
+/**
+ Notification that occurs whenever the EZAudioPlayer performs a seek via the `seekToFrame` method or `setCurrentTime:` property setter. Check the new `currentTime` or `frameIndex` value using the EZAudioPlayer's `currentTime` or `frameIndex` property, respectively.
+ */
+FOUNDATION_EXPORT NSString * const EZAudioPlayerDidSeekNotification;
+```
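+
+A typical way to use these is to register an observer with `NSNotificationCenter` wherever a UI element needs to react to the player (a minimal sketch; the selector name is just an example):
+```objectivec
+// Listen for play state changes from a specific player instance
+[[NSNotificationCenter defaultCenter] addObserver:self
+                                         selector:@selector(playerDidChangePlayState:)
+                                             name:EZAudioPlayerDidChangePlayStateNotification
+                                           object:self.player];
+
+- (void)playerDidChangePlayState:(NSNotification *)notification
+{
+    // The notification's object is the EZAudioPlayer that posted it
+    EZAudioPlayer *player = [notification object];
+    NSLog(@"Player is playing: %d", [player isPlaying]);
+}
+```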
+
+###EZRecorder
+Provides a way to record any audio source to an audio file. This hooks into the other components quite nicely to do something like plot the audio waveform while recording to give visual feedback as to what is happening. The `EZRecorderDelegate` provides methods to listen to write events and a final close event on the `EZRecorder` (explained [below](#EZRecorderDelegateExplanation)).
+
+*Relevant Example Projects*
+- EZAudioRecordExample (iOS)
+- EZAudioRecordExample (OSX)
+
+####Creating A Recorder
+
+To create an `EZRecorder` you must provide at least 3 things: an NSURL representing the file path of where the audio file should be written to (an existing file will be overwritten), a `clientFormat` representing the format in which you will be providing the audio data, and either an `EZRecorderFileType` or an `AudioStreamBasicDescription` representing the file format of the audio data on disk.
+
+```objectivec
+// Provide a file path url to write to, a client format (always linear PCM, this is the format
+// coming from another component like the EZMicrophone's audioStreamBasicDescription property),
+// and an EZRecorderFileType constant representing either a wav (EZRecorderFileTypeWAV),
+// aiff (EZRecorderFileTypeAIFF), or m4a (EZRecorderFileTypeM4A) file format. The advantage of
+// this is that the `fileFormat` property will be automatically filled out for you.
++ (instancetype)recorderWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileType:(EZRecorderFileType)fileType;
+
+// Alternatively, you can provide a file path url to write to, a client format (always linear
+// PCM, this is the format coming from another component like the EZMicrophone's
+// audioStreamBasicDescription property), a `fileFormat` representing your custom
+// AudioStreamBasicDescription, and an AudioFileTypeID that corresponds with your `fileFormat`.
++ (instancetype)recorderWithURL:(NSURL *)url
+ clientFormat:(AudioStreamBasicDescription)clientFormat
+ fileFormat:(AudioStreamBasicDescription)fileFormat
+ audioFileTypeID:(AudioFileTypeID)audioFileTypeID;
+
+```
+
+Start by declaring an instance of the EZRecorder (you will have one of these per audio file written out)
+```objectivec
+// Declare the EZRecorder as a strong property
+@property (nonatomic, strong) EZRecorder *recorder;
+```
+
+and initialize it using one of the two initializers from above. For instance, using the `EZRecorderFileType` shortcut initializer you could create an instance like so:
+```objectivec
+// Example using an EZMicrophone and a file path location on your computer to write
+// out an M4A file.
+self.recorder = [EZRecorder recorderWithURL:[NSURL fileURLWithPath:@"/path/to/your/file.m4a"]
+ clientFormat:[self.microphone audioStreamBasicDescription]
+ fileType:EZRecorderFileTypeM4A];
+```
+
+or to configure your own custom file format, say to write out an 8000 Hz iLBC file:
+```objectivec
+// Example using an EZMicrophone, a file path location on your computer, and an
+// iLBC file format.
+AudioStreamBasicDescription iLBCFormat = [EZAudioUtilities iLBCFormatWithSampleRate:8000];
+self.recorder = [EZRecorder recorderWithURL:[NSURL fileURLWithPath:@"/path/to/your/file.caf"]
+ clientFormat:[self.microphone audioStreamBasicDescription]
+ fileFormat:iLBCFormat
+ audioFileTypeID:kAudioFileCAFType];
+```
+
+####Recording Some Audio
+
+Once you've initialized your `EZRecorder` you can append data by passing in an AudioBufferList and its buffer size like so:
+```objectivec
+// Append the microphone data coming as a AudioBufferList with the specified buffer size
+// to the recorder
+-(void) microphone:(EZMicrophone *)microphone
+ hasBufferList:(AudioBufferList *)bufferList
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels
+{
+ // Getting audio data as a buffer list that can be directly fed into the EZRecorder. This is
+ // happening on the audio thread - any UI updating needs a GCD main queue block.
+ if (self.isRecording)
+ {
+ // Since we set the recorder's client format to be that of the EZMicrophone instance,
+ // the audio data coming in represented by the AudioBufferList can directly be provided
+ // to the EZRecorder. The EZRecorder will internally convert the audio data from the
+ // `clientFormat` to `fileFormat`.
+ [self.recorder appendDataFromBufferList:bufferList
+ withBufferSize:bufferSize];
+ }
+}
+```
+
+#####Responding to an EZRecorder after it has written audio data
+
+Once audio data has been successfully written with the `EZRecorder` it will notify the `EZRecorderDelegate` of the event so it can respond via:
+```objectivec
+// Triggers after the EZRecorder's `appendDataFromBufferList:withBufferSize:` method is called
+// so you can update your interface accordingly.
+- (void)recorderUpdatedCurrentTime:(EZRecorder *)recorder
+{
+ __weak typeof (self) weakSelf = self;
+ // This will get triggered on the thread that the write occurred on so be sure to wrap your UI
+ // updates in a GCD main queue block! However, I highly recommend you first pull the values
+ // you'd like to update the interface with before entering the GCD block to avoid trying to
+ // fetch a value after the audio file has been closed.
+ NSString *formattedCurrentTime = [recorder formattedCurrentTime]; // MM:SS formatted
+ dispatch_async(dispatch_get_main_queue(), ^{
+ // Update label
+ weakSelf.currentTimeLabel.stringValue = formattedCurrentTime;
+ });
+}
+```
+
+####Closing An Audio File
+When your recording is done be sure to call the `closeAudioFile` method to make sure the audio file written to disk is properly closed before you attempt to read it.
+
+```objectivec
+// Close the EZRecorder's audio file BEFORE reading
+[self.recorder closeAudioFile];
+```
+
+This will trigger the EZRecorder's delegate method:
+```objectivec
+- (void)recorderDidClose:(EZRecorder *)recorder
+{
+ recorder.delegate = nil;
+}
+```
+
+##Interface Components
+`EZAudio` currently offers two drop in audio waveform components that help simplify the process of visualizing audio.
+
+###EZAudioPlot
+Provides an audio waveform plot that uses CoreGraphics to perform the drawing. On iOS this is a subclass of UIView while on OSX this is a subclass of NSView. As of the 1.0.0 release, the waveforms are drawn using CALayers where compositing is done on the GPU. As a result, there have been some huge performance gains and CPU usage per real-time (i.e. 60 frames per second redrawing) plot is now about 2-3% CPU as opposed to the 20-30% we were experiencing before.
+
+*Relevant Example Projects*
+- EZAudioCoreGraphicsWaveformExample (iOS)
+- EZAudioCoreGraphicsWaveformExample (OSX)
+- EZAudioRecordExample (iOS)
+- EZAudioRecordExample (OSX)
+- EZAudioWaveformFromFileExample (iOS)
+- EZAudioWaveformFromFileExample (OSX)
+- EZAudioFFTExample (iOS)
+- EZAudioFFTExample (OSX)
+
+####Creating An Audio Plot
+
+You can create an audio plot in the interface builder by dragging in a UIView on iOS or an NSView on OSX onto your content area. Then change the custom class of the UIView/NSView to `EZAudioPlot`.
+
+![EZAudioPlotInterfaceBuilder](https://cloud.githubusercontent.com/assets/1275640/8532901/47d6f9ce-23e6-11e5-9766-d9969e630338.gif)
+
+Alternatively, you could also create the audio plot programmatically
+
+```objectivec
+// Programmatically create an audio plot
+EZAudioPlot *audioPlot = [[EZAudioPlot alloc] initWithFrame:self.view.frame];
+[self.view addSubview:audioPlot];
+```
+
+####Customizing The Audio Plot
+
+All plots offer the ability to change the background color, waveform color, plot type (buffer or rolling), toggle between filled and stroked, and toggle between mirrored and unmirrored (about the x-axis). For iOS colors are of the type UIColor while on OSX colors are of the type NSColor.
+
+```objectivec
+// Background color (use UIColor for iOS)
+audioPlot.backgroundColor = [NSColor colorWithCalibratedRed:0.816
+ green:0.349
+ blue:0.255
+ alpha:1];
+// Waveform color (use UIColor for iOS)
+audioPlot.color = [NSColor colorWithCalibratedRed:1.000
+ green:1.000
+ blue:1.000
+ alpha:1];
+// Plot type
+audioPlot.plotType = EZPlotTypeBuffer;
+// Fill
+audioPlot.shouldFill = YES;
+// Mirror
+audioPlot.shouldMirror = YES;
+```
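+
+Since the only platform difference in these snippets is the color class, one way to keep a shared view controller cross-platform is a compile-time check (a sketch using Apple's TARGET_OS_IPHONE macro from TargetConditionals.h):
+```objectivec
+#if TARGET_OS_IPHONE
+    audioPlot.backgroundColor = [UIColor colorWithRed:0.816 green:0.349 blue:0.255 alpha:1];
+    audioPlot.color = [UIColor whiteColor];
+#else
+    audioPlot.backgroundColor = [NSColor colorWithCalibratedRed:0.816 green:0.349 blue:0.255 alpha:1];
+    audioPlot.color = [NSColor whiteColor];
+#endif
+```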
+
+####IBInspectable Attributes
+
+Also, as of iOS 8 you can adjust the background color, color, gain, shouldFill, and shouldMirror parameters directly in the Interface Builder via the IBInspectable attributes:
+
+![EZAudioPlotInspectableAttributes](https://cloud.githubusercontent.com/assets/1275640/8530670/288840c8-23d7-11e5-954b-644ed4ed67b4.png)
+
+####Updating The Audio Plot
+
+All plots have only one update function, `updateBuffer:withBufferSize:`, which expects a float array and its length.
+```objectivec
+// The microphone component provides audio data to its delegate as an array of float buffer arrays.
+- (void) microphone:(EZMicrophone *)microphone
+ hasAudioReceived:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels
+{
+ /**
+ Update the audio plot using the float array provided by the microphone:
+ buffer[0] = left channel
+ buffer[1] = right channel
+ Note: Audio updates happen asynchronously so we need to make sure
+ to update the plot on the main thread
+ */
+ __weak typeof (self) weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [weakSelf.audioPlot updateBuffer:buffer[0] withBufferSize:bufferSize];
+ });
+}
+```
+
+###EZAudioPlotGL
+Provides an audio waveform plot that uses OpenGL to perform the drawing. The API for this class is exactly the same as that of the EZAudioPlot above. On iOS this is a subclass of the GLKView while on OSX this is a subclass of the NSOpenGLView. In most cases this is the plot you want to use: it's GPU-accelerated, can handle lots of points while displaying 60 frames per second (the EZAudioPlot starts to choke on anything greater than 1024), and performs amazingly on all devices. The only downside is that you can only have one OpenGL plot onscreen at a time. However, you can combine OpenGL plots with Core Graphics plots in the view hierarchy (see the EZAudioRecordExample for an example of how to do this).
+
+*Relevant Example Projects*
+- EZAudioOpenGLWaveformExample (iOS)
+- EZAudioOpenGLWaveformExample (OSX)
+- EZAudioPlayFileExample (iOS)
+- EZAudioPlayFileExample (OSX)
+- EZAudioRecordExample (iOS)
+- EZAudioRecordExample (OSX)
+- EZAudioPassThroughExample (iOS)
+- EZAudioPassThroughExample (OSX)
+
+####Creating An OpenGL Audio Plot
+
+You can create an audio plot in the interface builder by dragging in a UIView on iOS or an NSView on OSX onto your content area. Then change the custom class of the UIView/NSView to `EZAudioPlotGL`.
+
+![EZAudioPlotGLInterfaceBuilder](https://cloud.githubusercontent.com/assets/1275640/8532900/47d62346-23e6-11e5-8128-07c6641f4af8.gif)
+
+Alternatively, you could also create the `EZAudioPlotGL` programmatically
+```objectivec
+// Programmatically create an audio plot
+EZAudioPlotGL *audioPlotGL = [[EZAudioPlotGL alloc] initWithFrame:self.view.frame];
+[self.view addSubview:audioPlotGL];
+```
+
+####Customizing The OpenGL Audio Plot
+
+All plots offer the ability to change the background color, waveform color, plot type (buffer or rolling), toggle between filled and stroked, and toggle between mirrored and unmirrored (about the x-axis). For iOS colors are of the type UIColor while on OSX colors are of the type NSColor.
+```objectivec
+// Background color (use UIColor for iOS)
+audioPlotGL.backgroundColor = [NSColor colorWithCalibratedRed:0.816
+ green:0.349
+ blue:0.255
+ alpha:1];
+// Waveform color (use UIColor for iOS)
+audioPlotGL.color = [NSColor colorWithCalibratedRed:1.000
+ green:1.000
+ blue:1.000
+ alpha:1];
+// Plot type
+audioPlotGL.plotType = EZPlotTypeBuffer;
+// Fill
+audioPlotGL.shouldFill = YES;
+// Mirror
+audioPlotGL.shouldMirror = YES;
+```
+
+####IBInspectable Attributes
+
+Also, as of iOS 8 you can adjust the background color, color, gain, shouldFill, and shouldMirror parameters directly in the Interface Builder via the IBInspectable attributes:
+
+![EZAudioPlotGLInspectableAttributes](https://cloud.githubusercontent.com/assets/1275640/8530670/288840c8-23d7-11e5-954b-644ed4ed67b4.png)
+
+####Updating The OpenGL Audio Plot
+
+All plots have only one update function, `updateBuffer:withBufferSize:`, which expects a float array and its length.
+```objectivec
+// The microphone component provides audio data to its delegate as an array of float buffer arrays.
+- (void) microphone:(EZMicrophone *)microphone
+ hasAudioReceived:(float **)buffer
+ withBufferSize:(UInt32)bufferSize
+ withNumberOfChannels:(UInt32)numberOfChannels
+{
+ /**
+ Update the audio plot using the float array provided by the microphone:
+ buffer[0] = left channel
+ buffer[1] = right channel
+ Note: Audio updates happen asynchronously so we need to make sure
+ to update the plot on the main thread
+ */
+ __weak typeof (self) weakSelf = self;
+ dispatch_async(dispatch_get_main_queue(), ^{
+ [weakSelf.audioPlotGL updateBuffer:buffer[0] withBufferSize:bufferSize];
+ });
+}
+```
+
+##License
+EZAudio is available under the MIT license. See the LICENSE file for more info.
+
+##Contact & Contributors
+Syed Haris Ali
+www.syedharisali.com
+syedhali07[at]gmail.com
+
+##Acknowledgements
+The following people rock:
+- My brother, [Reza Ali](http://www.syedrezaali.com/), for walking me through all the gritty details of OpenGL and his constant encouragement through this journey to 1.0.0.
+- [Aure Prochazka](http://aure.com/) for his amazing work on [AudioKit](http://audiokit.io/) and his encouragement to bring EZAudio to 1.0.0
+- [Daniel Kennett](http://ikennd.ac/) for writing [this great blog post](http://ikennd.ac/blog/2012/04/augraph-basics-in-cocoalibspotify/) that inspired the rewrite of the `EZOutput` in 1.0.0.
+- [Michael Tyson](http://atastypixel.com/blog/) for creating the [TPCircularBuffer](http://atastypixel.com/blog/a-simple-fast-circular-buffer-implementation-for-audio-processing/) and all his contributions to the community including the Amazing Audio Engine, Audiobus, and all the tasty pixel blog posts.
+- Chris Adamson and Kevin Avila for writing the amazing [Learning Core Audio](http://www.amazon.com/Learning-Core-Audio-Hands-On-Programming/dp/0321636848) book.
diff --git a/Pods/Headers/Private/EZAudio/EZAudio.h b/Pods/Headers/Private/EZAudio/EZAudio.h
new file mode 120000
index 0000000..598d66d
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudio.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudio.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZAudioDevice.h b/Pods/Headers/Private/EZAudio/EZAudioDevice.h
new file mode 120000
index 0000000..4e74ae3
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudioDevice.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioDevice.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZAudioDisplayLink.h b/Pods/Headers/Private/EZAudio/EZAudioDisplayLink.h
new file mode 120000
index 0000000..9181b8c
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudioDisplayLink.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioDisplayLink.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZAudioFFT.h b/Pods/Headers/Private/EZAudio/EZAudioFFT.h
new file mode 120000
index 0000000..cd26060
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudioFFT.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioFFT.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZAudioFile.h b/Pods/Headers/Private/EZAudio/EZAudioFile.h
new file mode 120000
index 0000000..647862b
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudioFile.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioFile.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZAudioFloatConverter.h b/Pods/Headers/Private/EZAudio/EZAudioFloatConverter.h
new file mode 120000
index 0000000..a2f129c
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudioFloatConverter.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioFloatConverter.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZAudioFloatData.h b/Pods/Headers/Private/EZAudio/EZAudioFloatData.h
new file mode 120000
index 0000000..011d800
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudioFloatData.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioFloatData.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZAudioPlayer.h b/Pods/Headers/Private/EZAudio/EZAudioPlayer.h
new file mode 120000
index 0000000..6630f2d
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudioPlayer.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioPlayer.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZAudioPlot.h b/Pods/Headers/Private/EZAudio/EZAudioPlot.h
new file mode 120000
index 0000000..7ac6330
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudioPlot.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioPlot.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZAudioPlotGL.h b/Pods/Headers/Private/EZAudio/EZAudioPlotGL.h
new file mode 120000
index 0000000..29803ca
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudioPlotGL.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioPlotGL.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZAudioUtilities.h b/Pods/Headers/Private/EZAudio/EZAudioUtilities.h
new file mode 120000
index 0000000..f673e52
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZAudioUtilities.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioUtilities.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZMicrophone.h b/Pods/Headers/Private/EZAudio/EZMicrophone.h
new file mode 120000
index 0000000..b7c30b2
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZMicrophone.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZMicrophone.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZOutput.h b/Pods/Headers/Private/EZAudio/EZOutput.h
new file mode 120000
index 0000000..756c0ca
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZOutput.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZOutput.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZPlot.h b/Pods/Headers/Private/EZAudio/EZPlot.h
new file mode 120000
index 0000000..79ecc6a
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZPlot.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZPlot.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/EZRecorder.h b/Pods/Headers/Private/EZAudio/EZRecorder.h
new file mode 120000
index 0000000..a9dd341
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/EZRecorder.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZRecorder.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/EZAudio/TPCircularBuffer.h b/Pods/Headers/Private/EZAudio/TPCircularBuffer.h
new file mode 120000
index 0000000..044cbe5
--- /dev/null
+++ b/Pods/Headers/Private/EZAudio/TPCircularBuffer.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/TPCircularBuffer.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/TPCircularBuffer/TPCircularBuffer+AudioBufferList.h b/Pods/Headers/Private/TPCircularBuffer/TPCircularBuffer+AudioBufferList.h
new file mode 120000
index 0000000..f3fd2d1
--- /dev/null
+++ b/Pods/Headers/Private/TPCircularBuffer/TPCircularBuffer+AudioBufferList.h
@@ -0,0 +1 @@
+../../../TPCircularBuffer/TPCircularBuffer+AudioBufferList.h
\ No newline at end of file
diff --git a/Pods/Headers/Private/TPCircularBuffer/TPCircularBuffer.h b/Pods/Headers/Private/TPCircularBuffer/TPCircularBuffer.h
new file mode 120000
index 0000000..bff0736
--- /dev/null
+++ b/Pods/Headers/Private/TPCircularBuffer/TPCircularBuffer.h
@@ -0,0 +1 @@
+../../../TPCircularBuffer/TPCircularBuffer.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudio.h b/Pods/Headers/Public/EZAudio/EZAudio.h
new file mode 120000
index 0000000..598d66d
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudio.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudio.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudioDevice.h b/Pods/Headers/Public/EZAudio/EZAudioDevice.h
new file mode 120000
index 0000000..4e74ae3
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudioDevice.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioDevice.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudioDisplayLink.h b/Pods/Headers/Public/EZAudio/EZAudioDisplayLink.h
new file mode 120000
index 0000000..9181b8c
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudioDisplayLink.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioDisplayLink.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudioFFT.h b/Pods/Headers/Public/EZAudio/EZAudioFFT.h
new file mode 120000
index 0000000..cd26060
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudioFFT.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioFFT.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudioFile.h b/Pods/Headers/Public/EZAudio/EZAudioFile.h
new file mode 120000
index 0000000..647862b
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudioFile.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioFile.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudioFloatConverter.h b/Pods/Headers/Public/EZAudio/EZAudioFloatConverter.h
new file mode 120000
index 0000000..a2f129c
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudioFloatConverter.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioFloatConverter.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudioFloatData.h b/Pods/Headers/Public/EZAudio/EZAudioFloatData.h
new file mode 120000
index 0000000..011d800
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudioFloatData.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioFloatData.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudioPlayer.h b/Pods/Headers/Public/EZAudio/EZAudioPlayer.h
new file mode 120000
index 0000000..6630f2d
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudioPlayer.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioPlayer.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudioPlot.h b/Pods/Headers/Public/EZAudio/EZAudioPlot.h
new file mode 120000
index 0000000..7ac6330
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudioPlot.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioPlot.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudioPlotGL.h b/Pods/Headers/Public/EZAudio/EZAudioPlotGL.h
new file mode 120000
index 0000000..29803ca
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudioPlotGL.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioPlotGL.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZAudioUtilities.h b/Pods/Headers/Public/EZAudio/EZAudioUtilities.h
new file mode 120000
index 0000000..f673e52
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZAudioUtilities.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZAudioUtilities.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZMicrophone.h b/Pods/Headers/Public/EZAudio/EZMicrophone.h
new file mode 120000
index 0000000..b7c30b2
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZMicrophone.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZMicrophone.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZOutput.h b/Pods/Headers/Public/EZAudio/EZOutput.h
new file mode 120000
index 0000000..756c0ca
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZOutput.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZOutput.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZPlot.h b/Pods/Headers/Public/EZAudio/EZPlot.h
new file mode 120000
index 0000000..79ecc6a
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZPlot.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZPlot.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/EZRecorder.h b/Pods/Headers/Public/EZAudio/EZRecorder.h
new file mode 120000
index 0000000..a9dd341
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/EZRecorder.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/EZRecorder.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/EZAudio/TPCircularBuffer.h b/Pods/Headers/Public/EZAudio/TPCircularBuffer.h
new file mode 120000
index 0000000..044cbe5
--- /dev/null
+++ b/Pods/Headers/Public/EZAudio/TPCircularBuffer.h
@@ -0,0 +1 @@
+../../../EZAudio/EZAudio/TPCircularBuffer.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/TPCircularBuffer/TPCircularBuffer+AudioBufferList.h b/Pods/Headers/Public/TPCircularBuffer/TPCircularBuffer+AudioBufferList.h
new file mode 120000
index 0000000..f3fd2d1
--- /dev/null
+++ b/Pods/Headers/Public/TPCircularBuffer/TPCircularBuffer+AudioBufferList.h
@@ -0,0 +1 @@
+../../../TPCircularBuffer/TPCircularBuffer+AudioBufferList.h
\ No newline at end of file
diff --git a/Pods/Headers/Public/TPCircularBuffer/TPCircularBuffer.h b/Pods/Headers/Public/TPCircularBuffer/TPCircularBuffer.h
new file mode 120000
index 0000000..bff0736
--- /dev/null
+++ b/Pods/Headers/Public/TPCircularBuffer/TPCircularBuffer.h
@@ -0,0 +1 @@
+../../../TPCircularBuffer/TPCircularBuffer.h
\ No newline at end of file
diff --git a/Pods/Manifest.lock b/Pods/Manifest.lock
new file mode 100644
index 0000000..9d63fd0
--- /dev/null
+++ b/Pods/Manifest.lock
@@ -0,0 +1,17 @@
+PODS:
+ - EZAudio (1.1.2):
+ - EZAudio/Full (= 1.1.2)
+ - EZAudio/Core (1.1.2)
+ - EZAudio/Full (1.1.2):
+ - EZAudio/Core
+ - TPCircularBuffer (~> 0.0)
+ - TPCircularBuffer (0.0.1)
+
+DEPENDENCIES:
+ - EZAudio (~> 1.0)
+
+SPEC CHECKSUMS:
+ EZAudio: 01373de2b1257b77be1bf13753fb1c4ea8a9777e
+ TPCircularBuffer: 949e9632b9fb99b7274d7b2296ee22bff5841e35
+
+COCOAPODS: 0.38.2
diff --git a/Pods/Pods.xcodeproj/project.pbxproj b/Pods/Pods.xcodeproj/project.pbxproj
new file mode 100644
index 0000000..0e36978
--- /dev/null
+++ b/Pods/Pods.xcodeproj/project.pbxproj
@@ -0,0 +1,754 @@
+// !$*UTF8*$!
+{
+ archiveVersion = 1;
+ classes = {
+ };
+ objectVersion = 46;
+ objects = {
+
+/* Begin PBXBuildFile section */
+ 06A74F53E0DEF248FF1C0A6FF4FAA765 /* EZAudioPlayer.h in Headers */ = {isa = PBXBuildFile; fileRef = 844B106FF4C569539E5A0426A4526E5E /* EZAudioPlayer.h */; };
+ 0A71CFA297CC183516E2B9C57A2F89E3 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 6A2DE08414FB910A0D6A73A7F78CA58A /* Accelerate.framework */; };
+ 130E5B40FF201E525FBB7E0A67451502 /* EZPlot.m in Sources */ = {isa = PBXBuildFile; fileRef = CCF1B258111E9FF8257E3C66F10512D9 /* EZPlot.m */; };
+ 1537A380AE62ADD722D187D2820F8A40 /* EZAudioFile.h in Headers */ = {isa = PBXBuildFile; fileRef = 14C7CE6CD53EE2DDA03F09EC1B14A914 /* EZAudioFile.h */; };
+ 19A2733D8CE00C3A1F3B73A6C39AF426 /* EZAudioDisplayLink.m in Sources */ = {isa = PBXBuildFile; fileRef = C4FEF0E9A071405DD53A31886FEC441A /* EZAudioDisplayLink.m */; };
+ 1CA008A25FD4B771F716A889722FCC81 /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = BFB0C284967C65EC24416317A385BDDC /* TPCircularBuffer.h */; };
+ 1CB1173B299C5DC192DA8DBEDA6D94AF /* TPCircularBuffer.c in Sources */ = {isa = PBXBuildFile; fileRef = B6D7D5EA9C893AC884B64EE6A57F2C94 /* TPCircularBuffer.c */; settings = {COMPILER_FLAGS = "-fno-objc-arc"; }; };
+ 233322DE375E0D040E9A893D0BA8756C /* EZMicrophone.m in Sources */ = {isa = PBXBuildFile; fileRef = 1DE4577382021443B70F123CA3853D87 /* EZMicrophone.m */; };
+ 24264E3C7D01C0BB8CCEC9D77579D99A /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8F41F029247A05C6F5A5E3F5A5325EAE /* Cocoa.framework */; };
+ 26052838777F60085AAC9CCA05DBFD40 /* OpenGL.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8F2FEC89D100208A3895A6BCDE817A0F /* OpenGL.framework */; };
+ 278D8D6197A179598D94AA027204EC74 /* TPCircularBuffer.c in Sources */ = {isa = PBXBuildFile; fileRef = 963F2C1DABC27E60822C97FD5382B249 /* TPCircularBuffer.c */; };
+ 2905E3EA2BE236E54689FEDB6EF9E015 /* EZOutput.m in Sources */ = {isa = PBXBuildFile; fileRef = 590011A96F4A72B009EAD4718F1EECF6 /* EZOutput.m */; };
+ 3610309BF1CC23AF2DC36AB3A68CE337 /* EZAudioDevice.h in Headers */ = {isa = PBXBuildFile; fileRef = 787AE61236F89C47CCAA70801AEA4713 /* EZAudioDevice.h */; };
+ 3A2ECEBE893B32169F19CFAE1CC1C6A0 /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0E2FC5A2E795C1183762213CA023AD8D /* AudioToolbox.framework */; };
+ 3E52E7F31F272141188178A25D613451 /* EZAudioPlotGL.m in Sources */ = {isa = PBXBuildFile; fileRef = 47C38D41E4BC7EC5394D48E92F29C405 /* EZAudioPlotGL.m */; };
+ 49523A3464E33290FA8CF7A3D105EA00 /* Pods-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 37667675DCA0438A56029EAF5E76E332 /* Pods-dummy.m */; };
+ 499EA7D1801D16D70E5DB9C4D825B273 /* EZAudioFloatConverter.h in Headers */ = {isa = PBXBuildFile; fileRef = E12F545FEC55D53E560DB5960417428B /* EZAudioFloatConverter.h */; };
+ 508BC34C7BB7F403009E9D946A749D43 /* TPCircularBuffer.h in Headers */ = {isa = PBXBuildFile; fileRef = E5CBE14E324B53E5F90CAE6D468313B9 /* TPCircularBuffer.h */; };
+ 55F3F42276AA623B238A8FA61BA63F83 /* EZAudioPlot.m in Sources */ = {isa = PBXBuildFile; fileRef = CDD0E4F870AAEB4E350365D1435A29FD /* EZAudioPlot.m */; };
+ 61DCF329051FC1EAE4E08CFB6777AA14 /* EZAudio.h in Headers */ = {isa = PBXBuildFile; fileRef = 4FAF4579003965F0E816CA7FF5DE223E /* EZAudio.h */; };
+ 697560BC16510AAC9D8FAD110D6EFEF4 /* EZOutput.h in Headers */ = {isa = PBXBuildFile; fileRef = 863BB818EE9AB42F436FBF09DD4E29C3 /* EZOutput.h */; };
+ 80B57D9686CCBDEE952699E9553D657E /* EZAudio-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 8915450A58367586FAD67BF2B7E61D3A /* EZAudio-dummy.m */; };
+ 81064FB6352CD159DC6A219BFC954B6A /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 781DEBEC70AA10C809F57F5FD6FBDE59 /* QuartzCore.framework */; };
+ 82DE92221EAEC5DD3776ECC49C29E876 /* EZAudioDisplayLink.h in Headers */ = {isa = PBXBuildFile; fileRef = FCDCAB642609C427EAB6178F9EDEBEB8 /* EZAudioDisplayLink.h */; };
+ 83492108FAADC8A0F73C412236CDA802 /* EZAudioUtilities.m in Sources */ = {isa = PBXBuildFile; fileRef = AFEBF5162AE1FB8E27DAF895D52442D1 /* EZAudioUtilities.m */; };
+ 8CDC8F7E97E3CE10D759ACEB2D435730 /* EZMicrophone.h in Headers */ = {isa = PBXBuildFile; fileRef = DABA424080B7CE92AF7C8643CE888922 /* EZMicrophone.h */; };
+ 8E311F5BF0391701799F10DAB4D468C4 /* EZAudioFloatConverter.m in Sources */ = {isa = PBXBuildFile; fileRef = 70A24E9FE563D8CBC45732489F018523 /* EZAudioFloatConverter.m */; };
+ 917CEAE4E33F13EE110A5BAB886920D7 /* TPCircularBuffer+AudioBufferList.c in Sources */ = {isa = PBXBuildFile; fileRef = C0F95DBFB8D541402132026D9CB6A394 /* TPCircularBuffer+AudioBufferList.c */; settings = {COMPILER_FLAGS = "-fno-objc-arc"; }; };
+ 9F388E2071307CE335903429A973695C /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 0E2FC5A2E795C1183762213CA023AD8D /* AudioToolbox.framework */; };
+ A3EBD581DF2B7E61BDFE3DDF3CF11100 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8F41F029247A05C6F5A5E3F5A5325EAE /* Cocoa.framework */; };
+ AD094579E4296BD7B49A1C65964D976C /* EZAudioPlotGL.h in Headers */ = {isa = PBXBuildFile; fileRef = 8DFF7E5AF3E27A3C91FBBEEA54B2B2CC /* EZAudioPlotGL.h */; };
+ B02F9F7127FCEDBE422BA343543CD13B /* AudioUnit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = EDB31095661FB06008B71A10FDFCE1DE /* AudioUnit.framework */; };
+ B9C44C4BD0EE7D51FC6068C9471B53F6 /* Cocoa.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 8F41F029247A05C6F5A5E3F5A5325EAE /* Cocoa.framework */; };
+ BF6B39CD8384CC1D9F1A67D576609EDE /* EZAudioFFT.m in Sources */ = {isa = PBXBuildFile; fileRef = 151D54F7EC63CBF63692ADCC4B3AB33E /* EZAudioFFT.m */; };
+ C4A74A42538F2BF59773C3D6BD7BB718 /* EZAudioFloatData.h in Headers */ = {isa = PBXBuildFile; fileRef = C6C4C261CAA3904E0BB8A23A15DBB0E5 /* EZAudioFloatData.h */; };
+ C970DEAEE40C45F0557916CF1FB0FC0D /* TPCircularBuffer-dummy.m in Sources */ = {isa = PBXBuildFile; fileRef = 2A3B4B9447995D7156D6EB92A0AD7884 /* TPCircularBuffer-dummy.m */; };
+ CCA6CAC9BB85ACB024E4751CF23C667A /* CoreAudio.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 227EB3D8E99010EB8122ED41758726E7 /* CoreAudio.framework */; };
+ D3E818F0D8063BE0A8DEE916014CBF94 /* EZPlot.h in Headers */ = {isa = PBXBuildFile; fileRef = 6B0ADC5C5EB712C97BAFA377A1ED0881 /* EZPlot.h */; };
+ D4C45846527E574161FAE3939C239D95 /* EZAudioDevice.m in Sources */ = {isa = PBXBuildFile; fileRef = 0CC6034A2947D122CDF3C0D05B910F25 /* EZAudioDevice.m */; };
+ D6AAE83A701127166BF35C15FF5EDAEF /* EZAudioPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = 7F7FDE3D87A2BAAF2B4281FC17BAF2A1 /* EZAudioPlayer.m */; };
+ D82F82E49A37A01626A99E18C50ED9EB /* EZAudioPlot.h in Headers */ = {isa = PBXBuildFile; fileRef = 1E09593D194A043766C1484F3885203A /* EZAudioPlot.h */; };
+ D9409745ED8D1C5383FAD796F88520D3 /* TPCircularBuffer+AudioBufferList.h in Headers */ = {isa = PBXBuildFile; fileRef = D4E6AC08D1433B18AEEDD4F137EE0440 /* TPCircularBuffer+AudioBufferList.h */; };
+ E307BE58CF4E77DAF5A92E90B00E2677 /* GLKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D8E4F2C11F73BCCDA076CE24A9C7AF3D /* GLKit.framework */; };
+ E5062CBE3E38B424AA198C21027E940D /* EZAudio.m in Sources */ = {isa = PBXBuildFile; fileRef = 40024001F79580A97BC50A532CC89526 /* EZAudio.m */; };
+ EA361E063FA1FBA49CA610FB7EDD0673 /* EZAudioFloatData.m in Sources */ = {isa = PBXBuildFile; fileRef = D840441A6555D06A0576B9A5BB26A689 /* EZAudioFloatData.m */; };
+ EE2FD61C9807062FD42DF44BB8A36F2B /* EZRecorder.m in Sources */ = {isa = PBXBuildFile; fileRef = 4BE9CE16927BA14C193685220D7B230B /* EZRecorder.m */; };
+ EF339C16555549DC80F2FE17DBD600D0 /* EZAudioFFT.h in Headers */ = {isa = PBXBuildFile; fileRef = 219ED2A6C31ED3C5B63CBF81E4F277FB /* EZAudioFFT.h */; };
+ F588D945C81B1E47854D557602306C8B /* EZAudioFile.m in Sources */ = {isa = PBXBuildFile; fileRef = 20762864E6672E506A10B0CACD4573D6 /* EZAudioFile.m */; };
+ FB0A03FEB2987CC3013B170A64A64AA8 /* EZAudioUtilities.h in Headers */ = {isa = PBXBuildFile; fileRef = 964176089F2A3098DAB55DB346BA5D63 /* EZAudioUtilities.h */; };
+ FFB9B363A3BF2047880A988A35C16B00 /* EZRecorder.h in Headers */ = {isa = PBXBuildFile; fileRef = A52C36EBF666B2FB1FB5EF392D425774 /* EZRecorder.h */; };
+/* End PBXBuildFile section */
+
+/* Begin PBXContainerItemProxy section */
+ 03A270BE6D2D1E1C1A7182B5CA8BA48D /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = D41D8CD98F00B204E9800998ECF8427E /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = 7A0939DD735C48F8E58B6B8F2EE0E780;
+ remoteInfo = EZAudio;
+ };
+ 32A122D75BF14B0E439CB0803E7824D3 /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = D41D8CD98F00B204E9800998ECF8427E /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = 319D68C9944BC65DE9046E0DB29E4AC6;
+ remoteInfo = TPCircularBuffer;
+ };
+ FF99B2F9D4D7F1C16C4248F957F41550 /* PBXContainerItemProxy */ = {
+ isa = PBXContainerItemProxy;
+ containerPortal = D41D8CD98F00B204E9800998ECF8427E /* Project object */;
+ proxyType = 1;
+ remoteGlobalIDString = 319D68C9944BC65DE9046E0DB29E4AC6;
+ remoteInfo = TPCircularBuffer;
+ };
+/* End PBXContainerItemProxy section */
+
+/* Begin PBXFileReference section */
+ 0CC6034A2947D122CDF3C0D05B910F25 /* EZAudioDevice.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudioDevice.m; path = EZAudio/EZAudioDevice.m; sourceTree = ""; };
+ 0E2FC5A2E795C1183762213CA023AD8D /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.10.sdk/System/Library/Frameworks/AudioToolbox.framework; sourceTree = DEVELOPER_DIR; };
+ 14C7CE6CD53EE2DDA03F09EC1B14A914 /* EZAudioFile.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudioFile.h; path = EZAudio/EZAudioFile.h; sourceTree = ""; };
+ 151D54F7EC63CBF63692ADCC4B3AB33E /* EZAudioFFT.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudioFFT.m; path = EZAudio/EZAudioFFT.m; sourceTree = ""; };
+ 15A529C27057E4A57D259CBC6E6CE49C /* Pods-acknowledgements.markdown */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; path = "Pods-acknowledgements.markdown"; sourceTree = ""; };
+ 1DE4577382021443B70F123CA3853D87 /* EZMicrophone.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZMicrophone.m; path = EZAudio/EZMicrophone.m; sourceTree = ""; };
+ 1E09593D194A043766C1484F3885203A /* EZAudioPlot.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudioPlot.h; path = EZAudio/EZAudioPlot.h; sourceTree = ""; };
+ 20762864E6672E506A10B0CACD4573D6 /* EZAudioFile.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudioFile.m; path = EZAudio/EZAudioFile.m; sourceTree = ""; };
+ 219ED2A6C31ED3C5B63CBF81E4F277FB /* EZAudioFFT.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudioFFT.h; path = EZAudio/EZAudioFFT.h; sourceTree = ""; };
+ 227EB3D8E99010EB8122ED41758726E7 /* CoreAudio.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreAudio.framework; path = Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.10.sdk/System/Library/Frameworks/CoreAudio.framework; sourceTree = DEVELOPER_DIR; };
+ 26B7397D768C864F6AFCC95D262F8509 /* libEZAudio.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libEZAudio.a; sourceTree = BUILT_PRODUCTS_DIR; };
+ 2A3B4B9447995D7156D6EB92A0AD7884 /* TPCircularBuffer-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "TPCircularBuffer-dummy.m"; sourceTree = ""; };
+ 37667675DCA0438A56029EAF5E76E332 /* Pods-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "Pods-dummy.m"; sourceTree = ""; };
+ 40024001F79580A97BC50A532CC89526 /* EZAudio.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudio.m; path = EZAudio/EZAudio.m; sourceTree = ""; };
+ 47C38D41E4BC7EC5394D48E92F29C405 /* EZAudioPlotGL.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudioPlotGL.m; path = EZAudio/EZAudioPlotGL.m; sourceTree = ""; };
+ 4BE9CE16927BA14C193685220D7B230B /* EZRecorder.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZRecorder.m; path = EZAudio/EZRecorder.m; sourceTree = ""; };
+ 4FAF4579003965F0E816CA7FF5DE223E /* EZAudio.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudio.h; path = EZAudio/EZAudio.h; sourceTree = ""; };
+ 544B8F146570AC07F876B9DD3A4E81B7 /* Pods.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = Pods.debug.xcconfig; sourceTree = ""; };
+ 590011A96F4A72B009EAD4718F1EECF6 /* EZOutput.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZOutput.m; path = EZAudio/EZOutput.m; sourceTree = ""; };
+ 641AE05DD55E5E6AC1590CD7B4A18F97 /* Pods-resources.sh */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.script.sh; path = "Pods-resources.sh"; sourceTree = ""; };
+ 64C36E9C70B17E585E546DD7B1462E1F /* TPCircularBuffer-prefix.pch */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "TPCircularBuffer-prefix.pch"; sourceTree = ""; };
+ 6A2DE08414FB910A0D6A73A7F78CA58A /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.10.sdk/System/Library/Frameworks/Accelerate.framework; sourceTree = DEVELOPER_DIR; };
+ 6B0ADC5C5EB712C97BAFA377A1ED0881 /* EZPlot.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZPlot.h; path = EZAudio/EZPlot.h; sourceTree = ""; };
+ 70A24E9FE563D8CBC45732489F018523 /* EZAudioFloatConverter.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudioFloatConverter.m; path = EZAudio/EZAudioFloatConverter.m; sourceTree = ""; };
+ 781DEBEC70AA10C809F57F5FD6FBDE59 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.10.sdk/System/Library/Frameworks/QuartzCore.framework; sourceTree = DEVELOPER_DIR; };
+ 787AE61236F89C47CCAA70801AEA4713 /* EZAudioDevice.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudioDevice.h; path = EZAudio/EZAudioDevice.h; sourceTree = ""; };
+ 7F7FDE3D87A2BAAF2B4281FC17BAF2A1 /* EZAudioPlayer.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudioPlayer.m; path = EZAudio/EZAudioPlayer.m; sourceTree = ""; };
+ 844B106FF4C569539E5A0426A4526E5E /* EZAudioPlayer.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudioPlayer.h; path = EZAudio/EZAudioPlayer.h; sourceTree = ""; };
+ 863BB818EE9AB42F436FBF09DD4E29C3 /* EZOutput.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZOutput.h; path = EZAudio/EZOutput.h; sourceTree = ""; };
+ 8915450A58367586FAD67BF2B7E61D3A /* EZAudio-dummy.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; path = "EZAudio-dummy.m"; sourceTree = ""; };
+ 8A74E3C98D625EADC82DE68FFFA9AAE1 /* EZAudio-prefix.pch */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "EZAudio-prefix.pch"; sourceTree = ""; };
+ 8DFF7E5AF3E27A3C91FBBEEA54B2B2CC /* EZAudioPlotGL.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudioPlotGL.h; path = EZAudio/EZAudioPlotGL.h; sourceTree = ""; };
+ 8F2FEC89D100208A3895A6BCDE817A0F /* OpenGL.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGL.framework; path = Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.10.sdk/System/Library/Frameworks/OpenGL.framework; sourceTree = DEVELOPER_DIR; };
+ 8F41F029247A05C6F5A5E3F5A5325EAE /* Cocoa.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Cocoa.framework; path = Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.10.sdk/System/Library/Frameworks/Cocoa.framework; sourceTree = DEVELOPER_DIR; };
+ 93A46BE9B251972DAC715ED6E2E43FF2 /* libPods.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libPods.a; sourceTree = BUILT_PRODUCTS_DIR; };
+ 963F2C1DABC27E60822C97FD5382B249 /* TPCircularBuffer.c */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.c; name = TPCircularBuffer.c; path = EZAudio/TPCircularBuffer.c; sourceTree = ""; };
+ 964176089F2A3098DAB55DB346BA5D63 /* EZAudioUtilities.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudioUtilities.h; path = EZAudio/EZAudioUtilities.h; sourceTree = ""; };
+ 97DAF1294C5FF2B86B582EA47EFC2516 /* EZAudio-Private.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = "EZAudio-Private.xcconfig"; sourceTree = ""; };
+ 99ABE0B7A31DAE1500B1A2C4700005EE /* TPCircularBuffer-Private.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = "TPCircularBuffer-Private.xcconfig"; sourceTree = ""; };
+ A52C36EBF666B2FB1FB5EF392D425774 /* EZRecorder.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZRecorder.h; path = EZAudio/EZRecorder.h; sourceTree = ""; };
+ AFEBF5162AE1FB8E27DAF895D52442D1 /* EZAudioUtilities.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudioUtilities.m; path = EZAudio/EZAudioUtilities.m; sourceTree = ""; };
+ B6D7D5EA9C893AC884B64EE6A57F2C94 /* TPCircularBuffer.c */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.c; path = TPCircularBuffer.c; sourceTree = ""; };
+ BA6428E9F66FD5A23C0A2E06ED26CD2F /* Podfile */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text; name = Podfile; path = ../Podfile; sourceTree = SOURCE_ROOT; xcLanguageSpecificationIdentifier = xcode.lang.ruby; };
+ BF59BC15D23E1E1912C8F334E7236813 /* Pods-acknowledgements.plist */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.plist.xml; path = "Pods-acknowledgements.plist"; sourceTree = ""; };
+ BFB0C284967C65EC24416317A385BDDC /* TPCircularBuffer.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = TPCircularBuffer.h; path = EZAudio/TPCircularBuffer.h; sourceTree = ""; };
+ C0F95DBFB8D541402132026D9CB6A394 /* TPCircularBuffer+AudioBufferList.c */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.c; path = "TPCircularBuffer+AudioBufferList.c"; sourceTree = ""; };
+ C4FEF0E9A071405DD53A31886FEC441A /* EZAudioDisplayLink.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudioDisplayLink.m; path = EZAudio/EZAudioDisplayLink.m; sourceTree = ""; };
+ C6C4C261CAA3904E0BB8A23A15DBB0E5 /* EZAudioFloatData.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudioFloatData.h; path = EZAudio/EZAudioFloatData.h; sourceTree = ""; };
+ CCF1B258111E9FF8257E3C66F10512D9 /* EZPlot.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZPlot.m; path = EZAudio/EZPlot.m; sourceTree = ""; };
+ CDD0E4F870AAEB4E350365D1435A29FD /* EZAudioPlot.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudioPlot.m; path = EZAudio/EZAudioPlot.m; sourceTree = ""; };
+ D4E6AC08D1433B18AEEDD4F137EE0440 /* TPCircularBuffer+AudioBufferList.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = "TPCircularBuffer+AudioBufferList.h"; sourceTree = ""; };
+ D840441A6555D06A0576B9A5BB26A689 /* EZAudioFloatData.m */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.objc; name = EZAudioFloatData.m; path = EZAudio/EZAudioFloatData.m; sourceTree = ""; };
+ D8E4F2C11F73BCCDA076CE24A9C7AF3D /* GLKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = GLKit.framework; path = Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.10.sdk/System/Library/Frameworks/GLKit.framework; sourceTree = DEVELOPER_DIR; };
+ DABA424080B7CE92AF7C8643CE888922 /* EZMicrophone.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZMicrophone.h; path = EZAudio/EZMicrophone.h; sourceTree = ""; };
+ E12F545FEC55D53E560DB5960417428B /* EZAudioFloatConverter.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudioFloatConverter.h; path = EZAudio/EZAudioFloatConverter.h; sourceTree = ""; };
+ E5CBE14E324B53E5F90CAE6D468313B9 /* TPCircularBuffer.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; path = TPCircularBuffer.h; sourceTree = ""; };
+ E6D35893B4E44D5BE6918731CACC3316 /* libTPCircularBuffer.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = libTPCircularBuffer.a; sourceTree = BUILT_PRODUCTS_DIR; };
+ EDB31095661FB06008B71A10FDFCE1DE /* AudioUnit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioUnit.framework; path = Platforms/MacOSX.platform/Developer/SDKs/MacOSX10.10.sdk/System/Library/Frameworks/AudioUnit.framework; sourceTree = DEVELOPER_DIR; };
+ F071662B915CC4358D4A2E7D1E635B72 /* EZAudio.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = EZAudio.xcconfig; sourceTree = ""; };
+ F1FD87405313176581473390AD05B25E /* Pods.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = Pods.release.xcconfig; sourceTree = ""; };
+ F347F0066E1426853129E8202FB8D05E /* TPCircularBuffer.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; path = TPCircularBuffer.xcconfig; sourceTree = ""; };
+ FCDCAB642609C427EAB6178F9EDEBEB8 /* EZAudioDisplayLink.h */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = sourcecode.c.h; name = EZAudioDisplayLink.h; path = EZAudio/EZAudioDisplayLink.h; sourceTree = ""; };
+/* End PBXFileReference section */
+
+/* Begin PBXFrameworksBuildPhase section */
+ 36C6070658446AA1D7EF166D7944EF99 /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 3A2ECEBE893B32169F19CFAE1CC1C6A0 /* AudioToolbox.framework in Frameworks */,
+ 24264E3C7D01C0BB8CCEC9D77579D99A /* Cocoa.framework in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ C583BA495CAD8742F1691EB84015DC8B /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ A3EBD581DF2B7E61BDFE3DDF3CF11100 /* Cocoa.framework in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ D23507E7BA4D36DDA72A7485D7A1627B /* Frameworks */ = {
+ isa = PBXFrameworksBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 0A71CFA297CC183516E2B9C57A2F89E3 /* Accelerate.framework in Frameworks */,
+ 9F388E2071307CE335903429A973695C /* AudioToolbox.framework in Frameworks */,
+ B02F9F7127FCEDBE422BA343543CD13B /* AudioUnit.framework in Frameworks */,
+ B9C44C4BD0EE7D51FC6068C9471B53F6 /* Cocoa.framework in Frameworks */,
+ CCA6CAC9BB85ACB024E4751CF23C667A /* CoreAudio.framework in Frameworks */,
+ E307BE58CF4E77DAF5A92E90B00E2677 /* GLKit.framework in Frameworks */,
+ 26052838777F60085AAC9CCA05DBFD40 /* OpenGL.framework in Frameworks */,
+ 81064FB6352CD159DC6A219BFC954B6A /* QuartzCore.framework in Frameworks */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXFrameworksBuildPhase section */
+
+/* Begin PBXGroup section */
+ 0FA1E633E8BA0C4E0BE6C7B47BBBCBEB /* TPCircularBuffer */ = {
+ isa = PBXGroup;
+ children = (
+ B6D7D5EA9C893AC884B64EE6A57F2C94 /* TPCircularBuffer.c */,
+ E5CBE14E324B53E5F90CAE6D468313B9 /* TPCircularBuffer.h */,
+ C0F95DBFB8D541402132026D9CB6A394 /* TPCircularBuffer+AudioBufferList.c */,
+ D4E6AC08D1433B18AEEDD4F137EE0440 /* TPCircularBuffer+AudioBufferList.h */,
+ B2EC951CBFF179D5AD6357C640CBA03A /* Support Files */,
+ );
+ path = TPCircularBuffer;
+ sourceTree = "";
+ };
+ 1CBADE37138E64B73598236E7DBAD742 /* Core */ = {
+ isa = PBXGroup;
+ children = (
+ 4FAF4579003965F0E816CA7FF5DE223E /* EZAudio.h */,
+ 40024001F79580A97BC50A532CC89526 /* EZAudio.m */,
+ 787AE61236F89C47CCAA70801AEA4713 /* EZAudioDevice.h */,
+ 0CC6034A2947D122CDF3C0D05B910F25 /* EZAudioDevice.m */,
+ FCDCAB642609C427EAB6178F9EDEBEB8 /* EZAudioDisplayLink.h */,
+ C4FEF0E9A071405DD53A31886FEC441A /* EZAudioDisplayLink.m */,
+ 219ED2A6C31ED3C5B63CBF81E4F277FB /* EZAudioFFT.h */,
+ 151D54F7EC63CBF63692ADCC4B3AB33E /* EZAudioFFT.m */,
+ 14C7CE6CD53EE2DDA03F09EC1B14A914 /* EZAudioFile.h */,
+ 20762864E6672E506A10B0CACD4573D6 /* EZAudioFile.m */,
+ E12F545FEC55D53E560DB5960417428B /* EZAudioFloatConverter.h */,
+ 70A24E9FE563D8CBC45732489F018523 /* EZAudioFloatConverter.m */,
+ C6C4C261CAA3904E0BB8A23A15DBB0E5 /* EZAudioFloatData.h */,
+ D840441A6555D06A0576B9A5BB26A689 /* EZAudioFloatData.m */,
+ 844B106FF4C569539E5A0426A4526E5E /* EZAudioPlayer.h */,
+ 7F7FDE3D87A2BAAF2B4281FC17BAF2A1 /* EZAudioPlayer.m */,
+ 1E09593D194A043766C1484F3885203A /* EZAudioPlot.h */,
+ CDD0E4F870AAEB4E350365D1435A29FD /* EZAudioPlot.m */,
+ 8DFF7E5AF3E27A3C91FBBEEA54B2B2CC /* EZAudioPlotGL.h */,
+ 47C38D41E4BC7EC5394D48E92F29C405 /* EZAudioPlotGL.m */,
+ 964176089F2A3098DAB55DB346BA5D63 /* EZAudioUtilities.h */,
+ AFEBF5162AE1FB8E27DAF895D52442D1 /* EZAudioUtilities.m */,
+ DABA424080B7CE92AF7C8643CE888922 /* EZMicrophone.h */,
+ 1DE4577382021443B70F123CA3853D87 /* EZMicrophone.m */,
+ 863BB818EE9AB42F436FBF09DD4E29C3 /* EZOutput.h */,
+ 590011A96F4A72B009EAD4718F1EECF6 /* EZOutput.m */,
+ 6B0ADC5C5EB712C97BAFA377A1ED0881 /* EZPlot.h */,
+ CCF1B258111E9FF8257E3C66F10512D9 /* EZPlot.m */,
+ A52C36EBF666B2FB1FB5EF392D425774 /* EZRecorder.h */,
+ 4BE9CE16927BA14C193685220D7B230B /* EZRecorder.m */,
+ 963F2C1DABC27E60822C97FD5382B249 /* TPCircularBuffer.c */,
+ BFB0C284967C65EC24416317A385BDDC /* TPCircularBuffer.h */,
+ );
+ name = Core;
+ sourceTree = "";
+ };
+ 3BE797AC3D5981AB41053A2B3297A349 /* Support Files */ = {
+ isa = PBXGroup;
+ children = (
+ F071662B915CC4358D4A2E7D1E635B72 /* EZAudio.xcconfig */,
+ 97DAF1294C5FF2B86B582EA47EFC2516 /* EZAudio-Private.xcconfig */,
+ 8915450A58367586FAD67BF2B7E61D3A /* EZAudio-dummy.m */,
+ 8A74E3C98D625EADC82DE68FFFA9AAE1 /* EZAudio-prefix.pch */,
+ );
+ name = "Support Files";
+ path = "../Target Support Files/EZAudio";
+ sourceTree = "";
+ };
+ 42FF258D88DD3F656983A82D86E4D1E9 /* Frameworks */ = {
+ isa = PBXGroup;
+ children = (
+ 8737EFD840948358ADE23FA2473F7357 /* OS X */,
+ );
+ name = Frameworks;
+ sourceTree = "";
+ };
+ 7C57FA91A4188381F5A04A6F83E77D76 /* Pods */ = {
+ isa = PBXGroup;
+ children = (
+ D2CE101712085CE009D2CC07DF9D0EC7 /* EZAudio */,
+ 0FA1E633E8BA0C4E0BE6C7B47BBBCBEB /* TPCircularBuffer */,
+ );
+ name = Pods;
+ sourceTree = "";
+ };
+ 7DB346D0F39D3F0E887471402A8071AB = {
+ isa = PBXGroup;
+ children = (
+ BA6428E9F66FD5A23C0A2E06ED26CD2F /* Podfile */,
+ 42FF258D88DD3F656983A82D86E4D1E9 /* Frameworks */,
+ 7C57FA91A4188381F5A04A6F83E77D76 /* Pods */,
+ CCA510CFBEA2D207524CDA0D73C3B561 /* Products */,
+ D2411A5FE7F7A004607BED49990C37F4 /* Targets Support Files */,
+ );
+ sourceTree = "";
+ };
+ 8737EFD840948358ADE23FA2473F7357 /* OS X */ = {
+ isa = PBXGroup;
+ children = (
+ 6A2DE08414FB910A0D6A73A7F78CA58A /* Accelerate.framework */,
+ 0E2FC5A2E795C1183762213CA023AD8D /* AudioToolbox.framework */,
+ EDB31095661FB06008B71A10FDFCE1DE /* AudioUnit.framework */,
+ 8F41F029247A05C6F5A5E3F5A5325EAE /* Cocoa.framework */,
+ 227EB3D8E99010EB8122ED41758726E7 /* CoreAudio.framework */,
+ D8E4F2C11F73BCCDA076CE24A9C7AF3D /* GLKit.framework */,
+ 8F2FEC89D100208A3895A6BCDE817A0F /* OpenGL.framework */,
+ 781DEBEC70AA10C809F57F5FD6FBDE59 /* QuartzCore.framework */,
+ );
+ name = "OS X";
+ sourceTree = "";
+ };
+ 952EEBFAF8F7E620423C9F156F25A506 /* Pods */ = {
+ isa = PBXGroup;
+ children = (
+ 15A529C27057E4A57D259CBC6E6CE49C /* Pods-acknowledgements.markdown */,
+ BF59BC15D23E1E1912C8F334E7236813 /* Pods-acknowledgements.plist */,
+ 37667675DCA0438A56029EAF5E76E332 /* Pods-dummy.m */,
+ 641AE05DD55E5E6AC1590CD7B4A18F97 /* Pods-resources.sh */,
+ 544B8F146570AC07F876B9DD3A4E81B7 /* Pods.debug.xcconfig */,
+ F1FD87405313176581473390AD05B25E /* Pods.release.xcconfig */,
+ );
+ name = Pods;
+ path = "Target Support Files/Pods";
+ sourceTree = "";
+ };
+ B2EC951CBFF179D5AD6357C640CBA03A /* Support Files */ = {
+ isa = PBXGroup;
+ children = (
+ F347F0066E1426853129E8202FB8D05E /* TPCircularBuffer.xcconfig */,
+ 99ABE0B7A31DAE1500B1A2C4700005EE /* TPCircularBuffer-Private.xcconfig */,
+ 2A3B4B9447995D7156D6EB92A0AD7884 /* TPCircularBuffer-dummy.m */,
+ 64C36E9C70B17E585E546DD7B1462E1F /* TPCircularBuffer-prefix.pch */,
+ );
+ name = "Support Files";
+ path = "../Target Support Files/TPCircularBuffer";
+ sourceTree = "";
+ };
+ CCA510CFBEA2D207524CDA0D73C3B561 /* Products */ = {
+ isa = PBXGroup;
+ children = (
+ 26B7397D768C864F6AFCC95D262F8509 /* libEZAudio.a */,
+ 93A46BE9B251972DAC715ED6E2E43FF2 /* libPods.a */,
+ E6D35893B4E44D5BE6918731CACC3316 /* libTPCircularBuffer.a */,
+ );
+ name = Products;
+ sourceTree = "";
+ };
+ D2411A5FE7F7A004607BED49990C37F4 /* Targets Support Files */ = {
+ isa = PBXGroup;
+ children = (
+ 952EEBFAF8F7E620423C9F156F25A506 /* Pods */,
+ );
+ name = "Targets Support Files";
+ sourceTree = "";
+ };
+ D2CE101712085CE009D2CC07DF9D0EC7 /* EZAudio */ = {
+ isa = PBXGroup;
+ children = (
+ 1CBADE37138E64B73598236E7DBAD742 /* Core */,
+ 3BE797AC3D5981AB41053A2B3297A349 /* Support Files */,
+ );
+ path = EZAudio;
+ sourceTree = "";
+ };
+/* End PBXGroup section */
+
+/* Begin PBXHeadersBuildPhase section */
+ 37791B9BB517DB8A12BA5B1BD79C3411 /* Headers */ = {
+ isa = PBXHeadersBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ D9409745ED8D1C5383FAD796F88520D3 /* TPCircularBuffer+AudioBufferList.h in Headers */,
+ 508BC34C7BB7F403009E9D946A749D43 /* TPCircularBuffer.h in Headers */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ AFAA5E336599C62DA4CF24D63BC0D1F6 /* Headers */ = {
+ isa = PBXHeadersBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 61DCF329051FC1EAE4E08CFB6777AA14 /* EZAudio.h in Headers */,
+ 3610309BF1CC23AF2DC36AB3A68CE337 /* EZAudioDevice.h in Headers */,
+ 82DE92221EAEC5DD3776ECC49C29E876 /* EZAudioDisplayLink.h in Headers */,
+ EF339C16555549DC80F2FE17DBD600D0 /* EZAudioFFT.h in Headers */,
+ 1537A380AE62ADD722D187D2820F8A40 /* EZAudioFile.h in Headers */,
+ 499EA7D1801D16D70E5DB9C4D825B273 /* EZAudioFloatConverter.h in Headers */,
+ C4A74A42538F2BF59773C3D6BD7BB718 /* EZAudioFloatData.h in Headers */,
+ 06A74F53E0DEF248FF1C0A6FF4FAA765 /* EZAudioPlayer.h in Headers */,
+ D82F82E49A37A01626A99E18C50ED9EB /* EZAudioPlot.h in Headers */,
+ AD094579E4296BD7B49A1C65964D976C /* EZAudioPlotGL.h in Headers */,
+ FB0A03FEB2987CC3013B170A64A64AA8 /* EZAudioUtilities.h in Headers */,
+ 8CDC8F7E97E3CE10D759ACEB2D435730 /* EZMicrophone.h in Headers */,
+ 697560BC16510AAC9D8FAD110D6EFEF4 /* EZOutput.h in Headers */,
+ D3E818F0D8063BE0A8DEE916014CBF94 /* EZPlot.h in Headers */,
+ FFB9B363A3BF2047880A988A35C16B00 /* EZRecorder.h in Headers */,
+ 1CA008A25FD4B771F716A889722FCC81 /* TPCircularBuffer.h in Headers */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXHeadersBuildPhase section */
+
+/* Begin PBXNativeTarget section */
+ 319D68C9944BC65DE9046E0DB29E4AC6 /* TPCircularBuffer */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 147ED9421E6AA632ACA9D4FF2FDDFF0A /* Build configuration list for PBXNativeTarget "TPCircularBuffer" */;
+ buildPhases = (
+ 797008EC5ABAE9E6FB6C905CD90E2EDC /* Sources */,
+ 36C6070658446AA1D7EF166D7944EF99 /* Frameworks */,
+ 37791B9BB517DB8A12BA5B1BD79C3411 /* Headers */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ );
+ name = TPCircularBuffer;
+ productName = TPCircularBuffer;
+ productReference = E6D35893B4E44D5BE6918731CACC3316 /* libTPCircularBuffer.a */;
+ productType = "com.apple.product-type.library.static";
+ };
+ 7A0939DD735C48F8E58B6B8F2EE0E780 /* EZAudio */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = D411E84EB00AD857E0FB82E12C3B7531 /* Build configuration list for PBXNativeTarget "EZAudio" */;
+ buildPhases = (
+ 0568F8A7DFB9BDF46F283960BA80B8D5 /* Sources */,
+ D23507E7BA4D36DDA72A7485D7A1627B /* Frameworks */,
+ AFAA5E336599C62DA4CF24D63BC0D1F6 /* Headers */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ BEC36CF122276C8E2863501376A9DB00 /* PBXTargetDependency */,
+ );
+ name = EZAudio;
+ productName = EZAudio;
+ productReference = 26B7397D768C864F6AFCC95D262F8509 /* libEZAudio.a */;
+ productType = "com.apple.product-type.library.static";
+ };
+ 83500D944F7749EC24C11353544621D3 /* Pods */ = {
+ isa = PBXNativeTarget;
+ buildConfigurationList = 8B8242766CD7537F2433F62FBB88C408 /* Build configuration list for PBXNativeTarget "Pods" */;
+ buildPhases = (
+ 3C7F1E4DB0FEA7A239A64A3BF9FA6157 /* Sources */,
+ C583BA495CAD8742F1691EB84015DC8B /* Frameworks */,
+ );
+ buildRules = (
+ );
+ dependencies = (
+ FB6A79D1682BF929089870F00E4DA695 /* PBXTargetDependency */,
+ 95823029C89CFF7A9F4D86C051DCE075 /* PBXTargetDependency */,
+ );
+ name = Pods;
+ productName = Pods;
+ productReference = 93A46BE9B251972DAC715ED6E2E43FF2 /* libPods.a */;
+ productType = "com.apple.product-type.library.static";
+ };
+/* End PBXNativeTarget section */
+
+/* Begin PBXProject section */
+ D41D8CD98F00B204E9800998ECF8427E /* Project object */ = {
+ isa = PBXProject;
+ attributes = {
+ LastSwiftUpdateCheck = 0700;
+ LastUpgradeCheck = 0700;
+ };
+ buildConfigurationList = 2D8E8EC45A3A1A1D94AE762CB5028504 /* Build configuration list for PBXProject "Pods" */;
+ compatibilityVersion = "Xcode 3.2";
+ developmentRegion = English;
+ hasScannedForEncodings = 0;
+ knownRegions = (
+ en,
+ );
+ mainGroup = 7DB346D0F39D3F0E887471402A8071AB;
+ productRefGroup = CCA510CFBEA2D207524CDA0D73C3B561 /* Products */;
+ projectDirPath = "";
+ projectRoot = "";
+ targets = (
+ 7A0939DD735C48F8E58B6B8F2EE0E780 /* EZAudio */,
+ 83500D944F7749EC24C11353544621D3 /* Pods */,
+ 319D68C9944BC65DE9046E0DB29E4AC6 /* TPCircularBuffer */,
+ );
+ };
+/* End PBXProject section */
+
+/* Begin PBXSourcesBuildPhase section */
+ 0568F8A7DFB9BDF46F283960BA80B8D5 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 80B57D9686CCBDEE952699E9553D657E /* EZAudio-dummy.m in Sources */,
+ E5062CBE3E38B424AA198C21027E940D /* EZAudio.m in Sources */,
+ D4C45846527E574161FAE3939C239D95 /* EZAudioDevice.m in Sources */,
+ 19A2733D8CE00C3A1F3B73A6C39AF426 /* EZAudioDisplayLink.m in Sources */,
+ BF6B39CD8384CC1D9F1A67D576609EDE /* EZAudioFFT.m in Sources */,
+ F588D945C81B1E47854D557602306C8B /* EZAudioFile.m in Sources */,
+ 8E311F5BF0391701799F10DAB4D468C4 /* EZAudioFloatConverter.m in Sources */,
+ EA361E063FA1FBA49CA610FB7EDD0673 /* EZAudioFloatData.m in Sources */,
+ D6AAE83A701127166BF35C15FF5EDAEF /* EZAudioPlayer.m in Sources */,
+ 55F3F42276AA623B238A8FA61BA63F83 /* EZAudioPlot.m in Sources */,
+ 3E52E7F31F272141188178A25D613451 /* EZAudioPlotGL.m in Sources */,
+ 83492108FAADC8A0F73C412236CDA802 /* EZAudioUtilities.m in Sources */,
+ 233322DE375E0D040E9A893D0BA8756C /* EZMicrophone.m in Sources */,
+ 2905E3EA2BE236E54689FEDB6EF9E015 /* EZOutput.m in Sources */,
+ 130E5B40FF201E525FBB7E0A67451502 /* EZPlot.m in Sources */,
+ EE2FD61C9807062FD42DF44BB8A36F2B /* EZRecorder.m in Sources */,
+ 278D8D6197A179598D94AA027204EC74 /* TPCircularBuffer.c in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 3C7F1E4DB0FEA7A239A64A3BF9FA6157 /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 49523A3464E33290FA8CF7A3D105EA00 /* Pods-dummy.m in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+ 797008EC5ABAE9E6FB6C905CD90E2EDC /* Sources */ = {
+ isa = PBXSourcesBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ 917CEAE4E33F13EE110A5BAB886920D7 /* TPCircularBuffer+AudioBufferList.c in Sources */,
+ C970DEAEE40C45F0557916CF1FB0FC0D /* TPCircularBuffer-dummy.m in Sources */,
+ 1CB1173B299C5DC192DA8DBEDA6D94AF /* TPCircularBuffer.c in Sources */,
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ };
+/* End PBXSourcesBuildPhase section */
+
+/* Begin PBXTargetDependency section */
+ 95823029C89CFF7A9F4D86C051DCE075 /* PBXTargetDependency */ = {
+ isa = PBXTargetDependency;
+ name = TPCircularBuffer;
+ target = 319D68C9944BC65DE9046E0DB29E4AC6 /* TPCircularBuffer */;
+ targetProxy = 32A122D75BF14B0E439CB0803E7824D3 /* PBXContainerItemProxy */;
+ };
+ BEC36CF122276C8E2863501376A9DB00 /* PBXTargetDependency */ = {
+ isa = PBXTargetDependency;
+ name = TPCircularBuffer;
+ target = 319D68C9944BC65DE9046E0DB29E4AC6 /* TPCircularBuffer */;
+ targetProxy = FF99B2F9D4D7F1C16C4248F957F41550 /* PBXContainerItemProxy */;
+ };
+ FB6A79D1682BF929089870F00E4DA695 /* PBXTargetDependency */ = {
+ isa = PBXTargetDependency;
+ name = EZAudio;
+ target = 7A0939DD735C48F8E58B6B8F2EE0E780 /* EZAudio */;
+ targetProxy = 03A270BE6D2D1E1C1A7182B5CA8BA48D /* PBXContainerItemProxy */;
+ };
+/* End PBXTargetDependency section */
+
+/* Begin XCBuildConfiguration section */
+ 2892439A379E5AC5862F6E8933486566 /* Release */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 97DAF1294C5FF2B86B582EA47EFC2516 /* EZAudio-Private.xcconfig */;
+ buildSettings = {
+ ARCHS = "$(ARCHS_STANDARD)";
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ EXECUTABLE_PREFIX = lib;
+ GCC_PREFIX_HEADER = "Target Support Files/EZAudio/EZAudio-prefix.pch";
+ MACOSX_DEPLOYMENT_TARGET = 10.9;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ OTHER_LDFLAGS = "";
+ OTHER_LIBTOOLFLAGS = "";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = macosx;
+ };
+ name = Release;
+ };
+ 309C40EE9B9EBEE0C9AB7949AB114FF8 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 544B8F146570AC07F876B9DD3A4E81B7 /* Pods.debug.xcconfig */;
+ buildSettings = {
+ ARCHS = "$(ARCHS_STANDARD)";
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ EXECUTABLE_PREFIX = lib;
+ MACOSX_DEPLOYMENT_TARGET = 10.9;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ OTHER_LDFLAGS = "";
+ OTHER_LIBTOOLFLAGS = "";
+ PODS_ROOT = "$(SRCROOT)";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = macosx;
+ SKIP_INSTALL = YES;
+ };
+ name = Debug;
+ };
+ 3940F7C0895960E2D0D73A429BF2527A /* Debug */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 97DAF1294C5FF2B86B582EA47EFC2516 /* EZAudio-Private.xcconfig */;
+ buildSettings = {
+ ARCHS = "$(ARCHS_STANDARD)";
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ EXECUTABLE_PREFIX = lib;
+ GCC_PREFIX_HEADER = "Target Support Files/EZAudio/EZAudio-prefix.pch";
+ MACOSX_DEPLOYMENT_TARGET = 10.9;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ OTHER_LDFLAGS = "";
+ OTHER_LIBTOOLFLAGS = "";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = macosx;
+ };
+ name = Debug;
+ };
+ 40BF8CC841328169637DF4869A828B78 /* Release */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 99ABE0B7A31DAE1500B1A2C4700005EE /* TPCircularBuffer-Private.xcconfig */;
+ buildSettings = {
+ ARCHS = "$(ARCHS_STANDARD)";
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ EXECUTABLE_PREFIX = lib;
+ GCC_PREFIX_HEADER = "Target Support Files/TPCircularBuffer/TPCircularBuffer-prefix.pch";
+ MACOSX_DEPLOYMENT_TARGET = 10.9;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ OTHER_LDFLAGS = "";
+ OTHER_LIBTOOLFLAGS = "";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = macosx;
+ };
+ name = Release;
+ };
+ 52D4ECB745D1C035F94BD060EE7F6605 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ COPY_PHASE_STRIP = NO;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_DYNAMIC_NO_PIC = NO;
+ GCC_OPTIMIZATION_LEVEL = 0;
+ GCC_PREPROCESSOR_DEFINITIONS = (
+ "DEBUG=1",
+ "$(inherited)",
+ );
+ GCC_SYMBOLS_PRIVATE_EXTERN = NO;
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ MACOSX_DEPLOYMENT_TARGET = 10.9;
+ ONLY_ACTIVE_ARCH = YES;
+ STRIP_INSTALLED_PRODUCT = NO;
+ SYMROOT = "${SRCROOT}/../build";
+ };
+ name = Debug;
+ };
+ 76312231ECC72AB9067E3042F2C56DA4 /* Debug */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = 99ABE0B7A31DAE1500B1A2C4700005EE /* TPCircularBuffer-Private.xcconfig */;
+ buildSettings = {
+ ARCHS = "$(ARCHS_STANDARD)";
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ EXECUTABLE_PREFIX = lib;
+ GCC_PREFIX_HEADER = "Target Support Files/TPCircularBuffer/TPCircularBuffer-prefix.pch";
+ MACOSX_DEPLOYMENT_TARGET = 10.9;
+ MTL_ENABLE_DEBUG_INFO = YES;
+ OTHER_LDFLAGS = "";
+ OTHER_LIBTOOLFLAGS = "";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = macosx;
+ };
+ name = Debug;
+ };
+ A070F153A04DD66F4E492261894B37FF /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ ALWAYS_SEARCH_USER_PATHS = NO;
+ CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
+ CLANG_CXX_LIBRARY = "libc++";
+ CLANG_ENABLE_MODULES = YES;
+ CLANG_ENABLE_OBJC_ARC = YES;
+ CLANG_WARN_BOOL_CONVERSION = YES;
+ CLANG_WARN_CONSTANT_CONVERSION = YES;
+ CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES;
+ CLANG_WARN_EMPTY_BODY = YES;
+ CLANG_WARN_ENUM_CONVERSION = YES;
+ CLANG_WARN_INT_CONVERSION = YES;
+ CLANG_WARN_OBJC_ROOT_CLASS = YES;
+ CLANG_WARN_UNREACHABLE_CODE = YES;
+ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
+ COPY_PHASE_STRIP = YES;
+ ENABLE_NS_ASSERTIONS = NO;
+ GCC_C_LANGUAGE_STANDARD = gnu99;
+ GCC_PREPROCESSOR_DEFINITIONS = "RELEASE=1";
+ GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
+ GCC_WARN_ABOUT_RETURN_TYPE = YES;
+ GCC_WARN_UNDECLARED_SELECTOR = YES;
+ GCC_WARN_UNINITIALIZED_AUTOS = YES;
+ GCC_WARN_UNUSED_FUNCTION = YES;
+ GCC_WARN_UNUSED_VARIABLE = YES;
+ MACOSX_DEPLOYMENT_TARGET = 10.9;
+ STRIP_INSTALLED_PRODUCT = NO;
+ SYMROOT = "${SRCROOT}/../build";
+ VALIDATE_PRODUCT = YES;
+ };
+ name = Release;
+ };
+ BB324394BFBBB7855D03C7A166017380 /* Release */ = {
+ isa = XCBuildConfiguration;
+ baseConfigurationReference = F1FD87405313176581473390AD05B25E /* Pods.release.xcconfig */;
+ buildSettings = {
+ ARCHS = "$(ARCHS_STANDARD)";
+ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
+ ENABLE_STRICT_OBJC_MSGSEND = YES;
+ EXECUTABLE_PREFIX = lib;
+ MACOSX_DEPLOYMENT_TARGET = 10.9;
+ MTL_ENABLE_DEBUG_INFO = NO;
+ OTHER_LDFLAGS = "";
+ OTHER_LIBTOOLFLAGS = "";
+ PODS_ROOT = "$(SRCROOT)";
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ SDKROOT = macosx;
+ SKIP_INSTALL = YES;
+ };
+ name = Release;
+ };
+/* End XCBuildConfiguration section */
+
+/* Begin XCConfigurationList section */
+ 147ED9421E6AA632ACA9D4FF2FDDFF0A /* Build configuration list for PBXNativeTarget "TPCircularBuffer" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 76312231ECC72AB9067E3042F2C56DA4 /* Debug */,
+ 40BF8CC841328169637DF4869A828B78 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 2D8E8EC45A3A1A1D94AE762CB5028504 /* Build configuration list for PBXProject "Pods" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 52D4ECB745D1C035F94BD060EE7F6605 /* Debug */,
+ A070F153A04DD66F4E492261894B37FF /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ 8B8242766CD7537F2433F62FBB88C408 /* Build configuration list for PBXNativeTarget "Pods" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 309C40EE9B9EBEE0C9AB7949AB114FF8 /* Debug */,
+ BB324394BFBBB7855D03C7A166017380 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+ D411E84EB00AD857E0FB82E12C3B7531 /* Build configuration list for PBXNativeTarget "EZAudio" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ 3940F7C0895960E2D0D73A429BF2527A /* Debug */,
+ 2892439A379E5AC5862F6E8933486566 /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
+/* End XCConfigurationList section */
+ };
+ rootObject = D41D8CD98F00B204E9800998ECF8427E /* Project object */;
+}
diff --git a/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/EZAudio.xcscheme b/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/EZAudio.xcscheme
new file mode 100644
index 0000000..0a772a0
--- /dev/null
+++ b/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/EZAudio.xcscheme
@@ -0,0 +1,62 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/Pods.xcscheme b/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/Pods.xcscheme
new file mode 100644
index 0000000..cc74753
--- /dev/null
+++ b/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/Pods.xcscheme
@@ -0,0 +1,62 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/TPCircularBuffer.xcscheme b/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/TPCircularBuffer.xcscheme
new file mode 100644
index 0000000..24df489
--- /dev/null
+++ b/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/TPCircularBuffer.xcscheme
@@ -0,0 +1,62 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/xcschememanagement.plist b/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/xcschememanagement.plist
new file mode 100644
index 0000000..7cb42e1
--- /dev/null
+++ b/Pods/Pods.xcodeproj/xcuserdata/michael.xcuserdatad/xcschemes/xcschememanagement.plist
@@ -0,0 +1,42 @@
+
+
+
+
+ SchemeUserState
+
+ EZAudio.xcscheme
+
+ isShown
+
+
+ Pods.xcscheme
+
+ isShown
+
+
+ TPCircularBuffer.xcscheme
+
+ isShown
+
+
+
+ SuppressBuildableAutocreation
+
+ 319D68C9944BC65DE9046E0DB29E4AC6
+
+ primary
+
+
+ 7A0939DD735C48F8E58B6B8F2EE0E780
+
+ primary
+
+
+ 83500D944F7749EC24C11353544621D3
+
+ primary
+
+
+
+
+
diff --git a/Pods/TPCircularBuffer/README.markdown b/Pods/TPCircularBuffer/README.markdown
new file mode 100644
index 0000000..2d00785
--- /dev/null
+++ b/Pods/TPCircularBuffer/README.markdown
@@ -0,0 +1,35 @@
+A simple, fast circular buffer implementation for audio processing
+==================================================================
+
+A simple C implementation for a circular (ring) buffer. Thread-safe with a single producer and a single consumer, using OSAtomic.h primitives, and avoids any need for buffer wrapping logic by using a virtual memory map technique to place a virtual copy of the buffer straight after the end of the real buffer.
+
+Distributed under the [MIT license](http://opensource.org/licenses/mit-license.php)
+
+Usage
+-----
+
+Initialisation and cleanup: `TPCircularBufferInit` and `TPCircularBufferCleanup` to allocate and free resources.
+
+Producing: Use `TPCircularBufferHead` to get a pointer to write to the buffer, followed by `TPCircularBufferProduce` to submit the written data. `TPCircularBufferProduceBytes` is a convenience routine for writing data straight to the buffer.
+
+Consuming: Use `TPCircularBufferTail` to get a pointer to the next data to read, followed by `TPCircularBufferConsume` to free up the space once processed.
+
+TPCircularBuffer+AudioBufferList.(c,h) contain helper functions to queue and dequeue AudioBufferList
+structures. These will automatically adjust the mData fields of each buffer to point to 16-byte aligned
+regions within the circular buffer.
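+
+A minimal sketch of the basic produce/consume flow described above (the buffer size and sample payload here are illustrative, not part of the library):
+
+```c
+#include "TPCircularBuffer.h"
+
+TPCircularBuffer buffer;
+TPCircularBufferInit(&buffer, 16384);                 // allocate the ring buffer
+
+// Producer thread: copy some samples straight into the buffer
+float samples[256] = {0};
+TPCircularBufferProduceBytes(&buffer, samples, sizeof(samples));
+
+// Consumer thread: read whatever is available, then mark it consumed
+int32_t availableBytes;
+float *tail = (float*)TPCircularBufferTail(&buffer, &availableBytes);
+if ( tail && availableBytes > 0 ) {
+    // ... process availableBytes worth of data at tail ...
+    TPCircularBufferConsume(&buffer, availableBytes);
+}
+
+TPCircularBufferCleanup(&buffer);
+```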
+
+Thread safety
+-------------
+
+As long as you restrict multithreaded access to just one producer, and just one consumer, this utility should be thread safe.
+
+Only one shared variable is used (the buffer fill count), and OSAtomic primitives are used to write to this value to ensure atomicity.
+
+-----------------------------------------------------
+
+Virtual memory technique originally proposed by [Philip Howard](http://vrb.slashusr.org/), and [adapted to Darwin](http://www.snoize.com/Code/PlayBufferedSoundFile.tar.gz) by [Kurt Revis](http://www.snoize.com)
+
+See more info at [atastypixel.com](http://atastypixel.com/blog/a-simple-fast-circular-buffer-implementation-for-audio-processing/)
+
+Michael Tyson
+A Tasty Pixel
\ No newline at end of file
diff --git a/Pods/TPCircularBuffer/TPCircularBuffer+AudioBufferList.c b/Pods/TPCircularBuffer/TPCircularBuffer+AudioBufferList.c
new file mode 100644
index 0000000..ba3d1c6
--- /dev/null
+++ b/Pods/TPCircularBuffer/TPCircularBuffer+AudioBufferList.c
@@ -0,0 +1,237 @@
+//
+// TPCircularBuffer+AudioBufferList.c
+// Circular/Ring buffer implementation
+//
+// Created by Michael Tyson on 20/03/2012.
+// Copyright 2012 A Tasty Pixel. All rights reserved.
+//
+
+#include "TPCircularBuffer+AudioBufferList.h"
+#import <mach/mach_time.h>
+
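+// Cached seconds-to-host-ticks conversion factor, computed lazily from mach_timebase_info() on first use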
+static double __secondsToHostTicks = 0.0;
+
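+// Round val up to the next 16-byte boundary (returned unchanged if already aligned)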
+static inline long align16byte(long val) {
+ if ( val & (16-1) ) {
+ return val + (16 - (val & (16-1)));
+ }
+ return val;
+}
+
+static inline long min(long a, long b) {
+ return a > b ? b : a;
+}
+
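+// Reserve space at the head of the buffer for an AudioBufferList with numberOfBuffers buffers of
+// bytesPerBuffer bytes each; returns NULL if there is not enough free space available.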
+AudioBufferList *TPCircularBufferPrepareEmptyAudioBufferList(TPCircularBuffer *buffer, int numberOfBuffers, int bytesPerBuffer, const AudioTimeStamp *inTimestamp) {
+ int32_t availableBytes;
+ TPCircularBufferABLBlockHeader *block = (TPCircularBufferABLBlockHeader*)TPCircularBufferHead(buffer, &availableBytes);
+ if ( availableBytes < sizeof(TPCircularBufferABLBlockHeader)+((numberOfBuffers-1)*sizeof(AudioBuffer))+(numberOfBuffers*bytesPerBuffer) ) return NULL;
+
+ assert(!((unsigned long)block & 0xF) /* Beware unaligned accesses */);
+
+ if ( inTimestamp ) {
+ memcpy(&block->timestamp, inTimestamp, sizeof(AudioTimeStamp));
+ } else {
+ memset(&block->timestamp, 0, sizeof(AudioTimeStamp));
+ }
+
+ memset(&block->bufferList, 0, sizeof(AudioBufferList)+((numberOfBuffers-1)*sizeof(AudioBuffer)));
+ block->bufferList.mNumberBuffers = numberOfBuffers;
+
+ char *dataPtr = (char*)&block->bufferList + sizeof(AudioBufferList)+((numberOfBuffers-1)*sizeof(AudioBuffer));
+ for ( int i=0; i<numberOfBuffers; i++ ) {
+ // Each buffer's data starts on a 16-byte aligned boundary
+ dataPtr = (char*)align16byte((long)dataPtr);
+ if ( (dataPtr + bytesPerBuffer) - (char*)block > availableBytes ) {
+ return NULL;
+ }
+
+ block->bufferList.mBuffers[i].mData = dataPtr;
+ block->bufferList.mBuffers[i].mDataByteSize = bytesPerBuffer;
+ block->bufferList.mBuffers[i].mNumberChannels = 1;
+
+ dataPtr += bytesPerBuffer;
+ }
+
+ // Make sure whole buffer (including timestamp and length value) is 16-byte aligned in length
+ block->totalLength = align16byte(dataPtr - (char*)block);
+ if ( block->totalLength > availableBytes ) {
+ return NULL;
+ }
+
+ return &block->bufferList;
+}
+
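+// Mark the buffer list prepared above as filled: trim the block to the bytes actually used and submit it to the consumer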
+void TPCircularBufferProduceAudioBufferList(TPCircularBuffer *buffer, const AudioTimeStamp *inTimestamp) {
+ int32_t availableBytes;
+ TPCircularBufferABLBlockHeader *block = (TPCircularBufferABLBlockHeader*)TPCircularBufferHead(buffer, &availableBytes);
+
+ assert(!((unsigned long)block & 0xF) /* Beware unaligned accesses */);
+
+ if ( inTimestamp ) {
+ memcpy(&block->timestamp, inTimestamp, sizeof(AudioTimeStamp));
+ }
+
+ UInt32 calculatedLength = ((char*)block->bufferList.mBuffers[block->bufferList.mNumberBuffers-1].mData + block->bufferList.mBuffers[block->bufferList.mNumberBuffers-1].mDataByteSize) - (char*)block;
+
+ // Make sure whole buffer (including timestamp and length value) is 16-byte aligned in length
+ calculatedLength = align16byte(calculatedLength);
+
+ assert(calculatedLength <= block->totalLength && calculatedLength <= availableBytes);
+
+ block->totalLength = calculatedLength;
+
+ TPCircularBufferProduce(buffer, block->totalLength);
+}
+
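+// Convenience wrapper: copy an existing AudioBufferList (all of it, or just the first 'frames' frames) into the circular buffer in one call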
+bool TPCircularBufferCopyAudioBufferList(TPCircularBuffer *buffer, const AudioBufferList *inBufferList, const AudioTimeStamp *inTimestamp, UInt32 frames, AudioStreamBasicDescription *audioDescription) {
+ if ( frames == 0 ) return true;
+
+ int byteCount = inBufferList->mBuffers[0].mDataByteSize;
+ if ( frames != kTPCircularBufferCopyAll ) {
+ byteCount = frames * audioDescription->mBytesPerFrame;
+ assert(byteCount <= inBufferList->mBuffers[0].mDataByteSize);
+ }
+
+ AudioBufferList *bufferList = TPCircularBufferPrepareEmptyAudioBufferList(buffer, inBufferList->mNumberBuffers, byteCount, inTimestamp);
+ if ( !bufferList ) return false;
+
+ for ( int i=0; i<inBufferList->mNumberBuffers; i++ ) {
+ memcpy(bufferList->mBuffers[i].mData, inBufferList->mBuffers[i].mData, byteCount);
+ }
+
+ TPCircularBufferProduceAudioBufferList(buffer, NULL);
+
+ return true;
+}
+
+AudioBufferList *TPCircularBufferNextBufferListAfter(TPCircularBuffer *buffer, AudioBufferList *bufferList, AudioTimeStamp *outTimestamp) {
+ int32_t availableBytes;
+ void *tail = TPCircularBufferTail(buffer, &availableBytes);
+ void *end = (char*)tail + availableBytes;
+ assert((void*)bufferList > (void*)tail && (void*)bufferList < end);
+
+ TPCircularBufferABLBlockHeader *originalBlock = (TPCircularBufferABLBlockHeader*)((char*)bufferList - offsetof(TPCircularBufferABLBlockHeader, bufferList));
+ assert(!((unsigned long)originalBlock & 0xF) /* Beware unaligned accesses */);
+
+
+ TPCircularBufferABLBlockHeader *nextBlock = (TPCircularBufferABLBlockHeader*)((char*)originalBlock + originalBlock->totalLength);
+ if ( (void*)nextBlock >= end ) return NULL;
+ assert(!((unsigned long)nextBlock & 0xF) /* Beware unaligned accesses */);
+
+ if ( outTimestamp ) {
+ memcpy(outTimestamp, &nextBlock->timestamp, sizeof(AudioTimeStamp));
+ }
+
+ return &nextBlock->bufferList;
+}
+
+void TPCircularBufferConsumeNextBufferListPartial(TPCircularBuffer *buffer, int framesToConsume, AudioStreamBasicDescription *audioFormat) {
+ assert(framesToConsume >= 0);
+
+ int32_t dontcare;
+ TPCircularBufferABLBlockHeader *block = (TPCircularBufferABLBlockHeader*)TPCircularBufferTail(buffer, &dontcare);
+ if ( !block ) return;
+ assert(!((unsigned long)block & 0xF)); // Beware unaligned accesses
+
+ int bytesToConsume = min(framesToConsume * audioFormat->mBytesPerFrame, block->bufferList.mBuffers[0].mDataByteSize);
+
+ if ( bytesToConsume == block->bufferList.mBuffers[0].mDataByteSize ) {
+ TPCircularBufferConsumeNextBufferList(buffer);
+ return;
+ }
+
+ for ( int i=0; i<block->bufferList.mNumberBuffers; i++ ) {
+ assert(bytesToConsume <= block->bufferList.mBuffers[i].mDataByteSize && (char*)block->bufferList.mBuffers[i].mData + bytesToConsume <= (char*)block+block->totalLength);
+
+ block->bufferList.mBuffers[i].mData = (char*)block->bufferList.mBuffers[i].mData + bytesToConsume;
+ block->bufferList.mBuffers[i].mDataByteSize -= bytesToConsume;
+ }
+
+ if ( block->timestamp.mFlags & kAudioTimeStampSampleTimeValid ) {
+ block->timestamp.mSampleTime += framesToConsume;
+ }
+ if ( block->timestamp.mFlags & kAudioTimeStampHostTimeValid ) {
+ if ( !__secondsToHostTicks ) {
+ mach_timebase_info_data_t tinfo;
+ mach_timebase_info(&tinfo);
+ __secondsToHostTicks = 1.0 / (((double)tinfo.numer / tinfo.denom) * 1.0e-9);
+ }
+
+ block->timestamp.mHostTime += ((double)framesToConsume / audioFormat->mSampleRate) * __secondsToHostTicks;
+ }
+}
+
+void TPCircularBufferDequeueBufferListFrames(TPCircularBuffer *buffer, UInt32 *ioLengthInFrames, AudioBufferList *outputBufferList, AudioTimeStamp *outTimestamp, AudioStreamBasicDescription *audioFormat) {
+ bool hasTimestamp = false;
+ UInt32 bytesToGo = *ioLengthInFrames * audioFormat->mBytesPerFrame;
+ UInt32 bytesCopied = 0;
+ while ( bytesToGo > 0 ) {
+ AudioBufferList *bufferList = TPCircularBufferNextBufferList(buffer, !hasTimestamp ? outTimestamp : NULL);
+ TPCircularBufferABLBlockHeader *block = bufferList ? (TPCircularBufferABLBlockHeader*)((char*)bufferList - offsetof(TPCircularBufferABLBlockHeader, bufferList)) : NULL;
+ hasTimestamp = true;
+ if ( !bufferList ) break;
+
+ UInt32 bytesToCopy = min(bytesToGo, bufferList->mBuffers[0].mDataByteSize);
+
+ if ( outputBufferList ) {
+ for ( int i=0; i<outputBufferList->mNumberBuffers; i++ ) {
+ assert(bytesCopied + bytesToCopy <= outputBufferList->mBuffers[i].mDataByteSize);
+ assert((char*)bufferList->mBuffers[i].mData + bytesToCopy <= (char*)bufferList+(block?block->totalLength:0));
+
+ memcpy((char*)outputBufferList->mBuffers[i].mData + bytesCopied, bufferList->mBuffers[i].mData, bytesToCopy);
+ }
+ }
+
+ TPCircularBufferConsumeNextBufferListPartial(buffer, bytesToCopy/audioFormat->mBytesPerFrame, audioFormat);
+
+ bytesToGo -= bytesToCopy;
+ bytesCopied += bytesToCopy;
+ }
+
+ *ioLengthInFrames -= bytesToGo / audioFormat->mBytesPerFrame;
+
+ if ( outputBufferList ) {
+ for ( int i=0; i<outputBufferList->mNumberBuffers; i++ ) {
+ outputBufferList->mBuffers[i].mDataByteSize = *ioLengthInFrames * audioFormat->mBytesPerFrame;
+ }
+ }
+}
+
+static UInt32 _TPCircularBufferPeek(TPCircularBuffer *buffer, AudioTimeStamp *outTimestamp, AudioStreamBasicDescription *audioFormat, UInt32 contiguousToleranceSampleTime) {
+ int32_t availableBytes;
+ TPCircularBufferABLBlockHeader *block = (TPCircularBufferABLBlockHeader*)TPCircularBufferTail(buffer, &availableBytes);
+ if ( !block ) return 0;
+ assert(!((unsigned long)block & 0xF) /* Beware unaligned accesses */);
+
+ if ( outTimestamp ) {
+ memcpy(outTimestamp, &block->timestamp, sizeof(AudioTimeStamp));
+ }
+
+ void *end = (char*)block + availableBytes;
+
+ UInt32 byteCount = 0;
+
+ while ( 1 ) {
+ byteCount += block->bufferList.mBuffers[0].mDataByteSize;
+ TPCircularBufferABLBlockHeader *nextBlock = (TPCircularBufferABLBlockHeader*)((char*)block + block->totalLength);
+ if ( (void*)nextBlock >= end ||
+ (contiguousToleranceSampleTime != UINT32_MAX
+ && labs(nextBlock->timestamp.mSampleTime - (block->timestamp.mSampleTime + (block->bufferList.mBuffers[0].mDataByteSize / audioFormat->mBytesPerFrame))) > contiguousToleranceSampleTime) ) {
+ break;
+ }
+ assert(!((unsigned long)nextBlock & 0xF) /* Beware unaligned accesses */);
+ block = nextBlock;
+ }
+
+ return byteCount / audioFormat->mBytesPerFrame;
+}
+
+UInt32 TPCircularBufferPeek(TPCircularBuffer *buffer, AudioTimeStamp *outTimestamp, AudioStreamBasicDescription *audioFormat) {
+ return _TPCircularBufferPeek(buffer, outTimestamp, audioFormat, UINT32_MAX);
+}
+
+UInt32 TPCircularBufferPeekContiguous(TPCircularBuffer *buffer, AudioTimeStamp *outTimestamp, AudioStreamBasicDescription *audioFormat, UInt32 contiguousToleranceSampleTime) {
+ return _TPCircularBufferPeek(buffer, outTimestamp, audioFormat, contiguousToleranceSampleTime);
+}
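
The implementation above only defines the producer side of the AudioBufferList queue; as a quick orientation, here is a minimal producer-side sketch, assuming a non-interleaved float format. The gInputBuffer, SetUpInputBuffer, and EnqueueCapturedAudio names are illustrative, not part of TPCircularBuffer or EZAudio.

#include <stdbool.h>
#include <AudioToolbox/AudioToolbox.h>
#include "TPCircularBuffer+AudioBufferList.h"

static TPCircularBuffer gInputBuffer;

// Allocate roughly 1 MB of ring buffer; the implementation rounds the
// requested length up to whole VM pages.
static bool SetUpInputBuffer(void) {
    return TPCircularBufferInit(&gInputBuffer, 1024 * 1024);
}

// Called with freshly captured audio, e.g. from an input render callback.
// Copies the buffer list and its timestamp into the ring buffer; returns
// false (and the caller drops the audio) when there is not enough free space.
static bool EnqueueCapturedAudio(const AudioBufferList *abl,
                                 const AudioTimeStamp *timestamp,
                                 UInt32 frames,
                                 AudioStreamBasicDescription *format) {
    return TPCircularBufferCopyAudioBufferList(&gInputBuffer, abl, timestamp, frames, format);
}
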
diff --git a/Pods/TPCircularBuffer/TPCircularBuffer+AudioBufferList.h b/Pods/TPCircularBuffer/TPCircularBuffer+AudioBufferList.h
new file mode 100644
index 0000000..8167c25
--- /dev/null
+++ b/Pods/TPCircularBuffer/TPCircularBuffer+AudioBufferList.h
@@ -0,0 +1,167 @@
+//
+// TPCircularBuffer+AudioBufferList.h
+// Circular/Ring buffer implementation
+//
+// https://github.com/michaeltyson/TPCircularBuffer
+//
+// Created by Michael Tyson on 20/03/2012.
+// Copyright 2012 A Tasty Pixel. All rights reserved.
+//
+
+#ifndef TPCircularBuffer_AudioBufferList_h
+#define TPCircularBuffer_AudioBufferList_h
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "TPCircularBuffer.h"
+#include <AudioToolbox/AudioToolbox.h>
+
+#define kTPCircularBufferCopyAll UINT32_MAX
+
+typedef struct {
+ AudioTimeStamp timestamp;
+ UInt32 totalLength;
+ AudioBufferList bufferList;
+} TPCircularBufferABLBlockHeader;
+
+
+/*!
+ * Prepare an empty buffer list, stored on the circular buffer
+ *
+ * @param buffer Circular buffer
+ * @param numberOfBuffers The number of buffers to be contained within the buffer list
+ * @param bytesPerBuffer The number of bytes to store for each buffer
+ * @param timestamp The timestamp associated with the buffer, or NULL. Note that you can also pass a timestamp into TPCircularBufferProduceAudioBufferList, to set it there instead.
+ * @return The empty buffer list, or NULL if circular buffer has insufficient space
+ */
+AudioBufferList *TPCircularBufferPrepareEmptyAudioBufferList(TPCircularBuffer *buffer, int numberOfBuffers, int bytesPerBuffer, const AudioTimeStamp *timestamp);
+
+/*!
+ * Mark next audio buffer list as ready for reading
+ *
+ * This marks the audio buffer list prepared using TPCircularBufferPrepareEmptyAudioBufferList
+ * as ready for reading. You must not call this function without first calling
+ * TPCircularBufferPrepareEmptyAudioBufferList.
+ *
+ * @param buffer Circular buffer
+ * @param timestamp The timestamp associated with the buffer, or NULL to leave as-is. Note that you can also pass a timestamp into TPCircularBufferPrepareEmptyAudioBufferList, to set it there instead.
+ */
+void TPCircularBufferProduceAudioBufferList(TPCircularBuffer *buffer, const AudioTimeStamp *inTimestamp);
+
+/*!
+ * Copy the audio buffer list onto the buffer
+ *
+ * @param buffer Circular buffer
+ * @param bufferList Buffer list containing audio to copy to buffer
+ * @param timestamp The timestamp associated with the buffer, or NULL
+ * @param frames Length of audio in frames. Specify kTPCircularBufferCopyAll to copy the whole buffer (audioFormat can be NULL, in this case)
+ * @param audioFormat The AudioStreamBasicDescription describing the audio, or NULL if you specify kTPCircularBufferCopyAll to the `frames` argument
+ * @return YES if buffer list was successfully copied; NO if there was insufficient space
+ */
+bool TPCircularBufferCopyAudioBufferList(TPCircularBuffer *buffer, const AudioBufferList *bufferList, const AudioTimeStamp *timestamp, UInt32 frames, AudioStreamBasicDescription *audioFormat);
+
+/*!
+ * Get a pointer to the next stored buffer list
+ *
+ * @param buffer Circular buffer
+ * @param outTimestamp On output, if not NULL, the timestamp corresponding to the buffer
+ * @return Pointer to the next buffer list in the buffer
+ */
+static __inline__ __attribute__((always_inline)) AudioBufferList *TPCircularBufferNextBufferList(TPCircularBuffer *buffer, AudioTimeStamp *outTimestamp) {
+ int32_t dontcare; // Length of segment is contained within buffer list, so we can ignore this
+ TPCircularBufferABLBlockHeader *block = TPCircularBufferTail(buffer, &dontcare);
+ if ( !block ) {
+ if ( outTimestamp ) {
+ memset(outTimestamp, 0, sizeof(AudioTimeStamp));
+ }
+ return NULL;
+ }
+ if ( outTimestamp ) {
+ memcpy(outTimestamp, &block->timestamp, sizeof(AudioTimeStamp));
+ }
+ return &block->bufferList;
+}
+
+/*!
+ * Get a pointer to the next stored buffer list after the given one
+ *
+ * @param buffer Circular buffer
+ * @param bufferList Preceding buffer list
+ * @param outTimestamp On output, if not NULL, the timestamp corresponding to the buffer
+ * @return Pointer to the next buffer list in the buffer, or NULL
+ */
+AudioBufferList *TPCircularBufferNextBufferListAfter(TPCircularBuffer *buffer, AudioBufferList *bufferList, AudioTimeStamp *outTimestamp);
+
+/*!
+ * Consume the next buffer list
+ *
+ * @param buffer Circular buffer
+ */
+static __inline__ __attribute__((always_inline)) void TPCircularBufferConsumeNextBufferList(TPCircularBuffer *buffer) {
+ int32_t dontcare;
+ TPCircularBufferABLBlockHeader *block = TPCircularBufferTail(buffer, &dontcare);
+ if ( !block ) return;
+ TPCircularBufferConsume(buffer, block->totalLength);
+}
+
+/*!
+ * Consume a portion of the next buffer list
+ *
+ * This will also increment the sample time and host time portions of the timestamp of
+ * the buffer list, if present.
+ *
+ * @param buffer Circular buffer
+ * @param framesToConsume The number of frames to consume from the buffer list
+ * @param audioFormat The AudioStreamBasicDescription describing the audio
+ */
+void TPCircularBufferConsumeNextBufferListPartial(TPCircularBuffer *buffer, int framesToConsume, AudioStreamBasicDescription *audioFormat);
+
+/*!
+ * Consume a certain number of frames from the buffer, possibly from multiple queued buffer lists
+ *
+ * Copies the given number of frames from the buffer into outputBufferList, of the
+ * given audio description, then consumes the audio buffers. If an audio buffer has
+ * not been entirely consumed, then updates the queued buffer list structure to point
+ * to the unconsumed data only.
+ *
+ * @param buffer Circular buffer
+ * @param ioLengthInFrames On input, the number of frames in the given audio format to consume; on output, the number of frames provided
+ * @param outputBufferList The buffer list to copy audio to, or NULL to discard audio. If not NULL, the structure must be initialised properly, and the mData pointers must not be NULL.
+ * @param outTimestamp On output, if not NULL, the timestamp corresponding to the first audio frame returned
+ * @param audioFormat The format of the audio stored in the buffer
+ */
+void TPCircularBufferDequeueBufferListFrames(TPCircularBuffer *buffer, UInt32 *ioLengthInFrames, AudioBufferList *outputBufferList, AudioTimeStamp *outTimestamp, AudioStreamBasicDescription *audioFormat);
+
+/*!
+ * Determine how many frames of audio are buffered
+ *
+ * Given the provided audio format, determines the frame count of all queued buffers
+ *
+ * @param buffer Circular buffer
+ * @param outTimestamp On output, if not NULL, the timestamp corresponding to the first audio frame returned
+ * @param audioFormat The format of the audio stored in the buffer
+ * @return The number of frames in the given audio format that are in the buffer
+ */
+UInt32 TPCircularBufferPeek(TPCircularBuffer *buffer, AudioTimeStamp *outTimestamp, AudioStreamBasicDescription *audioFormat);
+
+/*!
+ * Determine how many contiguous frames of audio are buffered
+ *
+ * Given the provided audio format, determines the frame count of all queued buffers that are contiguous,
+ * given their corresponding timestamps (sample time).
+ *
+ * @param buffer Circular buffer
+ * @param outTimestamp On output, if not NULL, the timestamp corresponding to the first audio frame returned
+ * @param audioFormat The format of the audio stored in the buffer
+ * @param contiguousToleranceSampleTime The number of samples of discrepancy to tolerate
+ * @return The number of frames in the given audio format that are in the buffer
+ */
+UInt32 TPCircularBufferPeekContiguous(TPCircularBuffer *buffer, AudioTimeStamp *outTimestamp, AudioStreamBasicDescription *audioFormat, UInt32 contiguousToleranceSampleTime);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
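
To complement the producer-side sketch earlier, here is a hedged consumer-side sketch using the peek/dequeue calls documented in this header, assuming the same stream format used when the audio was enqueued. gInputBuffer and gStreamFormat are illustrative globals; in a real app they would live wherever the render callback can reach them.

#include <string.h>
#include <AudioToolbox/AudioToolbox.h>
#include "TPCircularBuffer+AudioBufferList.h"

extern TPCircularBuffer gInputBuffer;
extern AudioStreamBasicDescription gStreamFormat;

// Fill an output AudioBufferList from the ring buffer inside a render callback.
static void FillOutput(AudioBufferList *ioData, UInt32 inNumberFrames) {
    // Zero first so any shortfall plays back as silence.
    for ( UInt32 i = 0; i < ioData->mNumberBuffers; i++ ) {
        memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
    }

    // Only request as many frames as are actually queued.
    UInt32 available = TPCircularBufferPeek(&gInputBuffer, NULL, &gStreamFormat);
    UInt32 frames = available < inNumberFrames ? available : inNumberFrames;
    if ( frames == 0 ) return;

    AudioTimeStamp timestamp;
    // On return, frames holds the count actually copied, the mDataByteSize
    // fields of ioData are rewritten to match, and consumed blocks (possibly
    // partial ones) are released from the ring buffer.
    TPCircularBufferDequeueBufferListFrames(&gInputBuffer, &frames, ioData, &timestamp, &gStreamFormat);
}
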
diff --git a/Pods/TPCircularBuffer/TPCircularBuffer.c b/Pods/TPCircularBuffer/TPCircularBuffer.c
new file mode 100644
index 0000000..803d731
--- /dev/null
+++ b/Pods/TPCircularBuffer/TPCircularBuffer.c
@@ -0,0 +1,115 @@
+//
+// TPCircularBuffer.c
+// Circular/Ring buffer implementation
+//
+// Created by Michael Tyson on 10/12/2011.
+// Copyright 2011-2012 A Tasty Pixel. All rights reserved.
+
+
+#include "TPCircularBuffer.h"
+#include <mach/mach.h>
+#include <stdio.h>
+
+#define reportResult(result,operation) (_reportResult((result),(operation),strrchr(__FILE__, '/')+1,__LINE__))
+static inline bool _reportResult(kern_return_t result, const char *operation, const char* file, int line) {
+ if ( result != ERR_SUCCESS ) {
+ printf("%s:%d: %s: %s\n", file, line, operation, mach_error_string(result));
+ return false;
+ }
+ return true;
+}
+
+bool TPCircularBufferInit(TPCircularBuffer *buffer, int length) {
+
+ // Keep trying until we get our buffer, needed to handle race conditions
+ int retries = 3;
+ while ( true ) {
+
+ buffer->length = round_page(length); // We need whole page sizes
+
+ // Temporarily allocate twice the length, so we have the contiguous address space to
+ // support a second instance of the buffer directly after
+ vm_address_t bufferAddress;
+ kern_return_t result = vm_allocate(mach_task_self(),
+ &bufferAddress,
+ buffer->length * 2,
+ VM_FLAGS_ANYWHERE); // allocate anywhere it'll fit
+ if ( result != ERR_SUCCESS ) {
+ if ( retries-- == 0 ) {
+ reportResult(result, "Buffer allocation");
+ return false;
+ }
+ // Try again if we fail
+ continue;
+ }
+
+ // Now replace the second half of the allocation with a virtual copy of the first half. Deallocate the second half...
+ result = vm_deallocate(mach_task_self(),
+ bufferAddress + buffer->length,
+ buffer->length);
+ if ( result != ERR_SUCCESS ) {
+ if ( retries-- == 0 ) {
+ reportResult(result, "Buffer deallocation");
+ return false;
+ }
+ // If this fails somehow, deallocate the whole region and try again
+ vm_deallocate(mach_task_self(), bufferAddress, buffer->length);
+ continue;
+ }
+
+ // Re-map the buffer to the address space immediately after the buffer
+ vm_address_t virtualAddress = bufferAddress + buffer->length;
+ vm_prot_t cur_prot, max_prot;
+ result = vm_remap(mach_task_self(),
+ &virtualAddress, // mirror target
+ buffer->length, // size of mirror
+ 0, // auto alignment
+ 0, // force remapping to virtualAddress
+ mach_task_self(), // same task
+ bufferAddress, // mirror source
+ 0, // MAP READ-WRITE, NOT COPY
+ &cur_prot, // unused protection struct
+ &max_prot, // unused protection struct
+ VM_INHERIT_DEFAULT);
+ if ( result != ERR_SUCCESS ) {
+ if ( retries-- == 0 ) {
+ reportResult(result, "Remap buffer memory");
+ return false;
+ }
+ // If this remap failed, we hit a race condition, so deallocate and try again
+ vm_deallocate(mach_task_self(), bufferAddress, buffer->length);
+ continue;
+ }
+
+ if ( virtualAddress != bufferAddress+buffer->length ) {
+ // If the memory is not contiguous, clean up both allocated buffers and try again
+ if ( retries-- == 0 ) {
+ printf("Couldn't map buffer memory to end of buffer\n");
+ return false;
+ }
+
+ vm_deallocate(mach_task_self(), virtualAddress, buffer->length);
+ vm_deallocate(mach_task_self(), bufferAddress, buffer->length);
+ continue;
+ }
+
+ buffer->buffer = (void*)bufferAddress;
+ buffer->fillCount = 0;
+ buffer->head = buffer->tail = 0;
+
+ return true;
+ }
+ return false;
+}
+
+void TPCircularBufferCleanup(TPCircularBuffer *buffer) {
+ vm_deallocate(mach_task_self(), (vm_address_t)buffer->buffer, buffer->length * 2);
+ memset(buffer, 0, sizeof(TPCircularBuffer));
+}
+
+void TPCircularBufferClear(TPCircularBuffer *buffer) {
+ int32_t fillCount;
+ if ( TPCircularBufferTail(buffer, &fillCount) ) {
+ TPCircularBufferConsume(buffer, fillCount);
+ }
+}
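
A small, hedged demonstration of the mirrored-page trick the init code above sets up: because a virtual copy of the buffer is remapped immediately after it, a write obtained from TPCircularBufferHead (or performed via TPCircularBufferProduceBytes) never has to be split at the physical end of the buffer. This is a standalone sanity-check program, not part of the pod.

#include <stdio.h>
#include <string.h>
#include "TPCircularBuffer.h"

int main(void) {
    TPCircularBuffer rb;
    if ( !TPCircularBufferInit(&rb, 4096) ) return 1;   // rounded up to whole pages

    char chunk[1000];
    memset(chunk, 'x', sizeof(chunk));

    // Produce and consume repeatedly so head and tail walk across the page
    // boundary; every write and read remains a single contiguous memcpy.
    for ( int i = 0; i < 50; i++ ) {
        if ( !TPCircularBufferProduceBytes(&rb, chunk, sizeof(chunk)) ) break;

        int32_t available;
        void *tail = TPCircularBufferTail(&rb, &available);
        if ( tail ) {
            TPCircularBufferConsume(&rb, available);    // release what was "read"
        }
    }

    printf("fill count after loop: %d\n", rb.fillCount);  // expect 0
    TPCircularBufferCleanup(&rb);
    return 0;
}
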
diff --git a/Pods/TPCircularBuffer/TPCircularBuffer.h b/Pods/TPCircularBuffer/TPCircularBuffer.h
new file mode 100644
index 0000000..9112836
--- /dev/null
+++ b/Pods/TPCircularBuffer/TPCircularBuffer.h
@@ -0,0 +1,170 @@
+//
+// TPCircularBuffer.h
+// Circular/Ring buffer implementation
+//
+// https://github.com/michaeltyson/TPCircularBuffer
+//
+// Created by Michael Tyson on 10/12/2011.
+// Copyright 2011-2012 A Tasty Pixel. All rights reserved.
+//
+//
+// This implementation makes use of a virtual memory mapping technique that inserts a virtual copy
+// of the buffer memory directly after the buffer's end, negating the need for any buffer wrap-around
+// logic. Clients can simply use the returned memory address as if it were contiguous space.
+//
+// The implementation is thread-safe in the case of a single producer and single consumer.
+//
+// Virtual memory technique originally proposed by Philip Howard (http://vrb.slashusr.org/), and
+// adapted to Darwin by Kurt Revis (http://www.snoize.com,
+// http://www.snoize.com/Code/PlayBufferedSoundFile.tar.gz)
+//
+
+#ifndef TPCircularBuffer_h
+#define TPCircularBuffer_h
+
+#include <libkern/OSAtomic.h>
+#include <string.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+ void *buffer;
+ int32_t length;
+ int32_t tail;
+ int32_t head;
+ volatile int32_t fillCount;
+} TPCircularBuffer;
+
+/*!
+ * Initialise buffer
+ *
+ * Note that the length is advisory only: Because of the way the
+ * memory mirroring technique works, the true buffer length will
+ * be multiples of the device page size (e.g. 4096 bytes)
+ *
+ * @param buffer Circular buffer
+ * @param length Length of buffer
+ */
+bool TPCircularBufferInit(TPCircularBuffer *buffer, int32_t length);
+
+/*!
+ * Cleanup buffer
+ *
+ * Releases buffer resources.
+ */
+void TPCircularBufferCleanup(TPCircularBuffer *buffer);
+
+/*!
+ * Clear buffer
+ *
+ * Resets buffer to original, empty state.
+ *
+ * This is safe for use by consumer while producer is accessing
+ * buffer.
+ */
+void TPCircularBufferClear(TPCircularBuffer *buffer);
+
+// Reading (consuming)
+
+/*!
+ * Access end of buffer
+ *
+ * This gives you a pointer to the end of the buffer, ready
+ * for reading, and the number of available bytes to read.
+ *
+ * @param buffer Circular buffer
+ * @param availableBytes On output, the number of bytes ready for reading
+ * @return Pointer to the first bytes ready for reading, or NULL if buffer is empty
+ */
+static __inline__ __attribute__((always_inline)) void* TPCircularBufferTail(TPCircularBuffer *buffer, int32_t* availableBytes) {
+ *availableBytes = buffer->fillCount;
+ if ( *availableBytes == 0 ) return NULL;
+ return (void*)((char*)buffer->buffer + buffer->tail);
+}
+
+/*!
+ * Consume bytes in buffer
+ *
+ * This frees up the just-read bytes, ready for writing again.
+ *
+ * @param buffer Circular buffer
+ * @param amount Number of bytes to consume
+ */
+static __inline__ __attribute__((always_inline)) void TPCircularBufferConsume(TPCircularBuffer *buffer, int32_t amount) {
+ buffer->tail = (buffer->tail + amount) % buffer->length;
+ OSAtomicAdd32Barrier(-amount, &buffer->fillCount);
+}
+
+/*!
+ * Version of TPCircularBufferConsume without the memory barrier, for more optimal use in single-threaded contexts
+ */
+ static __inline__ __attribute__((always_inline)) void TPCircularBufferConsumeNoBarrier(TPCircularBuffer *buffer, int32_t amount) {
+ buffer->tail = (buffer->tail + amount) % buffer->length;
+ buffer->fillCount -= amount;
+}
+
+/*!
+ * Access front of buffer
+ *
+ * This gives you a pointer to the front of the buffer, ready
+ * for writing, and the number of available bytes to write.
+ *
+ * @param buffer Circular buffer
+ * @param availableBytes On output, the number of bytes ready for writing
+ * @return Pointer to the first bytes ready for writing, or NULL if buffer is full
+ */
+static __inline__ __attribute__((always_inline)) void* TPCircularBufferHead(TPCircularBuffer *buffer, int32_t* availableBytes) {
+ *availableBytes = (buffer->length - buffer->fillCount);
+ if ( *availableBytes == 0 ) return NULL;
+ return (void*)((char*)buffer->buffer + buffer->head);
+}
+
+// Writing (producing)
+
+/*!
+ * Produce bytes in buffer
+ *
+ * This marks the given section of the buffer ready for reading.
+ *
+ * @param buffer Circular buffer
+ * @param amount Number of bytes to produce
+ */
+static __inline__ __attribute__((always_inline)) void TPCircularBufferProduce(TPCircularBuffer *buffer, int amount) {
+ buffer->head = (buffer->head + amount) % buffer->length;
+ OSAtomicAdd32Barrier(amount, &buffer->fillCount);
+}
+
+/*!
+ * Version of TPCircularBufferProduce without the memory barrier, for more optimal use in single-threaded contexts
+ */
+static __inline__ __attribute__((always_inline)) void TPCircularBufferProduceNoBarrier(TPCircularBuffer *buffer, int amount) {
+ buffer->head = (buffer->head + amount) % buffer->length;
+ buffer->fillCount += amount;
+}
+
+/*!
+ * Helper routine to copy bytes to buffer
+ *
+ * This copies the given bytes to the buffer, and marks them ready for writing.
+ *
+ * @param buffer Circular buffer
+ * @param src Source buffer
+ * @param len Number of bytes in source buffer
+ * @return true if bytes copied, false if there was insufficient space
+ */
+static __inline__ __attribute__((always_inline)) bool TPCircularBufferProduceBytes(TPCircularBuffer *buffer, const void* src, int32_t len) {
+ int32_t space;
+ void *ptr = TPCircularBufferHead(buffer, &space);
+ if ( space < len ) return false;
+ memcpy(ptr, src, len);
+ TPCircularBufferProduce(buffer, len);
+ return true;
+}
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
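
The header above describes a lock-free single-producer/single-consumer pattern: one thread writes in place through TPCircularBufferHead/TPCircularBufferProduce, the other reads through TPCircularBufferTail/TPCircularBufferConsume. A hedged sketch of that split follows; the StereoFrame type and the Push/Pop helper names are invented for illustration.

#include <stdbool.h>
#include <stdint.h>
#include <string.h>
#include "TPCircularBuffer.h"

typedef struct { float left; float right; } StereoFrame;

// Producer thread (e.g. a decoder): write directly into the head pointer,
// then publish the bytes with TPCircularBufferProduce.
static bool PushFrames(TPCircularBuffer *rb, const StereoFrame *frames, int count) {
    int32_t space;
    StereoFrame *dst = (StereoFrame *)TPCircularBufferHead(rb, &space);
    if ( !dst || space < (int32_t)(count * sizeof(StereoFrame)) ) return false;
    memcpy(dst, frames, count * sizeof(StereoFrame));
    TPCircularBufferProduce(rb, (int)(count * sizeof(StereoFrame)));
    return true;
}

// Consumer thread (e.g. the audio render callback): read from the tail
// pointer, then release the bytes with TPCircularBufferConsume.
static int PopFrames(TPCircularBuffer *rb, StereoFrame *out, int maxCount) {
    int32_t bytes;
    StereoFrame *src = (StereoFrame *)TPCircularBufferTail(rb, &bytes);
    if ( !src ) return 0;
    int count = (int)(bytes / sizeof(StereoFrame));
    if ( count > maxCount ) count = maxCount;
    memcpy(out, src, count * sizeof(StereoFrame));
    TPCircularBufferConsume(rb, (int32_t)(count * sizeof(StereoFrame)));
    return count;
}
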
diff --git a/Pods/Target Support Files/EZAudio/EZAudio-Private.xcconfig b/Pods/Target Support Files/EZAudio/EZAudio-Private.xcconfig
new file mode 100644
index 0000000..54bad1a
--- /dev/null
+++ b/Pods/Target Support Files/EZAudio/EZAudio-Private.xcconfig
@@ -0,0 +1,6 @@
+#include "EZAudio.xcconfig"
+GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
+HEADER_SEARCH_PATHS = "${PODS_ROOT}/Headers/Private" "${PODS_ROOT}/Headers/Private/EZAudio" "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/EZAudio" "${PODS_ROOT}/Headers/Public/TPCircularBuffer"
+OTHER_LDFLAGS = ${EZAUDIO_OTHER_LDFLAGS}
+PODS_ROOT = ${SRCROOT}
+SKIP_INSTALL = YES
\ No newline at end of file
diff --git a/Pods/Target Support Files/EZAudio/EZAudio-dummy.m b/Pods/Target Support Files/EZAudio/EZAudio-dummy.m
new file mode 100644
index 0000000..3f9b622
--- /dev/null
+++ b/Pods/Target Support Files/EZAudio/EZAudio-dummy.m
@@ -0,0 +1,5 @@
+#import <Foundation/Foundation.h>
+@interface PodsDummy_EZAudio : NSObject
+@end
+@implementation PodsDummy_EZAudio
+@end
diff --git a/Pods/Target Support Files/EZAudio/EZAudio-prefix.pch b/Pods/Target Support Files/EZAudio/EZAudio-prefix.pch
new file mode 100644
index 0000000..b9c163b
--- /dev/null
+++ b/Pods/Target Support Files/EZAudio/EZAudio-prefix.pch
@@ -0,0 +1,4 @@
+#ifdef __OBJC__
+#import <Cocoa/Cocoa.h>
+#endif
+
diff --git a/Pods/Target Support Files/EZAudio/EZAudio.xcconfig b/Pods/Target Support Files/EZAudio/EZAudio.xcconfig
new file mode 100644
index 0000000..54dc9ec
--- /dev/null
+++ b/Pods/Target Support Files/EZAudio/EZAudio.xcconfig
@@ -0,0 +1 @@
+EZAUDIO_OTHER_LDFLAGS = -framework "Accelerate" -framework "AudioToolbox" -framework "AudioUnit" -framework "CoreAudio" -framework "GLKit" -framework "OpenGL" -framework "QuartzCore"
\ No newline at end of file
diff --git a/Pods/Target Support Files/Pods/Pods-acknowledgements.markdown b/Pods/Target Support Files/Pods/Pods-acknowledgements.markdown
new file mode 100644
index 0000000..03e8793
--- /dev/null
+++ b/Pods/Target Support Files/Pods/Pods-acknowledgements.markdown
@@ -0,0 +1,37 @@
+# Acknowledgements
+This application makes use of the following third party libraries:
+
+## EZAudio
+
+The MIT License (MIT)
+
+EZAudio
+Copyright (c) 2013 Syed Haris Ali
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+
+## TPCircularBuffer
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."
+
+Generated by CocoaPods - http://cocoapods.org
diff --git a/Pods/Target Support Files/Pods/Pods-acknowledgements.plist b/Pods/Target Support Files/Pods/Pods-acknowledgements.plist
new file mode 100644
index 0000000..8f41481
--- /dev/null
+++ b/Pods/Target Support Files/Pods/Pods-acknowledgements.plist
@@ -0,0 +1,71 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>PreferenceSpecifiers</key>
+ <array>
+ <dict>
+ <key>FooterText</key>
+ <string>This application makes use of the following third party libraries:</string>
+ <key>Title</key>
+ <string>Acknowledgements</string>
+ <key>Type</key>
+ <string>PSGroupSpecifier</string>
+ </dict>
+ <dict>
+ <key>FooterText</key>
+ <string>The MIT License (MIT)
+
+EZAudio
+Copyright (c) 2013 Syed Haris Ali
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+</string>
+ <key>Title</key>
+ <string>EZAudio</string>
+ <key>Type</key>
+ <string>PSGroupSpecifier</string>
+ </dict>
+ <dict>
+ <key>FooterText</key>
+ <string>Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE."
+</string>
+ <key>Title</key>
+ <string>TPCircularBuffer</string>
+ <key>Type</key>
+ <string>PSGroupSpecifier</string>
+ </dict>
+ <dict>
+ <key>FooterText</key>
+ <string>Generated by CocoaPods - http://cocoapods.org</string>
+ <key>Title</key>
+ <string></string>
+ <key>Type</key>
+ <string>PSGroupSpecifier</string>
+ </dict>
+ </array>
+ <key>StringsTable</key>
+ <string>Acknowledgements</string>
+ <key>Title</key>
+ <string>Acknowledgements</string>
+</dict>
+</plist>
diff --git a/Pods/Target Support Files/Pods/Pods-dummy.m b/Pods/Target Support Files/Pods/Pods-dummy.m
new file mode 100644
index 0000000..ade64bd
--- /dev/null
+++ b/Pods/Target Support Files/Pods/Pods-dummy.m
@@ -0,0 +1,5 @@
+#import <Foundation/Foundation.h>
+@interface PodsDummy_Pods : NSObject
+@end
+@implementation PodsDummy_Pods
+@end
diff --git a/Pods/Target Support Files/Pods/Pods-resources.sh b/Pods/Target Support Files/Pods/Pods-resources.sh
new file mode 100755
index 0000000..ea685a2
--- /dev/null
+++ b/Pods/Target Support Files/Pods/Pods-resources.sh
@@ -0,0 +1,95 @@
+#!/bin/sh
+set -e
+
+mkdir -p "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
+
+RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
+> "$RESOURCES_TO_COPY"
+
+XCASSET_FILES=()
+
+realpath() {
+ DIRECTORY="$(cd "${1%/*}" && pwd)"
+ FILENAME="${1##*/}"
+ echo "$DIRECTORY/$FILENAME"
+}
+
+install_resource()
+{
+ case $1 in
+ *.storyboard)
+ echo "ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile ${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .storyboard`.storyboardc ${PODS_ROOT}/$1 --sdk ${SDKROOT}"
+ ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .storyboard`.storyboardc" "${PODS_ROOT}/$1" --sdk "${SDKROOT}"
+ ;;
+ *.xib)
+ echo "ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile ${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .xib`.nib ${PODS_ROOT}/$1 --sdk ${SDKROOT}"
+ ibtool --reference-external-strings-file --errors --warnings --notices --output-format human-readable-text --compile "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$1\" .xib`.nib" "${PODS_ROOT}/$1" --sdk "${SDKROOT}"
+ ;;
+ *.framework)
+ echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
+ mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
+ echo "rsync -av ${PODS_ROOT}/$1 ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
+ rsync -av "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
+ ;;
+ *.xcdatamodel)
+ echo "xcrun momc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1"`.mom\""
+ xcrun momc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodel`.mom"
+ ;;
+ *.xcdatamodeld)
+ echo "xcrun momc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodeld`.momd\""
+ xcrun momc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcdatamodeld`.momd"
+ ;;
+ *.xcmappingmodel)
+ echo "xcrun mapc \"${PODS_ROOT}/$1\" \"${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcmappingmodel`.cdm\""
+ xcrun mapc "${PODS_ROOT}/$1" "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$1" .xcmappingmodel`.cdm"
+ ;;
+ *.xcassets)
+ ABSOLUTE_XCASSET_FILE=$(realpath "${PODS_ROOT}/$1")
+ XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
+ ;;
+ /*)
+ echo "$1"
+ echo "$1" >> "$RESOURCES_TO_COPY"
+ ;;
+ *)
+ echo "${PODS_ROOT}/$1"
+ echo "${PODS_ROOT}/$1" >> "$RESOURCES_TO_COPY"
+ ;;
+ esac
+}
+
+mkdir -p "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
+rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${CONFIGURATION_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
+if [[ "${ACTION}" == "install" ]]; then
+ mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
+ rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
+fi
+rm -f "$RESOURCES_TO_COPY"
+
+if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "$XCASSET_FILES" ]
+then
+ case "${TARGETED_DEVICE_FAMILY}" in
+ 1,2)
+ TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
+ ;;
+ 1)
+ TARGET_DEVICE_ARGS="--target-device iphone"
+ ;;
+ 2)
+ TARGET_DEVICE_ARGS="--target-device ipad"
+ ;;
+ *)
+ TARGET_DEVICE_ARGS="--target-device mac"
+ ;;
+ esac
+
+ # Find all other xcassets (this unfortunately includes those of path pods and other targets).
+ OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
+ while read line; do
+ if [[ $line != "`realpath $PODS_ROOT`*" ]]; then
+ XCASSET_FILES+=("$line")
+ fi
+ done <<<"$OTHER_XCASSETS"
+
+ printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${IPHONEOS_DEPLOYMENT_TARGET}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
+fi
diff --git a/Pods/Target Support Files/Pods/Pods.debug.xcconfig b/Pods/Target Support Files/Pods/Pods.debug.xcconfig
new file mode 100644
index 0000000..d5a1d8b
--- /dev/null
+++ b/Pods/Target Support Files/Pods/Pods.debug.xcconfig
@@ -0,0 +1,5 @@
+GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
+HEADER_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/EZAudio" "${PODS_ROOT}/Headers/Public/TPCircularBuffer"
+OTHER_CFLAGS = $(inherited) -isystem "${PODS_ROOT}/Headers/Public" -isystem "${PODS_ROOT}/Headers/Public/EZAudio" -isystem "${PODS_ROOT}/Headers/Public/TPCircularBuffer"
+OTHER_LDFLAGS = $(inherited) -ObjC -l"EZAudio" -l"TPCircularBuffer" -framework "Accelerate" -framework "AudioToolbox" -framework "AudioUnit" -framework "CoreAudio" -framework "GLKit" -framework "OpenGL" -framework "QuartzCore"
+PODS_ROOT = ${SRCROOT}/Pods
\ No newline at end of file
diff --git a/Pods/Target Support Files/Pods/Pods.release.xcconfig b/Pods/Target Support Files/Pods/Pods.release.xcconfig
new file mode 100644
index 0000000..d5a1d8b
--- /dev/null
+++ b/Pods/Target Support Files/Pods/Pods.release.xcconfig
@@ -0,0 +1,5 @@
+GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
+HEADER_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/EZAudio" "${PODS_ROOT}/Headers/Public/TPCircularBuffer"
+OTHER_CFLAGS = $(inherited) -isystem "${PODS_ROOT}/Headers/Public" -isystem "${PODS_ROOT}/Headers/Public/EZAudio" -isystem "${PODS_ROOT}/Headers/Public/TPCircularBuffer"
+OTHER_LDFLAGS = $(inherited) -ObjC -l"EZAudio" -l"TPCircularBuffer" -framework "Accelerate" -framework "AudioToolbox" -framework "AudioUnit" -framework "CoreAudio" -framework "GLKit" -framework "OpenGL" -framework "QuartzCore"
+PODS_ROOT = ${SRCROOT}/Pods
\ No newline at end of file
diff --git a/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer-Private.xcconfig b/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer-Private.xcconfig
new file mode 100644
index 0000000..ca823ce
--- /dev/null
+++ b/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer-Private.xcconfig
@@ -0,0 +1,6 @@
+#include "TPCircularBuffer.xcconfig"
+GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
+HEADER_SEARCH_PATHS = "${PODS_ROOT}/Headers/Private" "${PODS_ROOT}/Headers/Private/TPCircularBuffer" "${PODS_ROOT}/Headers/Public" "${PODS_ROOT}/Headers/Public/EZAudio" "${PODS_ROOT}/Headers/Public/TPCircularBuffer"
+OTHER_LDFLAGS = ${TPCIRCULARBUFFER_OTHER_LDFLAGS}
+PODS_ROOT = ${SRCROOT}
+SKIP_INSTALL = YES
\ No newline at end of file
diff --git a/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer-dummy.m b/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer-dummy.m
new file mode 100644
index 0000000..dd59cd0
--- /dev/null
+++ b/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer-dummy.m
@@ -0,0 +1,5 @@
+#import <Foundation/Foundation.h>
+@interface PodsDummy_TPCircularBuffer : NSObject
+@end
+@implementation PodsDummy_TPCircularBuffer
+@end
diff --git a/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer-prefix.pch b/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer-prefix.pch
new file mode 100644
index 0000000..b9c163b
--- /dev/null
+++ b/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer-prefix.pch
@@ -0,0 +1,4 @@
+#ifdef __OBJC__
+#import <Cocoa/Cocoa.h>
+#endif
+
diff --git a/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer.xcconfig b/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer.xcconfig
new file mode 100644
index 0000000..c88f926
--- /dev/null
+++ b/Pods/Target Support Files/TPCircularBuffer/TPCircularBuffer.xcconfig
@@ -0,0 +1 @@
+TPCIRCULARBUFFER_OTHER_LDFLAGS = -framework "AudioToolbox"
\ No newline at end of file