/************************************************************************/
/*! \class RtAudio
    \brief Realtime audio i/o C++ classes.

    RtAudio provides a common API (Application Programming Interface)
    for realtime audio input/output across Linux (native ALSA, Jack,
    and OSS), SGI, Macintosh OS X (CoreAudio), and Windows
    (DirectSound and ASIO) operating systems.

    RtAudio WWW site: http://music.mcgill.ca/~gary/rtaudio/

    RtAudio: realtime audio i/o C++ classes
    Copyright (c) 2001-2005 Gary P. Scavone

    Permission is hereby granted, free of charge, to any person
    obtaining a copy of this software and associated documentation files
    (the "Software"), to deal in the Software without restriction,
    including without limitation the rights to use, copy, modify, merge,
    publish, distribute, sublicense, and/or sell copies of the Software,
    and to permit persons to whom the Software is furnished to do so,
    subject to the following conditions:

    The above copyright notice and this permission notice shall be
    included in all copies or substantial portions of the Software.

    Any person wishing to distribute modifications to the Software is
    requested to send the modifications to the original developer so that
    they can be incorporated into the canonical version.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
    IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
    ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
    CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/************************************************************************/

// RtAudio: Version 3.0.3 (18 November 2005)

#include "RtAudio.h"
// NOTE(review): the two include lines below had their header names lost
// in this copy of the file ("#include #include").  Restored to the
// headers this translation unit demonstrably needs (std::cerr-style
// diagnostics and sprintf/fprintf) — confirm against the canonical
// RtAudio 3.0.3 distribution.
#include <iostream>
#include <stdio.h>

// Static variable definitions.
const unsigned int RtApi::MAX_SAMPLE_RATES = 14; const unsigned int RtApi::SAMPLE_RATES[] = { 4000, 5512, 8000, 9600, 11025, 16000, 22050, 32000, 44100, 48000, 88200, 96000, 176400, 192000 }; #if defined(__WINDOWS_DS__) || defined(__WINDOWS_ASIO__) #define MUTEX_INITIALIZE(A) InitializeCriticalSection(A) #define MUTEX_DESTROY(A) DeleteCriticalSection(A); #define MUTEX_LOCK(A) EnterCriticalSection(A) #define MUTEX_UNLOCK(A) LeaveCriticalSection(A) #else // pthread API #define MUTEX_INITIALIZE(A) pthread_mutex_init(A, NULL) #define MUTEX_DESTROY(A) pthread_mutex_destroy(A); #define MUTEX_LOCK(A) pthread_mutex_lock(A) #define MUTEX_UNLOCK(A) pthread_mutex_unlock(A) #endif // *************************************************** // // // Public common (OS-independent) methods. // // *************************************************** // RtAudio :: RtAudio( RtAudioApi api ) { initialize( api ); } RtAudio :: RtAudio( int outputDevice, int outputChannels, int inputDevice, int inputChannels, RtAudioFormat format, int sampleRate, int *bufferSize, int numberOfBuffers, RtAudioApi api ) { initialize( api ); try { rtapi_->openStream( outputDevice, outputChannels, inputDevice, inputChannels, format, sampleRate, bufferSize, numberOfBuffers ); } catch (RtError &exception) { // Deallocate the RtApi instance. delete rtapi_; throw exception; } } RtAudio :: RtAudio( int outputDevice, int outputChannels, int inputDevice, int inputChannels, RtAudioFormat format, int sampleRate, int *bufferSize, int *numberOfBuffers, RtAudioApi api ) { initialize( api ); try { rtapi_->openStream( outputDevice, outputChannels, inputDevice, inputChannels, format, sampleRate, bufferSize, numberOfBuffers ); } catch (RtError &exception) { // Deallocate the RtApi instance. 
delete rtapi_; throw exception; } } RtAudio :: ~RtAudio() { delete rtapi_; } void RtAudio :: openStream( int outputDevice, int outputChannels, int inputDevice, int inputChannels, RtAudioFormat format, int sampleRate, int *bufferSize, int numberOfBuffers ) { rtapi_->openStream( outputDevice, outputChannels, inputDevice, inputChannels, format, sampleRate, bufferSize, numberOfBuffers ); } void RtAudio :: openStream( int outputDevice, int outputChannels, int inputDevice, int inputChannels, RtAudioFormat format, int sampleRate, int *bufferSize, int *numberOfBuffers ) { rtapi_->openStream( outputDevice, outputChannels, inputDevice, inputChannels, format, sampleRate, bufferSize, *numberOfBuffers ); } void RtAudio::initialize( RtAudioApi api ) { rtapi_ = 0; // First look for a compiled match to a specified API value. If one // of these constructors throws an error, it will be passed up the // inheritance chain. #if defined(__LINUX_JACK__) if ( api == LINUX_JACK ) rtapi_ = new RtApiJack(); #endif #if defined(__LINUX_ALSA__) if ( api == LINUX_ALSA ) rtapi_ = new RtApiAlsa(); #endif #if defined(__LINUX_OSS__) if ( api == LINUX_OSS ) rtapi_ = new RtApiOss(); #endif #if defined(__WINDOWS_ASIO__) if ( api == WINDOWS_ASIO ) rtapi_ = new RtApiAsio(); #endif #if defined(__WINDOWS_DS__) if ( api == WINDOWS_DS ) rtapi_ = new RtApiDs(); #endif #if defined(__IRIX_AL__) if ( api == IRIX_AL ) rtapi_ = new RtApiAl(); #endif #if defined(__MACOSX_CORE__) if ( api == MACOSX_CORE ) rtapi_ = new RtApiCore(); #endif if ( rtapi_ ) return; if ( api > 0 ) { // No compiled support for specified API value. throw RtError( "RtAudio: no compiled support for specified API argument!", RtError::INVALID_PARAMETER ); } // No specified API ... search for "best" option. 
try { #if defined(__LINUX_JACK__) rtapi_ = new RtApiJack(); #elif defined(__WINDOWS_ASIO__) rtapi_ = new RtApiAsio(); #elif defined(__IRIX_AL__) rtapi_ = new RtApiAl(); #elif defined(__MACOSX_CORE__) rtapi_ = new RtApiCore(); #else ; #endif } catch (RtError &) { #if defined(__RTAUDIO_DEBUG__) fprintf(stderr, "\nRtAudio: no devices found for first api option (JACK, ASIO, Al, or CoreAudio).\n\n"); #endif rtapi_ = 0; } if ( rtapi_ ) return; // Try second API support if ( rtapi_ == 0 ) { try { #if defined(__LINUX_ALSA__) rtapi_ = new RtApiAlsa(); #elif defined(__WINDOWS_DS__) rtapi_ = new RtApiDs(); #else ; #endif } catch (RtError &) { #if defined(__RTAUDIO_DEBUG__) fprintf(stderr, "\nRtAudio: no devices found for second api option (Alsa or DirectSound).\n\n"); #endif rtapi_ = 0; } } if ( rtapi_ ) return; // Try third API support if ( rtapi_ == 0 ) { #if defined(__LINUX_OSS__) try { rtapi_ = new RtApiOss(); } catch (RtError &error) { rtapi_ = 0; } #else ; #endif } if ( rtapi_ == 0 ) { // No devices found. 
throw RtError( "RtAudio: no devices found for compiled audio APIs!", RtError::NO_DEVICES_FOUND ); } } RtApi :: RtApi() { stream_.mode = UNINITIALIZED; stream_.state = STREAM_STOPPED; stream_.apiHandle = 0; MUTEX_INITIALIZE(&stream_.mutex); } RtApi :: ~RtApi() { MUTEX_DESTROY(&stream_.mutex); } void RtApi :: openStream( int outputDevice, int outputChannels, int inputDevice, int inputChannels, RtAudioFormat format, int sampleRate, int *bufferSize, int *numberOfBuffers ) { this->openStream( outputDevice, outputChannels, inputDevice, inputChannels, format, sampleRate, bufferSize, *numberOfBuffers ); *numberOfBuffers = stream_.nBuffers; } void RtApi :: openStream( int outputDevice, int outputChannels, int inputDevice, int inputChannels, RtAudioFormat format, int sampleRate, int *bufferSize, int numberOfBuffers ) { if ( stream_.mode != UNINITIALIZED ) { sprintf(message_, "RtApi: only one open stream allowed per class instance."); error(RtError::INVALID_STREAM); } if (outputChannels < 1 && inputChannels < 1) { sprintf(message_,"RtApi: one or both 'channel' parameters must be greater than zero."); error(RtError::INVALID_PARAMETER); } if ( formatBytes(format) == 0 ) { sprintf(message_,"RtApi: 'format' parameter value is undefined."); error(RtError::INVALID_PARAMETER); } if ( outputChannels > 0 ) { if (outputDevice > nDevices_ || outputDevice < 0) { sprintf(message_,"RtApi: 'outputDevice' parameter value (%d) is invalid.", outputDevice); error(RtError::INVALID_PARAMETER); } } if ( inputChannels > 0 ) { if (inputDevice > nDevices_ || inputDevice < 0) { sprintf(message_,"RtApi: 'inputDevice' parameter value (%d) is invalid.", inputDevice); error(RtError::INVALID_PARAMETER); } } std::string errorMessages; clearStreamInfo(); bool result = FAILURE; int device, defaultDevice = 0; StreamMode mode; int channels; if ( outputChannels > 0 ) { mode = OUTPUT; channels = outputChannels; if ( outputDevice == 0 ) { // Try default device first. 
defaultDevice = getDefaultOutputDevice(); device = defaultDevice; } else device = outputDevice - 1; for ( int i=-1; i= 0 ) { if ( i == defaultDevice ) continue; device = i; } if ( devices_[device].probed == false ) { // If the device wasn't successfully probed before, try it // (again) now. clearDeviceInfo(&devices_[device]); probeDeviceInfo(&devices_[device]); } if ( devices_[device].probed ) result = probeDeviceOpen(device, mode, channels, sampleRate, format, bufferSize, numberOfBuffers); if ( result == SUCCESS ) break; errorMessages.append( " " ); errorMessages.append( message_ ); errorMessages.append( "\n" ); if ( outputDevice > 0 ) break; clearStreamInfo(); } } if ( inputChannels > 0 && ( result == SUCCESS || outputChannels <= 0 ) ) { mode = INPUT; channels = inputChannels; if ( inputDevice == 0 ) { // Try default device first. defaultDevice = getDefaultInputDevice(); device = defaultDevice; } else device = inputDevice - 1; for ( int i=-1; i= 0 ) { if ( i == defaultDevice ) continue; device = i; } if ( devices_[device].probed == false ) { // If the device wasn't successfully probed before, try it // (again) now. clearDeviceInfo(&devices_[device]); probeDeviceInfo(&devices_[device]); } if ( devices_[device].probed ) result = probeDeviceOpen( device, mode, channels, sampleRate, format, bufferSize, numberOfBuffers ); if ( result == SUCCESS ) break; errorMessages.append( " " ); errorMessages.append( message_ ); errorMessages.append( "\n" ); if ( inputDevice > 0 ) break; } } if ( result == SUCCESS ) return; // If we get here, all attempted probes failed. Close any opened // devices and clear the stream structure. 
if ( stream_.mode != UNINITIALIZED ) closeStream(); clearStreamInfo(); if ( ( outputDevice == 0 && outputChannels > 0 ) || ( inputDevice == 0 && inputChannels > 0 ) ) sprintf(message_,"RtApi: no devices found for given stream parameters: \n%s", errorMessages.c_str()); else sprintf(message_,"RtApi: unable to open specified device(s) with given stream parameters: \n%s", errorMessages.c_str()); error(RtError::INVALID_PARAMETER); return; } int RtApi :: getDeviceCount(void) { return devices_.size(); } RtApi::StreamState RtApi :: getStreamState( void ) const { return stream_.state; } RtAudioDeviceInfo RtApi :: getDeviceInfo( int device ) { if (device > (int) devices_.size() || device < 1) { sprintf(message_, "RtApi: invalid device specifier (%d)!", device); error(RtError::INVALID_DEVICE); } RtAudioDeviceInfo info; int deviceIndex = device - 1; // If the device wasn't successfully probed before, try it now (or again). if (devices_[deviceIndex].probed == false) { clearDeviceInfo(&devices_[deviceIndex]); probeDeviceInfo(&devices_[deviceIndex]); } info.name.append( devices_[deviceIndex].name ); info.probed = devices_[deviceIndex].probed; if ( info.probed == true ) { info.outputChannels = devices_[deviceIndex].maxOutputChannels; info.inputChannels = devices_[deviceIndex].maxInputChannels; info.duplexChannels = devices_[deviceIndex].maxDuplexChannels; for (unsigned int i=0; i #include #include #include #include #include #include #include #include #define DAC_NAME "/dev/dsp" #define MAX_DEVICES 16 #define MAX_CHANNELS 16 extern "C" void *ossCallbackHandler(void * ptr); RtApiOss :: RtApiOss() { this->initialize(); if (nDevices_ <= 0) { sprintf(message_, "RtApiOss: no Linux OSS audio devices found!"); error(RtError::NO_DEVICES_FOUND); } } RtApiOss :: ~RtApiOss() { if ( stream_.mode != UNINITIALIZED ) closeStream(); } void RtApiOss :: initialize(void) { // Count cards and devices nDevices_ = 0; // We check /dev/dsp before probing devices. 
// /dev/dsp is supposed to
  // be a link to the "default" audio device, of the form /dev/dsp0,
  // /dev/dsp1, etc...  However, I've seen many cases where /dev/dsp was a
  // real device, so we need to check for that.  Also, sometimes the
  // link is to /dev/dspx and other times just dspx.  I'm not sure how
  // the latter works, but it does.
  char device_name[16];
  struct stat dspstat;
  int dsplink = -1;
  int i = 0;
  if (lstat(DAC_NAME, &dspstat) == 0) {
    if (S_ISLNK(dspstat.st_mode)) {
      // /dev/dsp is a symlink ... work out which real device it names.
      i = readlink(DAC_NAME, device_name, sizeof(device_name));
      if (i > 0) {
        device_name[i] = '\0';
        if (i > 8) { // check for "/dev/dspx"
          if (!strncmp(DAC_NAME, device_name, 8))
            dsplink = atoi(&device_name[8]);
        }
        else if (i > 3) { // check for "dspx"
          if (!strncmp("dsp", device_name, 3))
            dsplink = atoi(&device_name[3]);
        }
      }
      else {
        sprintf(message_, "RtApiOss: cannot read value of symbolic link %s.", DAC_NAME);
        error(RtError::SYSTEM_ERROR);
      }
    }
  }
  else {
    sprintf(message_, "RtApiOss: cannot stat %s.", DAC_NAME);
    error(RtError::SYSTEM_ERROR);
  }

  // The OSS API doesn't provide a routine for determining the number
  // of devices.  Thus, we'll just pursue a brute force method.  The
  // idea is to start with /dev/dsp(0) and continue with higher device
  // numbers until we reach MAX_DSP_DEVICES.  This should tell us how
  // many devices we have ... it is not a fullproof scheme, but hopefully
  // it will work most of the time.
  int fd = 0;
  RtApiDevice device;
  // NOTE(review): the line below is garbled in this copy of the file —
  // the body of the device-probing loop (between "i" and "= 0") was
  // lost.  TODO: restore from the canonical RtAudio 3.0.3 sources.
  for (i=-1; i= 0) close(fd);
    device.name.erase();
    device.name.append( (const char *)device_name, strlen(device_name)+1);
    devices_.push_back(device);
    nDevices_++;
  }
}

// Probe one OSS device's capabilities (min/max channels for playback,
// capture, and duplex; native data formats; supported sample rates)
// and record them in *info.  Failures are reported as DEBUG_WARNINGs
// and leave info->probed false.
void RtApiOss :: probeDeviceInfo(RtApiDevice *info)
{
  int i, fd, channels, mask;

  // The OSS API doesn't provide a means for probing the capabilities
  // of devices.  Thus, we'll just pursue a brute force method.

  // First try for playback
  fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
  if (fd == -1) {
    // Open device failed ... either busy or doesn't exist
    if (errno == EBUSY || errno == EAGAIN)
      sprintf(message_, "RtApiOss: OSS playback device (%s) is busy and cannot be probed.",
              info->name.c_str());
    else
      sprintf(message_, "RtApiOss: OSS playback device (%s) open error.", info->name.c_str());
    error(RtError::DEBUG_WARNING);
    goto capture_probe;
  }

  // We have an open device ... see how many channels it can handle
  for (i=MAX_CHANNELS; i>0; i--) {
    channels = i;
    if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1) {
      // This would normally indicate some sort of hardware error, but under ALSA's
      // OSS emulation, it sometimes indicates an invalid channel value.  Further,
      // the returned channel value is not changed. So, we'll ignore the possible
      // hardware error.
      continue; // try next channel number
    }
    // Check to see whether the device supports the requested number of channels
    if (channels != i ) continue; // try next channel number
    // If here, we found the largest working channel value
    break;
  }
  info->maxOutputChannels = i;

  // Now find the minimum number of channels it can handle
  for (i=1; i<=info->maxOutputChannels; i++) {
    channels = i;
    if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
      continue; // try next channel number
    // If here, we found the smallest working channel value
    break;
  }
  info->minOutputChannels = i;
  close(fd);

 capture_probe:
  // Now try for capture
  fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
  if (fd == -1) {
    // Open device for capture failed ... either busy or doesn't exist
    if (errno == EBUSY || errno == EAGAIN)
      sprintf(message_, "RtApiOss: OSS capture device (%s) is busy and cannot be probed.",
              info->name.c_str());
    else
      sprintf(message_, "RtApiOss: OSS capture device (%s) open error.", info->name.c_str());
    error(RtError::DEBUG_WARNING);
    if (info->maxOutputChannels == 0)
      // didn't open for playback either ... device invalid
      return;
    goto probe_parameters;
  }

  // We have the device open for capture ... see how many channels it can handle
  for (i=MAX_CHANNELS; i>0; i--) {
    channels = i;
    if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
      continue; // as above
    }
    // If here, we found a working channel value
    break;
  }
  info->maxInputChannels = i;

  // Now find the minimum number of channels it can handle
  for (i=1; i<=info->maxInputChannels; i++) {
    channels = i;
    if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
      continue; // try next channel number
    // If here, we found the smallest working channel value
    break;
  }
  info->minInputChannels = i;
  close(fd);

  if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) {
    sprintf(message_, "RtApiOss: device (%s) reports zero channels for input and output.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // If device opens for both playback and capture, we determine the channels.
  if (info->maxOutputChannels == 0 || info->maxInputChannels == 0)
    goto probe_parameters;

  fd = open(info->name.c_str(), O_RDWR | O_NONBLOCK);
  if (fd == -1)
    goto probe_parameters;

  ioctl(fd, SNDCTL_DSP_SETDUPLEX, 0);
  ioctl(fd, SNDCTL_DSP_GETCAPS, &mask);
  if (mask & DSP_CAP_DUPLEX) {
    info->hasDuplexSupport = true;
    // We have the device open for duplex ... see how many channels it can handle
    for (i=MAX_CHANNELS; i>0; i--) {
      channels = i;
      if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
        continue; // as above
      // If here, we found a working channel value
      break;
    }
    info->maxDuplexChannels = i;

    // Now find the minimum number of channels it can handle
    for (i=1; i<=info->maxDuplexChannels; i++) {
      channels = i;
      if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i)
        continue; // try next channel number
      // If here, we found the smallest working channel value
      break;
    }
    info->minDuplexChannels = i;
  }
  close(fd);

 probe_parameters:
  // At this point, we need to figure out the supported data formats
  // and sample rates.  We'll proceed by openning the device in the
  // direction with the maximum number of channels, or playback if
  // they are equal.  This might limit our sample rate options, but so
  // be it.

  if (info->maxOutputChannels >= info->maxInputChannels) {
    fd = open(info->name.c_str(), O_WRONLY | O_NONBLOCK);
    channels = info->maxOutputChannels;
  }
  else {
    fd = open(info->name.c_str(), O_RDONLY | O_NONBLOCK);
    channels = info->maxInputChannels;
  }

  if (fd == -1) {
    // We've got some sort of conflict ... abort
    sprintf(message_, "RtApiOss: device (%s) won't reopen during probe.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // We have an open device ... set to maximum channels.
  i = channels;
  if (ioctl(fd, SNDCTL_DSP_CHANNELS, &channels) == -1 || channels != i) {
    // We've got some sort of conflict ... abort
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) won't revert to previous channel setting.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Probe the supported data formats ... we don't care about endian-ness just yet.
  int format;
  info->nativeFormats = 0;
#if defined (AFMT_S32_BE)
  // This format does not seem to be in the 2.4 kernel version of OSS soundcard.h
  if (mask & AFMT_S32_BE) {
    format = AFMT_S32_BE;
    info->nativeFormats |= RTAUDIO_SINT32;
  }
#endif
#if defined (AFMT_S32_LE)
  /* This format is not in the 2.4.4 kernel version of OSS soundcard.h */
  if (mask & AFMT_S32_LE) {
    format = AFMT_S32_LE;
    info->nativeFormats |= RTAUDIO_SINT32;
  }
#endif
  if (mask & AFMT_S8) {
    format = AFMT_S8;
    info->nativeFormats |= RTAUDIO_SINT8;
  }
  if (mask & AFMT_S16_BE) {
    format = AFMT_S16_BE;
    info->nativeFormats |= RTAUDIO_SINT16;
  }
  if (mask & AFMT_S16_LE) {
    format = AFMT_S16_LE;
    info->nativeFormats |= RTAUDIO_SINT16;
  }

  // Check that we have at least one supported format
  if (info->nativeFormats == 0) {
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Set the format
  i = format;
  if (ioctl(fd, SNDCTL_DSP_SETFMT, &format) == -1 || format != i) {
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) error setting data format.",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Probe the supported sample rates.
  info->sampleRates.clear();
  // NOTE(review): the line below is garbled in this copy of the file —
  // the loop body (between "k" and "sampleRates") was lost, including
  // the SNDCTL_DSP_SPEED probe and the declaration of "speed".
  // TODO: restore from the canonical RtAudio 3.0.3 sources.
  for (unsigned int k=0; ksampleRates.push_back(speed);
  }

  if (info->sampleRates.size() == 0) {
    close(fd);
    sprintf(message_, "RtApiOss: no supported sample rates found for device (%s).",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // That's all ...
// (continuation of RtApiOss::probeDeviceInfo) ... close the device and return
  close(fd);
  info->probed = true;

  return;
}

// Open the given OSS device for the requested mode and configure
// format, channels, sample rate, and fragment (buffer) layout.
// Returns SUCCESS or FAILURE with a description left in message_.
// For duplex on a single OSS device, the playback descriptor is
// closed and the device reopened O_RDWR.
bool RtApiOss :: probeDeviceOpen(int device, StreamMode mode, int channels,
                                 int sampleRate, RtAudioFormat format,
                                 int *bufferSize, int numberOfBuffers)
{
  int buffers, buffer_bytes, device_channels, device_format;
  int srate, temp, fd;
  int *handle = (int *) stream_.apiHandle;

  const char *name = devices_[device].name.c_str();

  if (mode == OUTPUT)
    fd = open(name, O_WRONLY | O_NONBLOCK);
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device) {
      // We just set the same device for playback ... close and reopen for duplex (OSS only).
      close(handle[0]);
      handle[0] = 0;
      // First check that the number previously set channels is the same.
      if (stream_.nUserChannels[0] != channels) {
        sprintf(message_, "RtApiOss: input/output channels must be equal for OSS duplex device (%s).", name);
        goto error;
      }
      fd = open(name, O_RDWR | O_NONBLOCK);
    }
    else
      fd = open(name, O_RDONLY | O_NONBLOCK);
  }

  if (fd == -1) {
    if (errno == EBUSY || errno == EAGAIN)
      sprintf(message_, "RtApiOss: device (%s) is busy and cannot be opened.", name);
    else
      sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
    goto error;
  }

  // Now reopen in blocking mode.
  close(fd);
  if (mode == OUTPUT)
    fd = open(name, O_WRONLY | O_SYNC);
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device)
      fd = open(name, O_RDWR | O_SYNC);
    else
      fd = open(name, O_RDONLY | O_SYNC);
  }

  if (fd == -1) {
    sprintf(message_, "RtApiOss: device (%s) cannot be opened.", name);
    goto error;
  }

  // Get the sample format mask
  int mask;
  if (ioctl(fd, SNDCTL_DSP_GETFMTS, &mask) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) can't get supported audio formats.", name);
    goto error;
  }

  // Determine how to set the device format.  Prefer a native-endian
  // match for the user's format; fall back to byte-swapping.
  stream_.userFormat = format;
  device_format = -1;
  stream_.doByteSwap[mode] = false;
  if (format == RTAUDIO_SINT8) {
    if (mask & AFMT_S8) {
      device_format = AFMT_S8;
      stream_.deviceFormat[mode] = RTAUDIO_SINT8;
    }
  }
  else if (format == RTAUDIO_SINT16) {
    if (mask & AFMT_S16_NE) {
      device_format = AFMT_S16_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S16_BE) {
      device_format = AFMT_S16_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S16_LE) {
      device_format = AFMT_S16_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#endif
  }
#if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
  else if (format == RTAUDIO_SINT32) {
    if (mask & AFMT_S32_NE) {
      device_format = AFMT_S32_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S32_BE) {
      device_format = AFMT_S32_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S32_LE) {
      device_format = AFMT_S32_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#endif
  }
#endif

  if (device_format == -1) {
    // The user requested format is not natively supported by the device.
    // Fall back to any format the device offers; format conversion will
    // be performed in software.
    if (mask & AFMT_S16_NE) {
      device_format = AFMT_S16_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S16_BE) {
      device_format = AFMT_S16_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S16_LE) {
      device_format = AFMT_S16_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT16;
      stream_.doByteSwap[mode] = true;
    }
#endif
#if defined (AFMT_S32_NE) && defined (AFMT_S32_LE) && defined (AFMT_S32_BE)
    else if (mask & AFMT_S32_NE) {
      device_format = AFMT_S32_NE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
    }
#if BYTE_ORDER == LITTLE_ENDIAN
    else if (mask & AFMT_S32_BE) {
      device_format = AFMT_S32_BE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#else
    else if (mask & AFMT_S32_LE) {
      device_format = AFMT_S32_LE;
      stream_.deviceFormat[mode] = RTAUDIO_SINT32;
      stream_.doByteSwap[mode] = true;
    }
#endif
#endif
    else if (mask & AFMT_S8) {
      device_format = AFMT_S8;
      stream_.deviceFormat[mode] = RTAUDIO_SINT8;
    }
  }

  if (stream_.deviceFormat[mode] == 0) {
    // This really shouldn't happen ...
    close(fd);
    sprintf(message_, "RtApiOss: device (%s) data format not supported by RtAudio.", name);
    goto error;
  }

  // Determine the number of channels for this device.  Note that the
  // channel value requested by the user might be < min_X_Channels.
  stream_.nUserChannels[mode] = channels;
  device_channels = channels;
  if (mode == OUTPUT) {
    if (channels < devices_[device].minOutputChannels)
      device_channels = devices_[device].minOutputChannels;
  }
  else { // mode == INPUT
    if (stream_.mode == OUTPUT && stream_.device[0] == device) {
      // We're doing duplex setup here.
      if (channels < devices_[device].minDuplexChannels)
        device_channels = devices_[device].minDuplexChannels;
    }
    else {
      if (channels < devices_[device].minInputChannels)
        device_channels = devices_[device].minInputChannels;
    }
  }
  stream_.nDeviceChannels[mode] = device_channels;

  // Attempt to set the buffer size.  According to OSS, the minimum
  // number of buffers is two.  The supposed minimum buffer size is 16
  // bytes, so that will be our lower bound.  The argument to this
  // call is in the form 0xMMMMSSSS (hex), where the buffer size (in
  // bytes) is given as 2^SSSS and the number of buffers as 2^MMMM.
  // We'll check the actual value used near the end of the setup
  // procedure.
  buffer_bytes = *bufferSize * formatBytes(stream_.deviceFormat[mode]) * device_channels;
  if (buffer_bytes < 16) buffer_bytes = 16;
  buffers = numberOfBuffers;
  if (buffers < 2) buffers = 2;
  temp = ((int) buffers << 16) + (int)(log10((double)buffer_bytes)/log10(2.0));
  if (ioctl(fd, SNDCTL_DSP_SETFRAGMENT, &temp)) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting fragment size for device (%s).", name);
    goto error;
  }
  stream_.nBuffers = buffers;

  // Set the data format.
  temp = device_format;
  if (ioctl(fd, SNDCTL_DSP_SETFMT, &device_format) == -1 || device_format != temp) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting data format for device (%s).", name);
    goto error;
  }

  // Set the number of channels.
  temp = device_channels;
  if (ioctl(fd, SNDCTL_DSP_CHANNELS, &device_channels) == -1 || device_channels != temp) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting %d channels on device (%s).", temp, name);
    goto error;
  }

  // Set the sample rate.
  srate = sampleRate;
  temp = srate;
  if (ioctl(fd, SNDCTL_DSP_SPEED, &srate) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: error setting sample rate = %d on device (%s).", temp, name);
    goto error;
  }

  // Verify the sample rate setup worked.
  if (abs(srate - temp) > 100) {
    close(fd);
    sprintf(message_, "RtApiOss: error ... audio device (%s) doesn't support sample rate of %d.", name, temp);
    goto error;
  }
  stream_.sampleRate = sampleRate;

  if (ioctl(fd, SNDCTL_DSP_GETBLKSIZE, &buffer_bytes) == -1) {
    close(fd);
    sprintf(message_, "RtApiOss: error getting buffer size for device (%s).", name);
    goto error;
  }

  // Save buffer size (in sample frames).
  *bufferSize = buffer_bytes / (formatBytes(stream_.deviceFormat[mode]) * device_channels);
  stream_.bufferSize = *bufferSize;

  if (mode == INPUT && stream_.mode == OUTPUT &&
      stream_.device[0] == device) {
    // We're doing duplex setup here.
    stream_.deviceFormat[0] = stream_.deviceFormat[1];
    stream_.nDeviceChannels[0] = device_channels;
  }

  // Allocate the stream handles if necessary and then save.
  if ( stream_.apiHandle == 0 ) {
    handle = (int *) calloc(2, sizeof(int));
    stream_.apiHandle = (void *) handle;
    handle[0] = 0;
    handle[1] = 0;
  }
  else {
    handle = (int *) stream_.apiHandle;
  }
  handle[mode] = fd;

  // Set flags for buffer conversion
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
    stream_.doConvertBuffer[mode] = true;

  // Allocate necessary internal buffers
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {

    long buffer_bytes;
    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
    else
      buffer_bytes = stream_.nUserChannels[1];

    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      close(fd);
      sprintf(message_, "RtApiOss: error allocating user buffer memory (%s).", name);
      goto error;
    }
  }

  if ( stream_.doConvertBuffer[mode] ) {

    long buffer_bytes;
    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        // An output-side device buffer already exists; reuse it if it
        // is at least as large as the input side needs.
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;
      }
    }

    if ( makeBuffer ) {
      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        close(fd);
        sprintf(message_, "RtApiOss: error allocating device buffer memory (%s).", name);
        goto error;
      }
    }
  }

  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;
  if ( stream_.mode == OUTPUT && mode == INPUT ) {
    stream_.mode = DUPLEX;
    if (stream_.device[0] == device)
      handle[0] = fd;
  }
  else
    stream_.mode = mode;

  // Setup the buffer conversion information structure.
  if ( stream_.doConvertBuffer[mode] ) {
    if (mode == INPUT) { // convert device to user buffer
      stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1];
      stream_.convertInfo[mode].outJump = stream_.nUserChannels[1];
      stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1];
      stream_.convertInfo[mode].outFormat = stream_.userFormat;
    }
    else { // convert user to device buffer
      stream_.convertInfo[mode].inJump = stream_.nUserChannels[0];
      stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0];
      stream_.convertInfo[mode].inFormat = stream_.userFormat;
      stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0];
    }

    if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump )
      stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump;
    else
      stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump;

    // Set up the interleave/deinterleave offsets.
    if ( mode == INPUT && stream_.deInterleave[1] ) {
      // NOTE(review): the line below is garbled — a large span of this
      // file was lost here, apparently including the remainder of
      // probeDeviceOpen() (its SUCCESS return and error: cleanup label)
      // plus the RtApiOss closeStream/startStream/stopStream/abortStream
      // implementations; only the tail of abortStream ("stopStream()")
      // survives.  TODO: restore from the canonical RtAudio 3.0.3 sources.
      for (int k=0; kstopStream();
}

// Install a user callback and spawn the joinable callback thread that
// will drive tickStream().  SCHED_RR priority only takes effect when
// the program runs with sufficient privileges.
void RtApiOss :: setStreamCallback(RtAudioCallback callback, void *userData)
{
  verifyStream();

  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  if ( info->usingCallback ) {
    sprintf(message_, "RtApiOss: A callback is already set for this stream!");
    error(RtError::WARNING);
    return;
  }

  info->callback = (void *) callback;
  info->userData = userData;
  info->usingCallback = true;
  info->object = (void *) this;

  // Set the thread attributes for joinable and realtime scheduling
  // priority.  The higher priority will only take affect if the
  // program is run as root or suid.
  pthread_attr_t attr;
  pthread_attr_init(&attr);
  pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE);
  pthread_attr_setschedpolicy(&attr, SCHED_RR);

  int err = pthread_create(&(info->thread), &attr, ossCallbackHandler, &stream_.callbackInfo);
  pthread_attr_destroy(&attr);
  if (err) {
    info->usingCallback = false;
    sprintf(message_, "RtApiOss: error starting callback thread!");
    error(RtError::THREAD_ERROR);
  }
}

// Stop the stream if running, then join and clear the callback thread.
void RtApiOss :: cancelStreamCallback()
{
  verifyStream();

  if (stream_.callbackInfo.usingCallback) {

    if (stream_.state == STREAM_RUNNING)
      stopStream();

    MUTEX_LOCK(&stream_.mutex);

    stream_.callbackInfo.usingCallback = false;
    pthread_join(stream_.callbackInfo.thread, NULL);
    stream_.callbackInfo.thread = 0;
    stream_.callbackInfo.callback = NULL;
    stream_.callbackInfo.userData = NULL;

    MUTEX_UNLOCK(&stream_.mutex);
  }
}

// Callback thread entry point: tick the stream repeatedly until the
// callback is cancelled or a stream error occurs.
extern "C" void *ossCallbackHandler(void *ptr)
{
  CallbackInfo *info = (CallbackInfo *) ptr;
  RtApiOss *object = (RtApiOss *) info->object;
  bool *usingCallback = &info->usingCallback;

  while ( *usingCallback ) {
    pthread_testcancel();
    try {
      object->tickStream();
    }
    catch (RtError &exception) {
      fprintf(stderr, "\nRtApiOss: callback thread error (%s) ... closing thread.\n\n",
              exception.getMessageString());
      break;
    }
  }

  return 0;
}

//******************** End of __LINUX_OSS__ *********************//
#endif

#if defined(__MACOSX_CORE__)

// The OS X CoreAudio API is designed to use a separate callback
// procedure for each of its audio devices.  A single RtAudio duplex
// stream using two different devices is supported here, though it
// cannot be guaranteed to always behave correctly because we cannot
// synchronize these two callbacks.  This same functionality can be
// achieved with better synchrony by opening two separate streams for
// the devices and using RtAudio blocking calls (i.e. tickStream()).
//
// A property listener is installed for over/underrun information.
// However, no functionality is currently provided to allow property // listeners to trigger user handlers because it is unclear what could // be done if a critical stream parameter (buffer size, sample rate, // device disconnect) notification arrived. The listeners entail // quite a bit of extra code and most likely, a user program wouldn't // be prepared for the result anyway. // A structure to hold various information related to the CoreAudio API // implementation. struct CoreHandle { UInt32 index[2]; bool stopStream; bool xrun; char *deviceBuffer; pthread_cond_t condition; CoreHandle() :stopStream(false), xrun(false), deviceBuffer(0) {} }; RtApiCore :: RtApiCore() { this->initialize(); if (nDevices_ <= 0) { sprintf(message_, "RtApiCore: no Macintosh OS-X Core Audio devices found!"); error(RtError::NO_DEVICES_FOUND); } } RtApiCore :: ~RtApiCore() { // The subclass destructor gets called before the base class // destructor, so close an existing stream before deallocating // apiDeviceId memory. if ( stream_.mode != UNINITIALIZED ) closeStream(); // Free our allocated apiDeviceId memory. AudioDeviceID *id; for ( unsigned int i=0; iapiDeviceId; err = AudioDeviceGetProperty( *id, 0, false, kAudioDevicePropertyDeviceManufacturer, &dataSize, name ); if (err != noErr) { sprintf( message_, "RtApiCore: OS-X error getting device manufacturer." ); error(RtError::DEBUG_WARNING); return; } strncpy(fullname, name, 256); strcat(fullname, ": " ); dataSize = 256; err = AudioDeviceGetProperty( *id, 0, false, kAudioDevicePropertyDeviceName, &dataSize, name ); if (err != noErr) { sprintf( message_, "RtApiCore: OS-X error getting device name." ); error(RtError::DEBUG_WARNING); return; } strncat(fullname, name, 254); info->name.erase(); info->name.append( (const char *)fullname, strlen(fullname)+1); // Get output channel information. 
unsigned int i, minChannels = 0, maxChannels = 0, nStreams = 0; AudioBufferList *bufferList = nil; err = AudioDeviceGetPropertyInfo( *id, 0, false, kAudioDevicePropertyStreamConfiguration, &dataSize, NULL ); if (err == noErr && dataSize > 0) { bufferList = (AudioBufferList *) malloc( dataSize ); if (bufferList == NULL) { sprintf(message_, "RtApiCore: memory allocation error!"); error(RtError::DEBUG_WARNING); return; } err = AudioDeviceGetProperty( *id, 0, false, kAudioDevicePropertyStreamConfiguration, &dataSize, bufferList ); if (err == noErr) { maxChannels = 0; minChannels = 1000; nStreams = bufferList->mNumberBuffers; for ( i=0; imBuffers[i].mNumberChannels; if ( bufferList->mBuffers[i].mNumberChannels < minChannels ) minChannels = bufferList->mBuffers[i].mNumberChannels; } } } free (bufferList); if (err != noErr || dataSize <= 0) { sprintf( message_, "RtApiCore: OS-X error getting output channels for device (%s).", info->name.c_str() ); error(RtError::DEBUG_WARNING); return; } if ( nStreams ) { if ( maxChannels > 0 ) info->maxOutputChannels = maxChannels; if ( minChannels > 0 ) info->minOutputChannels = minChannels; } // Get input channel information. 
bufferList = nil; err = AudioDeviceGetPropertyInfo( *id, 0, true, kAudioDevicePropertyStreamConfiguration, &dataSize, NULL ); if (err == noErr && dataSize > 0) { bufferList = (AudioBufferList *) malloc( dataSize ); if (bufferList == NULL) { sprintf(message_, "RtApiCore: memory allocation error!"); error(RtError::DEBUG_WARNING); return; } err = AudioDeviceGetProperty( *id, 0, true, kAudioDevicePropertyStreamConfiguration, &dataSize, bufferList ); if (err == noErr) { maxChannels = 0; minChannels = 1000; nStreams = bufferList->mNumberBuffers; for ( i=0; imBuffers[i].mNumberChannels < minChannels ) minChannels = bufferList->mBuffers[i].mNumberChannels; maxChannels += bufferList->mBuffers[i].mNumberChannels; } } } free (bufferList); if (err != noErr || dataSize <= 0) { sprintf( message_, "RtApiCore: OS-X error getting input channels for device (%s).", info->name.c_str() ); error(RtError::DEBUG_WARNING); return; } if ( nStreams ) { if ( maxChannels > 0 ) info->maxInputChannels = maxChannels; if ( minChannels > 0 ) info->minInputChannels = minChannels; } // If device opens for both playback and capture, we determine the channels. if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) { info->hasDuplexSupport = true; info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ? info->maxInputChannels : info->maxOutputChannels; info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ? info->minInputChannels : info->minOutputChannels; } // Probe the device sample rate and data format parameters. The // core audio query mechanism is performed on a "stream" // description, which can have a variable number of channels and // apply to input or output only. // Create a stream description structure. 
AudioStreamBasicDescription description; dataSize = sizeof( AudioStreamBasicDescription ); memset(&description, 0, sizeof(AudioStreamBasicDescription)); bool isInput = false; if ( info->maxOutputChannels == 0 ) isInput = true; bool isDuplex = false; if ( info->maxDuplexChannels > 0 ) isDuplex = true; // Determine the supported sample rates. info->sampleRates.clear(); for (unsigned int k=0; ksampleRates.push_back( SAMPLE_RATES[k] ); } if (info->sampleRates.size() == 0) { sprintf( message_, "RtApiCore: No supported sample rates found for OS-X device (%s).", info->name.c_str() ); error(RtError::DEBUG_WARNING); return; } // Determine the supported data formats. info->nativeFormats = 0; description.mFormatID = kAudioFormatLinearPCM; description.mBitsPerChannel = 8; description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_SINT8; else { description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_SINT8; } description.mBitsPerChannel = 16; description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_SINT16; else { description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_SINT16; } description.mBitsPerChannel = 32; description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_SINT32; else { description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_SINT32; } description.mBitsPerChannel = 24; 
description.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsAlignedHigh | kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_SINT24; else { description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_SINT24; } description.mBitsPerChannel = 32; description.mFormatFlags = kLinearPCMFormatFlagIsFloat | kLinearPCMFormatFlagIsPacked | kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_FLOAT32; else { description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_FLOAT32; } description.mBitsPerChannel = 64; description.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_FLOAT64; else { description.mFormatFlags &= ~kLinearPCMFormatFlagIsBigEndian; if ( deviceSupportsFormat( *id, isInput, &description, isDuplex ) ) info->nativeFormats |= RTAUDIO_FLOAT64; } // Check that we have at least one supported format. 
if (info->nativeFormats == 0) { sprintf(message_, "RtApiCore: OS-X device (%s) data format not supported by RtAudio.", info->name.c_str()); error(RtError::DEBUG_WARNING); return; } info->probed = true; } OSStatus callbackHandler( AudioDeviceID inDevice, const AudioTimeStamp* inNow, const AudioBufferList* inInputData, const AudioTimeStamp* inInputTime, AudioBufferList* outOutputData, const AudioTimeStamp* inOutputTime, void* infoPointer ) { CallbackInfo *info = (CallbackInfo *) infoPointer; RtApiCore *object = (RtApiCore *) info->object; try { object->callbackEvent( inDevice, (void *)inInputData, (void *)outOutputData ); } catch (RtError &exception) { fprintf(stderr, "\nRtApiCore: callback handler error (%s)!\n\n", exception.getMessageString()); return kAudioHardwareUnspecifiedError; } return kAudioHardwareNoError; } OSStatus deviceListener( AudioDeviceID inDevice, UInt32 channel, Boolean isInput, AudioDevicePropertyID propertyID, void* handlePointer ) { CoreHandle *handle = (CoreHandle *) handlePointer; if ( propertyID == kAudioDeviceProcessorOverload ) { if ( isInput ) fprintf(stderr, "\nRtApiCore: OS-X audio input overrun detected!\n"); else fprintf(stderr, "\nRtApiCore: OS-X audio output underrun detected!\n"); handle->xrun = true; } return kAudioHardwareNoError; } bool RtApiCore :: probeDeviceOpen( int device, StreamMode mode, int channels, int sampleRate, RtAudioFormat format, int *bufferSize, int numberOfBuffers ) { // Setup for stream mode. bool isInput = false; AudioDeviceID id = *((AudioDeviceID *) devices_[device].apiDeviceId); if ( mode == INPUT ) isInput = true; // Search for a stream which contains the desired number of channels. 
OSStatus err = noErr; UInt32 dataSize; unsigned int deviceChannels, nStreams = 0; UInt32 iChannel = 0, iStream = 0; AudioBufferList *bufferList = nil; err = AudioDeviceGetPropertyInfo( id, 0, isInput, kAudioDevicePropertyStreamConfiguration, &dataSize, NULL ); if (err == noErr && dataSize > 0) { bufferList = (AudioBufferList *) malloc( dataSize ); if (bufferList == NULL) { sprintf(message_, "RtApiCore: memory allocation error in probeDeviceOpen()!"); error(RtError::DEBUG_WARNING); return FAILURE; } err = AudioDeviceGetProperty( id, 0, isInput, kAudioDevicePropertyStreamConfiguration, &dataSize, bufferList ); if (err == noErr) { stream_.deInterleave[mode] = false; nStreams = bufferList->mNumberBuffers; for ( iStream=0; iStreammBuffers[iStream].mNumberChannels >= (unsigned int) channels ) break; iChannel += bufferList->mBuffers[iStream].mNumberChannels; } // If we didn't find a single stream above, see if we can meet // the channel specification in mono mode (i.e. using separate // non-interleaved buffers). This can only work if there are N // consecutive one-channel streams, where N is the number of // desired channels. 
iChannel = 0; if ( iStream >= nStreams && nStreams >= (unsigned int) channels ) { int counter = 0; for ( iStream=0; iStreammBuffers[iStream].mNumberChannels == 1 ) counter++; else counter = 0; if ( counter == channels ) { iStream -= channels - 1; iChannel -= channels - 1; stream_.deInterleave[mode] = true; break; } iChannel += bufferList->mBuffers[iStream].mNumberChannels; } } } } if (err != noErr || dataSize <= 0) { if ( bufferList ) free( bufferList ); sprintf( message_, "RtApiCore: OS-X error getting channels for device (%s).", devices_[device].name.c_str() ); error(RtError::DEBUG_WARNING); return FAILURE; } if (iStream >= nStreams) { free (bufferList); sprintf( message_, "RtApiCore: unable to find OS-X audio stream on device (%s) for requested channels (%d).", devices_[device].name.c_str(), channels ); error(RtError::DEBUG_WARNING); return FAILURE; } // This is ok even for mono mode ... it gets updated later. deviceChannels = bufferList->mBuffers[iStream].mNumberChannels; free (bufferList); // Determine the buffer size. AudioValueRange bufferRange; dataSize = sizeof(AudioValueRange); err = AudioDeviceGetProperty( id, 0, isInput, kAudioDevicePropertyBufferSizeRange, &dataSize, &bufferRange); if (err != noErr) { sprintf( message_, "RtApiCore: OS-X error getting buffer size range for device (%s).", devices_[device].name.c_str() ); error(RtError::DEBUG_WARNING); return FAILURE; } long bufferBytes = *bufferSize * deviceChannels * formatBytes(RTAUDIO_FLOAT32); if (bufferRange.mMinimum > bufferBytes) bufferBytes = (int) bufferRange.mMinimum; else if (bufferRange.mMaximum < bufferBytes) bufferBytes = (int) bufferRange.mMaximum; // Set the buffer size. For mono mode, I'm assuming we only need to // make this setting for the first channel. 
UInt32 theSize = (UInt32) bufferBytes; dataSize = sizeof( UInt32); err = AudioDeviceSetProperty(id, NULL, 0, isInput, kAudioDevicePropertyBufferSize, dataSize, &theSize); if (err != noErr) { sprintf( message_, "RtApiCore: OS-X error setting the buffer size for device (%s).", devices_[device].name.c_str() ); error(RtError::DEBUG_WARNING); return FAILURE; } // If attempting to setup a duplex stream, the bufferSize parameter // MUST be the same in both directions! *bufferSize = bufferBytes / ( deviceChannels * formatBytes(RTAUDIO_FLOAT32) ); if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) { sprintf( message_, "RtApiCore: OS-X error setting buffer size for duplex stream on device (%s).", devices_[device].name.c_str() ); error(RtError::DEBUG_WARNING); return FAILURE; } stream_.bufferSize = *bufferSize; stream_.nBuffers = 1; // Set the stream format description. Do for each channel in mono mode. AudioStreamBasicDescription description; dataSize = sizeof( AudioStreamBasicDescription ); if ( stream_.deInterleave[mode] ) nStreams = channels; else nStreams = 1; for ( unsigned int i=0; i 1 && stream_.deInterleave[mode]) stream_.doConvertBuffer[mode] = true; // Allocate our CoreHandle structure for the stream. CoreHandle *handle; if ( stream_.apiHandle == 0 ) { handle = (CoreHandle *) calloc(1, sizeof(CoreHandle)); if ( handle == NULL ) { sprintf(message_, "RtApiCore: OS-X error allocating coreHandle memory (%s).", devices_[device].name.c_str()); goto error; } handle->index[0] = 0; handle->index[1] = 0; if ( pthread_cond_init(&handle->condition, NULL) ) { sprintf(message_, "RtApiCore: error initializing pthread condition variable (%s).", devices_[device].name.c_str()); goto error; } stream_.apiHandle = (void *) handle; } else handle = (CoreHandle *) stream_.apiHandle; handle->index[mode] = iStream; // Allocate necessary internal buffers. 
if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) { long buffer_bytes; if (stream_.nUserChannels[0] >= stream_.nUserChannels[1]) buffer_bytes = stream_.nUserChannels[0]; else buffer_bytes = stream_.nUserChannels[1]; buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat); if (stream_.userBuffer) free(stream_.userBuffer); stream_.userBuffer = (char *) calloc(buffer_bytes, 1); if (stream_.userBuffer == NULL) { sprintf(message_, "RtApiCore: OS-X error allocating user buffer memory (%s).", devices_[device].name.c_str()); goto error; } } if ( stream_.deInterleave[mode] ) { long buffer_bytes; bool makeBuffer = true; if ( mode == OUTPUT ) buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]); else { // mode == INPUT buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]); if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) { long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]); if ( buffer_bytes < bytes_out ) makeBuffer = false; } } if ( makeBuffer ) { buffer_bytes *= *bufferSize; if (stream_.deviceBuffer) free(stream_.deviceBuffer); stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1); if (stream_.deviceBuffer == NULL) { sprintf(message_, "RtApiCore: error allocating device buffer memory (%s).", devices_[device].name.c_str()); goto error; } // If not de-interleaving, we point stream_.deviceBuffer to the // OS X supplied device buffer before doing any necessary data // conversions. This presents a problem if we have a duplex // stream using one device which needs de-interleaving and // another device which doesn't. So, save a pointer to our own // device buffer in the CallbackInfo structure. handle->deviceBuffer = stream_.deviceBuffer; } } stream_.sampleRate = sampleRate; stream_.device[mode] = device; stream_.state = STREAM_STOPPED; stream_.callbackInfo.object = (void *) this; // Setup the buffer conversion information structure. 
if ( stream_.doConvertBuffer[mode] ) { if (mode == INPUT) { // convert device to user buffer stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1]; stream_.convertInfo[mode].outJump = stream_.nUserChannels[1]; stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1]; stream_.convertInfo[mode].outFormat = stream_.userFormat; } else { // convert user to device buffer stream_.convertInfo[mode].inJump = stream_.nUserChannels[0]; stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0]; stream_.convertInfo[mode].inFormat = stream_.userFormat; stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0]; } if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump ) stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump; else stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump; // Set up the interleave/deinterleave offsets. if ( mode == INPUT && stream_.deInterleave[1] ) { for (int k=0; kcondition); free(handle); stream_.apiHandle = 0; } if (stream_.userBuffer) { free(stream_.userBuffer); stream_.userBuffer = 0; } error(RtError::DEBUG_WARNING); return FAILURE; } void RtApiCore :: closeStream() { // We don't want an exception to be thrown here because this // function is called by our class destructor. So, do our own // stream check. 
if ( stream_.mode == UNINITIALIZED ) { sprintf(message_, "RtApiCore::closeStream(): no open stream to close!"); error(RtError::WARNING); return; } AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId ); if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { if (stream_.state == STREAM_RUNNING) AudioDeviceStop( id, callbackHandler ); AudioDeviceRemoveIOProc( id, callbackHandler ); } id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId ); if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) { if (stream_.state == STREAM_RUNNING) AudioDeviceStop( id, callbackHandler ); AudioDeviceRemoveIOProc( id, callbackHandler ); } if (stream_.userBuffer) { free(stream_.userBuffer); stream_.userBuffer = 0; } if ( stream_.deInterleave[0] || stream_.deInterleave[1] ) { free(stream_.deviceBuffer); stream_.deviceBuffer = 0; } CoreHandle *handle = (CoreHandle *) stream_.apiHandle; // Destroy pthread condition variable and free the CoreHandle structure. 
if ( handle ) { pthread_cond_destroy(&handle->condition); free( handle ); stream_.apiHandle = 0; } stream_.mode = UNINITIALIZED; } void RtApiCore :: startStream() { verifyStream(); if (stream_.state == STREAM_RUNNING) return; MUTEX_LOCK(&stream_.mutex); OSStatus err; AudioDeviceID id; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId ); err = AudioDeviceStart(id, callbackHandler); if (err != noErr) { sprintf(message_, "RtApiCore: OS-X error starting callback procedure on device (%s).", devices_[stream_.device[0]].name.c_str()); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) { id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId ); err = AudioDeviceStart(id, callbackHandler); if (err != noErr) { sprintf(message_, "RtApiCore: OS-X error starting input callback procedure on device (%s).", devices_[stream_.device[0]].name.c_str()); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } CoreHandle *handle = (CoreHandle *) stream_.apiHandle; handle->stopStream = false; stream_.state = STREAM_RUNNING; MUTEX_UNLOCK(&stream_.mutex); } void RtApiCore :: stopStream() { verifyStream(); if (stream_.state == STREAM_STOPPED) return; // Change the state before the lock to improve shutdown response // when using a callback. 
stream_.state = STREAM_STOPPED; MUTEX_LOCK(&stream_.mutex); OSStatus err; AudioDeviceID id; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId ); err = AudioDeviceStop(id, callbackHandler); if (err != noErr) { sprintf(message_, "RtApiCore: OS-X error stopping callback procedure on device (%s).", devices_[stream_.device[0]].name.c_str()); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } if (stream_.mode == INPUT || ( stream_.mode == DUPLEX && stream_.device[0] != stream_.device[1]) ) { id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId ); err = AudioDeviceStop(id, callbackHandler); if (err != noErr) { sprintf(message_, "RtApiCore: OS-X error stopping input callback procedure on device (%s).", devices_[stream_.device[0]].name.c_str()); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } MUTEX_UNLOCK(&stream_.mutex); } void RtApiCore :: abortStream() { stopStream(); } void RtApiCore :: tickStream() { verifyStream(); if (stream_.state == STREAM_STOPPED) return; if (stream_.callbackInfo.usingCallback) { sprintf(message_, "RtApiCore: tickStream() should not be used when a callback function is set!"); error(RtError::WARNING); return; } CoreHandle *handle = (CoreHandle *) stream_.apiHandle; MUTEX_LOCK(&stream_.mutex); pthread_cond_wait(&handle->condition, &stream_.mutex); MUTEX_UNLOCK(&stream_.mutex); } void RtApiCore :: callbackEvent( AudioDeviceID deviceId, void *inData, void *outData ) { verifyStream(); if (stream_.state == STREAM_STOPPED) return; CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo; CoreHandle *handle = (CoreHandle *) stream_.apiHandle; AudioBufferList *inBufferList = (AudioBufferList *) inData; AudioBufferList *outBufferList = (AudioBufferList *) outData; if ( info->usingCallback && handle->stopStream ) { // Check if the stream should be stopped (via the previous user // callback return value). 
We stop the stream here, rather than // after the function call, so that output data can first be // processed. this->stopStream(); return; } MUTEX_LOCK(&stream_.mutex); // Invoke user callback first, to get fresh output data. Don't // invoke the user callback if duplex mode AND the input/output devices // are different AND this function is called for the input device. AudioDeviceID id = *( (AudioDeviceID *) devices_[stream_.device[0]].apiDeviceId ); if ( info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) ) { RtAudioCallback callback = (RtAudioCallback) info->callback; handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData); if ( handle->xrun == true ) { handle->xrun = false; MUTEX_UNLOCK(&stream_.mutex); return; } } if ( stream_.mode == OUTPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) { if (stream_.doConvertBuffer[0]) { if ( !stream_.deInterleave[0] ) stream_.deviceBuffer = (char *) outBufferList->mBuffers[handle->index[0]].mData; else stream_.deviceBuffer = handle->deviceBuffer; convertBuffer( stream_.deviceBuffer, stream_.userBuffer, stream_.convertInfo[0] ); if ( stream_.doByteSwap[0] ) byteSwapBuffer(stream_.deviceBuffer, stream_.bufferSize * stream_.nDeviceChannels[0], stream_.deviceFormat[0]); if ( stream_.deInterleave[0] ) { int bufferBytes = outBufferList->mBuffers[handle->index[0]].mDataByteSize; for ( int i=0; imBuffers[handle->index[0]+i].mData, &stream_.deviceBuffer[i*bufferBytes], bufferBytes ); } } } else { if (stream_.doByteSwap[0]) byteSwapBuffer(stream_.userBuffer, stream_.bufferSize * stream_.nUserChannels[0], stream_.userFormat); memcpy(outBufferList->mBuffers[handle->index[0]].mData, stream_.userBuffer, outBufferList->mBuffers[handle->index[0]].mDataByteSize ); } } id = *( (AudioDeviceID *) devices_[stream_.device[1]].apiDeviceId ); if ( stream_.mode == INPUT || ( stream_.mode == DUPLEX && deviceId == id ) ) { if (stream_.doConvertBuffer[1]) { if ( stream_.deInterleave[1] ) { 
stream_.deviceBuffer = (char *) handle->deviceBuffer; int bufferBytes = inBufferList->mBuffers[handle->index[1]].mDataByteSize; for ( int i=0; imBuffers[handle->index[1]+i].mData, bufferBytes ); } } else stream_.deviceBuffer = (char *) inBufferList->mBuffers[handle->index[1]].mData; if ( stream_.doByteSwap[1] ) byteSwapBuffer(stream_.deviceBuffer, stream_.bufferSize * stream_.nDeviceChannels[1], stream_.deviceFormat[1]); convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] ); } else { memcpy(stream_.userBuffer, inBufferList->mBuffers[handle->index[1]].mData, inBufferList->mBuffers[handle->index[1]].mDataByteSize ); if (stream_.doByteSwap[1]) byteSwapBuffer(stream_.userBuffer, stream_.bufferSize * stream_.nUserChannels[1], stream_.userFormat); } } if ( !info->usingCallback && (stream_.mode != DUPLEX || deviceId == id ) ) pthread_cond_signal(&handle->condition); MUTEX_UNLOCK(&stream_.mutex); } void RtApiCore :: setStreamCallback(RtAudioCallback callback, void *userData) { verifyStream(); if ( stream_.callbackInfo.usingCallback ) { sprintf(message_, "RtApiCore: A callback is already set for this stream!"); error(RtError::WARNING); return; } stream_.callbackInfo.callback = (void *) callback; stream_.callbackInfo.userData = userData; stream_.callbackInfo.usingCallback = true; } void RtApiCore :: cancelStreamCallback() { verifyStream(); if (stream_.callbackInfo.usingCallback) { if (stream_.state == STREAM_RUNNING) stopStream(); MUTEX_LOCK(&stream_.mutex); stream_.callbackInfo.usingCallback = false; stream_.callbackInfo.userData = NULL; stream_.state = STREAM_STOPPED; stream_.callbackInfo.callback = NULL; MUTEX_UNLOCK(&stream_.mutex); } } //******************** End of __MACOSX_CORE__ *********************// #endif #if defined(__LINUX_JACK__) // JACK is a low-latency audio server, written primarily for the // GNU/Linux operating system. 
It can connect a number of different // applications to an audio device, as well as allowing them to share // audio between themselves. // // The JACK server must be running before RtApiJack can be instantiated. // RtAudio will report just a single "device", which is the JACK audio // server. The JACK server is typically started in a terminal as follows: // // .jackd -d alsa -d hw:0 // // or through an interface program such as qjackctl. Many of the // parameters normally set for a stream are fixed by the JACK server // and can be specified when the JACK server is started. In // particular, // // .jackd -d alsa -d hw:0 -r 44100 -p 512 -n 4 // // specifies a sample rate of 44100 Hz, a buffer size of 512 sample // frames, and number of buffers = 4. Once the server is running, it // is not possible to override these values. If the values are not // specified in the command-line, the JACK server uses default values. #include #include // A structure to hold various information related to the Jack API // implementation. struct JackHandle { jack_client_t *client; jack_port_t **ports[2]; bool clientOpen; bool stopStream; pthread_cond_t condition; JackHandle() :client(0), clientOpen(false), stopStream(false) {} }; std::string jackmsg; static void jackerror (const char *desc) { jackmsg.erase(); jackmsg.append( desc, strlen(desc)+1 ); } RtApiJack :: RtApiJack() { this->initialize(); if (nDevices_ <= 0) { sprintf(message_, "RtApiJack: no Linux Jack server found or connection error (jack: %s)!", jackmsg.c_str()); error(RtError::NO_DEVICES_FOUND); } } RtApiJack :: ~RtApiJack() { if ( stream_.mode != UNINITIALIZED ) closeStream(); } void RtApiJack :: initialize(void) { nDevices_ = 0; // Tell the jack server to call jackerror() when it experiences an // error. This function saves the error message for subsequent // reporting via the normal RtAudio error function. jack_set_error_function( jackerror ); // Look for jack server and try to become a client. 
jack_client_t *client; if ( (client = jack_client_new( "RtApiJack" )) == 0) return; /* RtApiDevice device; // Determine the name of the device. device.name = "Jack Server"; devices_.push_back(device); nDevices_++; */ const char **ports; std::string port, prevPort; unsigned int nChannels = 0; ports = jack_get_ports( client, NULL, NULL, 0 ); if ( ports ) { port = (char *) ports[ nChannels ]; unsigned int colonPos = 0; do { port = (char *) ports[ nChannels ]; if ( (colonPos = port.find(":")) != std::string::npos ) { port = port.substr( 0, colonPos+1 ); if ( port != prevPort ) { RtApiDevice device; device.name = port; devices_.push_back( device ); nDevices_++; prevPort = port; } } } while ( ports[++nChannels] ); free( ports ); } jack_client_close(client); } void RtApiJack :: probeDeviceInfo(RtApiDevice *info) { // Look for jack server and try to become a client. jack_client_t *client; if ( (client = jack_client_new( "RtApiJack_Probe" )) == 0) { sprintf(message_, "RtApiJack: error connecting to Linux Jack server in probeDeviceInfo() (jack: %s)!", jackmsg.c_str()); error(RtError::WARNING); return; } // Get the current jack server sample rate. info->sampleRates.clear(); info->sampleRates.push_back( jack_get_sample_rate(client) ); // Count the available ports as device channels. Jack "input ports" // equal RtAudio output channels. const char **ports; char *port; unsigned int nChannels = 0; ports = jack_get_ports( client, info->name.c_str(), NULL, JackPortIsInput ); if ( ports ) { port = (char *) ports[nChannels]; while ( port ) port = (char *) ports[++nChannels]; free( ports ); info->maxOutputChannels = nChannels; info->minOutputChannels = 1; } // Jack "output ports" equal RtAudio input channels. 
nChannels = 0; ports = jack_get_ports( client, info->name.c_str(), NULL, JackPortIsOutput ); if ( ports ) { port = (char *) ports[nChannels]; while ( port ) port = (char *) ports[++nChannels]; free( ports ); info->maxInputChannels = nChannels; info->minInputChannels = 1; } if (info->maxOutputChannels == 0 && info->maxInputChannels == 0) { jack_client_close(client); sprintf(message_, "RtApiJack: error determining jack input/output channels!"); error(RtError::DEBUG_WARNING); return; } if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) { info->hasDuplexSupport = true; info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ? info->maxInputChannels : info->maxOutputChannels; info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ? info->minInputChannels : info->minOutputChannels; } // Get the jack data format type. There isn't much documentation // regarding supported data formats in jack. I'm assuming here that // the default type will always be a floating-point type, of length // equal to either 4 or 8 bytes. 
int sample_size = sizeof( jack_default_audio_sample_t ); if ( sample_size == 4 ) info->nativeFormats = RTAUDIO_FLOAT32; else if ( sample_size == 8 ) info->nativeFormats = RTAUDIO_FLOAT64; // Check that we have a supported format if (info->nativeFormats == 0) { jack_client_close(client); sprintf(message_, "RtApiJack: error determining jack server data format!"); error(RtError::DEBUG_WARNING); return; } jack_client_close(client); info->probed = true; } int jackCallbackHandler(jack_nframes_t nframes, void *infoPointer) { CallbackInfo *info = (CallbackInfo *) infoPointer; RtApiJack *object = (RtApiJack *) info->object; try { object->callbackEvent( (unsigned long) nframes ); } catch (RtError &exception) { fprintf(stderr, "\nRtApiJack: callback handler error (%s)!\n\n", exception.getMessageString()); return 0; } return 0; } void jackShutdown(void *infoPointer) { CallbackInfo *info = (CallbackInfo *) infoPointer; JackHandle *handle = (JackHandle *) info->apiInfo; handle->clientOpen = false; RtApiJack *object = (RtApiJack *) info->object; // Check current stream state. If stopped, then we'll assume this // was called as a result of a call to RtApiJack::stopStream (the // deactivation of a client handle causes this function to be called). // If not, we'll assume the Jack server is shutting down or some // other problem occurred and we should close the stream. if ( object->getStreamState() == RtApi::STREAM_STOPPED ) return; try { object->closeStream(); } catch (RtError &exception) { fprintf(stderr, "\nRtApiJack: jackShutdown error (%s)!\n\n", exception.getMessageString()); return; } fprintf(stderr, "\nRtApiJack: the Jack server is shutting down this client ... 
stream stopped and closed!!!\n\n"); } int jackXrun( void * ) { fprintf(stderr, "\nRtApiJack: audio overrun/underrun reported!\n"); return 0; } bool RtApiJack :: probeDeviceOpen(int device, StreamMode mode, int channels, int sampleRate, RtAudioFormat format, int *bufferSize, int numberOfBuffers) { // Compare the jack server channels to the requested number of channels. if ( (mode == OUTPUT && devices_[device].maxOutputChannels < channels ) || (mode == INPUT && devices_[device].maxInputChannels < channels ) ) { sprintf(message_, "RtApiJack: the Jack server does not support requested channels!"); error(RtError::DEBUG_WARNING); return FAILURE; } JackHandle *handle = (JackHandle *) stream_.apiHandle; // Look for jack server and try to become a client (only do once per stream). char label[32]; jack_client_t *client = 0; if ( mode == OUTPUT || (mode == INPUT && stream_.mode != OUTPUT) ) { snprintf(label, 32, "RtApiJack"); if ( (client = jack_client_new( (const char *) label )) == 0) { sprintf(message_, "RtApiJack: cannot connect to Linux Jack server in probeDeviceOpen() (jack: %s)!", jackmsg.c_str()); error(RtError::DEBUG_WARNING); return FAILURE; } } else { // The handle must have been created on an earlier pass. client = handle->client; } // First, check the jack server sample rate. int jack_rate; jack_rate = (int) jack_get_sample_rate(client); if ( sampleRate != jack_rate ) { jack_client_close(client); sprintf( message_, "RtApiJack: the requested sample rate (%d) is different than the JACK server rate (%d).", sampleRate, jack_rate ); error(RtError::DEBUG_WARNING); return FAILURE; } stream_.sampleRate = jack_rate; // The jack server seems to support just a single floating-point // data type. Since we already checked it before, just use what we // found then. stream_.deviceFormat[mode] = devices_[device].nativeFormats; stream_.userFormat = format; // Jack always uses non-interleaved buffers. We'll need to // de-interleave if we have more than one channel. 
stream_.deInterleave[mode] = false; if ( channels > 1 ) stream_.deInterleave[mode] = true; // Jack always provides host byte-ordered data. stream_.doByteSwap[mode] = false; // Get the buffer size. The buffer size and number of buffers // (periods) is set when the jack server is started. stream_.bufferSize = (int) jack_get_buffer_size(client); *bufferSize = stream_.bufferSize; stream_.nDeviceChannels[mode] = channels; stream_.nUserChannels[mode] = channels; stream_.doConvertBuffer[mode] = false; if (stream_.userFormat != stream_.deviceFormat[mode]) stream_.doConvertBuffer[mode] = true; if (stream_.deInterleave[mode]) stream_.doConvertBuffer[mode] = true; // Allocate our JackHandle structure for the stream. if ( handle == 0 ) { handle = (JackHandle *) calloc(1, sizeof(JackHandle)); if ( handle == NULL ) { sprintf(message_, "RtApiJack: error allocating JackHandle memory (%s).", devices_[device].name.c_str()); goto error; } handle->ports[0] = 0; handle->ports[1] = 0; if ( pthread_cond_init(&handle->condition, NULL) ) { sprintf(message_, "RtApiJack: error initializing pthread condition variable!"); goto error; } stream_.apiHandle = (void *) handle; handle->client = client; handle->clientOpen = true; } // Allocate necessary internal buffers. 
if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) { long buffer_bytes; if (stream_.nUserChannels[0] >= stream_.nUserChannels[1]) buffer_bytes = stream_.nUserChannels[0]; else buffer_bytes = stream_.nUserChannels[1]; buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat); if (stream_.userBuffer) free(stream_.userBuffer); stream_.userBuffer = (char *) calloc(buffer_bytes, 1); if (stream_.userBuffer == NULL) { sprintf(message_, "RtApiJack: error allocating user buffer memory (%s).", devices_[device].name.c_str()); goto error; } } if ( stream_.doConvertBuffer[mode] ) { long buffer_bytes; bool makeBuffer = true; if ( mode == OUTPUT ) buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]); else { // mode == INPUT buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]); if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) { long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]); if ( buffer_bytes < bytes_out ) makeBuffer = false; } } if ( makeBuffer ) { buffer_bytes *= *bufferSize; if (stream_.deviceBuffer) free(stream_.deviceBuffer); stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1); if (stream_.deviceBuffer == NULL) { sprintf(message_, "RtApiJack: error allocating device buffer memory (%s).", devices_[device].name.c_str()); goto error; } } } // Allocate memory for the Jack ports (channels) identifiers. handle->ports[mode] = (jack_port_t **) malloc (sizeof (jack_port_t *) * channels); if ( handle->ports[mode] == NULL ) { sprintf(message_, "RtApiJack: error allocating port handle memory (%s).", devices_[device].name.c_str()); goto error; } stream_.device[mode] = device; stream_.state = STREAM_STOPPED; stream_.callbackInfo.usingCallback = false; stream_.callbackInfo.object = (void *) this; stream_.callbackInfo.apiInfo = (void *) handle; if ( stream_.mode == OUTPUT && mode == INPUT ) // We had already set up the stream for output. 
stream_.mode = DUPLEX; else { stream_.mode = mode; jack_set_process_callback( handle->client, jackCallbackHandler, (void *) &stream_.callbackInfo ); jack_set_xrun_callback( handle->client, jackXrun, NULL ); jack_on_shutdown( handle->client, jackShutdown, (void *) &stream_.callbackInfo ); } // Setup the buffer conversion information structure. if ( stream_.doConvertBuffer[mode] ) { if (mode == INPUT) { // convert device to user buffer stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1]; stream_.convertInfo[mode].outJump = stream_.nUserChannels[1]; stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1]; stream_.convertInfo[mode].outFormat = stream_.userFormat; } else { // convert user to device buffer stream_.convertInfo[mode].inJump = stream_.nUserChannels[0]; stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0]; stream_.convertInfo[mode].inFormat = stream_.userFormat; stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0]; } if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump ) stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump; else stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump; // Set up the interleave/deinterleave offsets. if ( mode == INPUT && stream_.deInterleave[1] ) { for (int k=0; kcondition); if ( handle->clientOpen == true ) jack_client_close(handle->client); if ( handle->ports[0] ) free(handle->ports[0]); if ( handle->ports[1] ) free(handle->ports[1]); free( handle ); stream_.apiHandle = 0; } if (stream_.userBuffer) { free(stream_.userBuffer); stream_.userBuffer = 0; } error(RtError::DEBUG_WARNING); return FAILURE; } void RtApiJack :: closeStream() { // We don't want an exception to be thrown here because this // function is called by our class destructor. So, do our own // stream check. 
if ( stream_.mode == UNINITIALIZED ) { sprintf(message_, "RtApiJack::closeStream(): no open stream to close!"); error(RtError::WARNING); return; } JackHandle *handle = (JackHandle *) stream_.apiHandle; if ( handle && handle->clientOpen == true ) { if (stream_.state == STREAM_RUNNING) jack_deactivate(handle->client); jack_client_close(handle->client); } if ( handle ) { if ( handle->ports[0] ) free(handle->ports[0]); if ( handle->ports[1] ) free(handle->ports[1]); pthread_cond_destroy(&handle->condition); free( handle ); stream_.apiHandle = 0; } if (stream_.userBuffer) { free(stream_.userBuffer); stream_.userBuffer = 0; } if (stream_.deviceBuffer) { free(stream_.deviceBuffer); stream_.deviceBuffer = 0; } stream_.mode = UNINITIALIZED; } void RtApiJack :: startStream() { verifyStream(); if (stream_.state == STREAM_RUNNING) return; MUTEX_LOCK(&stream_.mutex); char label[64]; JackHandle *handle = (JackHandle *) stream_.apiHandle; if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) { for ( int i=0; iports[0][i] = jack_port_register(handle->client, (const char *)label, JACK_DEFAULT_AUDIO_TYPE, JackPortIsOutput, 0); } } if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) { for ( int i=0; iports[1][i] = jack_port_register(handle->client, (const char *)label, JACK_DEFAULT_AUDIO_TYPE, JackPortIsInput, 0); } } if (jack_activate(handle->client)) { sprintf(message_, "RtApiJack: unable to activate JACK client!"); error(RtError::SYSTEM_ERROR); } const char **ports; int result; // Get the list of available ports. if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) { ports = jack_get_ports(handle->client, devices_[stream_.device[0]].name.c_str(), NULL, JackPortIsInput); if ( ports == NULL) { sprintf(message_, "RtApiJack: error determining available jack input ports!"); error(RtError::SYSTEM_ERROR); } // Now make the port connections. Since RtAudio wasn't designed to // allow the user to select particular channels of a device, we'll // just open the first "nChannels" ports. 
for ( int i=0; iclient, jack_port_name(handle->ports[0][i]), ports[i] ); if ( result ) { free(ports); sprintf(message_, "RtApiJack: error connecting output ports!"); error(RtError::SYSTEM_ERROR); } } free(ports); } if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) { ports = jack_get_ports( handle->client, devices_[stream_.device[1]].name.c_str(), NULL, JackPortIsOutput ); if ( ports == NULL) { sprintf(message_, "RtApiJack: error determining available jack output ports!"); error(RtError::SYSTEM_ERROR); } // Now make the port connections. See note above. for ( int i=0; iclient, ports[i], jack_port_name(handle->ports[1][i]) ); if ( result ) { free(ports); sprintf(message_, "RtApiJack: error connecting input ports!"); error(RtError::SYSTEM_ERROR); } } free(ports); } handle->stopStream = false; stream_.state = STREAM_RUNNING; MUTEX_UNLOCK(&stream_.mutex); } void RtApiJack :: stopStream() { verifyStream(); if (stream_.state == STREAM_STOPPED) return; // Change the state before the lock to improve shutdown response // when using a callback. 
stream_.state = STREAM_STOPPED; MUTEX_LOCK(&stream_.mutex); JackHandle *handle = (JackHandle *) stream_.apiHandle; jack_deactivate(handle->client); MUTEX_UNLOCK(&stream_.mutex); } void RtApiJack :: abortStream() { stopStream(); } void RtApiJack :: tickStream() { verifyStream(); if (stream_.state == STREAM_STOPPED) return; if (stream_.callbackInfo.usingCallback) { sprintf(message_, "RtApiJack: tickStream() should not be used when a callback function is set!"); error(RtError::WARNING); return; } JackHandle *handle = (JackHandle *) stream_.apiHandle; MUTEX_LOCK(&stream_.mutex); pthread_cond_wait(&handle->condition, &stream_.mutex); MUTEX_UNLOCK(&stream_.mutex); } void RtApiJack :: callbackEvent( unsigned long nframes ) { verifyStream(); if (stream_.state == STREAM_STOPPED) return; CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo; JackHandle *handle = (JackHandle *) stream_.apiHandle; if ( info->usingCallback && handle->stopStream ) { // Check if the stream should be stopped (via the previous user // callback return value). We stop the stream here, rather than // after the function call, so that output data can first be // processed. this->stopStream(); return; } MUTEX_LOCK(&stream_.mutex); // Invoke user callback first, to get fresh output data. 
if ( info->usingCallback ) { RtAudioCallback callback = (RtAudioCallback) info->callback; handle->stopStream = callback(stream_.userBuffer, stream_.bufferSize, info->userData); } jack_default_audio_sample_t *jackbuffer; long bufferBytes = nframes * sizeof(jack_default_audio_sample_t); if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) { if (stream_.doConvertBuffer[0]) { convertBuffer( stream_.deviceBuffer, stream_.userBuffer, stream_.convertInfo[0] ); for ( int i=0; iports[0][i], (jack_nframes_t) nframes); memcpy(jackbuffer, &stream_.deviceBuffer[i*bufferBytes], bufferBytes ); } } else { // single channel only jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[0][0], (jack_nframes_t) nframes); memcpy(jackbuffer, stream_.userBuffer, bufferBytes ); } } if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) { if (stream_.doConvertBuffer[1]) { for ( int i=0; iports[1][i], (jack_nframes_t) nframes); memcpy(&stream_.deviceBuffer[i*bufferBytes], jackbuffer, bufferBytes ); } convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] ); } else { // single channel only jackbuffer = (jack_default_audio_sample_t *) jack_port_get_buffer(handle->ports[1][0], (jack_nframes_t) nframes); memcpy(stream_.userBuffer, jackbuffer, bufferBytes ); } } if ( !info->usingCallback ) pthread_cond_signal(&handle->condition); MUTEX_UNLOCK(&stream_.mutex); } void RtApiJack :: setStreamCallback(RtAudioCallback callback, void *userData) { verifyStream(); if ( stream_.callbackInfo.usingCallback ) { sprintf(message_, "RtApiJack: A callback is already set for this stream!"); error(RtError::WARNING); return; } stream_.callbackInfo.callback = (void *) callback; stream_.callbackInfo.userData = userData; stream_.callbackInfo.usingCallback = true; } void RtApiJack :: cancelStreamCallback() { verifyStream(); if (stream_.callbackInfo.usingCallback) { if (stream_.state == STREAM_RUNNING) stopStream(); MUTEX_LOCK(&stream_.mutex); 
stream_.callbackInfo.usingCallback = false; stream_.callbackInfo.userData = NULL; stream_.state = STREAM_STOPPED; stream_.callbackInfo.callback = NULL; MUTEX_UNLOCK(&stream_.mutex); } } #endif #if defined(__LINUX_ALSA__) #include #include #include // A structure to hold various information related to the ALSA API // implementation. struct AlsaHandle { snd_pcm_t *handles[2]; bool synchronized; char *tempBuffer; AlsaHandle() :synchronized(false), tempBuffer(0) {} }; extern "C" void *alsaCallbackHandler(void * ptr); RtApiAlsa :: RtApiAlsa() { this->initialize(); if (nDevices_ <= 0) { sprintf(message_, "RtApiAlsa: no Linux ALSA audio devices found!"); error(RtError::NO_DEVICES_FOUND); } } RtApiAlsa :: ~RtApiAlsa() { if ( stream_.mode != UNINITIALIZED ) closeStream(); } void RtApiAlsa :: initialize(void) { int card, subdevice, result; char name[64]; const char *cardId; snd_ctl_t *handle; snd_ctl_card_info_t *info; snd_ctl_card_info_alloca(&info); RtApiDevice device; // Count cards and devices nDevices_ = 0; card = -1; snd_card_next(&card); while ( card >= 0 ) { sprintf(name, "hw:%d", card); result = snd_ctl_open(&handle, name, 0); if (result < 0) { sprintf(message_, "RtApiAlsa: control open (%i): %s.", card, snd_strerror(result)); error(RtError::DEBUG_WARNING); goto next_card; } result = snd_ctl_card_info(handle, info); if (result < 0) { sprintf(message_, "RtApiAlsa: control hardware info (%i): %s.", card, snd_strerror(result)); error(RtError::DEBUG_WARNING); goto next_card; } cardId = snd_ctl_card_info_get_id(info); subdevice = -1; while (1) { result = snd_ctl_pcm_next_device(handle, &subdevice); if (result < 0) { sprintf(message_, "RtApiAlsa: control next device (%i): %s.", card, snd_strerror(result)); error(RtError::DEBUG_WARNING); break; } if (subdevice < 0) break; sprintf( name, "hw:%d,%d", card, subdevice ); // If a cardId exists and it contains at least one non-numeric // character, use it to identify the device. 
This avoids a bug // in ALSA such that a numeric string is interpreted as a device // number. for ( unsigned int i=0; iname.c_str(), 64 ); card = strtok(name, ","); err = snd_ctl_open(&chandle, card, SND_CTL_NONBLOCK); if (err < 0) { sprintf(message_, "RtApiAlsa: control open (%s): %s.", card, snd_strerror(err)); error(RtError::DEBUG_WARNING); return; } unsigned int dev = (unsigned int) atoi( strtok(NULL, ",") ); // First try for playback stream = SND_PCM_STREAM_PLAYBACK; snd_pcm_info_set_device(pcminfo, dev); snd_pcm_info_set_subdevice(pcminfo, 0); snd_pcm_info_set_stream(pcminfo, stream); if ((err = snd_ctl_pcm_info(chandle, pcminfo)) < 0) { if (err == -ENOENT) { sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle output!", info->name.c_str()); error(RtError::DEBUG_WARNING); } else { sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) output: %s", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); } goto capture_probe; } err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK ); if (err < 0) { if ( err == EBUSY ) sprintf(message_, "RtApiAlsa: pcm playback device (%s) is busy: %s.", info->name.c_str(), snd_strerror(err)); else sprintf(message_, "RtApiAlsa: pcm playback open (%s) error: %s.", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); goto capture_probe; } // We have an open device ... allocate the parameter structure. err = snd_pcm_hw_params_any(handle, params); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); goto capture_probe; } // Get output channel information. 
unsigned int value; err = snd_pcm_hw_params_get_channels_min(params, &value); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: hardware minimum channel probe error (%s): %s.", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); goto capture_probe; } info->minOutputChannels = value; err = snd_pcm_hw_params_get_channels_max(params, &value); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: hardware maximum channel probe error (%s): %s.", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); goto capture_probe; } info->maxOutputChannels = value; snd_pcm_close(handle); capture_probe: // Now try for capture stream = SND_PCM_STREAM_CAPTURE; snd_pcm_info_set_stream(pcminfo, stream); err = snd_ctl_pcm_info(chandle, pcminfo); snd_ctl_close(chandle); if ( err < 0 ) { if (err == -ENOENT) { sprintf(message_, "RtApiAlsa: pcm device (%s) doesn't handle input!", info->name.c_str()); error(RtError::DEBUG_WARNING); } else { sprintf(message_, "RtApiAlsa: snd_ctl_pcm_info error for device (%s) input: %s", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); } if (info->maxOutputChannels == 0) // didn't open for playback either ... device invalid return; goto probe_parameters; } err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode | SND_PCM_NONBLOCK); if (err < 0) { if ( err == EBUSY ) sprintf(message_, "RtApiAlsa: pcm capture device (%s) is busy: %s.", info->name.c_str(), snd_strerror(err)); else sprintf(message_, "RtApiAlsa: pcm capture open (%s) error: %s.", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); if (info->maxOutputChannels == 0) // didn't open for playback either ... device invalid return; goto probe_parameters; } // We have an open capture device ... allocate the parameter structure. 
err = snd_pcm_hw_params_any(handle, params); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: hardware probe error (%s): %s.", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); if (info->maxOutputChannels > 0) goto probe_parameters; else return; } // Get input channel information. err = snd_pcm_hw_params_get_channels_min(params, &value); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: hardware minimum in channel probe error (%s): %s.", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); if (info->maxOutputChannels > 0) goto probe_parameters; else return; } info->minInputChannels = value; err = snd_pcm_hw_params_get_channels_max(params, &value); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: hardware maximum in channel probe error (%s): %s.", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); if (info->maxOutputChannels > 0) goto probe_parameters; else return; } info->maxInputChannels = value; snd_pcm_close(handle); // If device opens for both playback and capture, we determine the channels. if (info->maxOutputChannels == 0 || info->maxInputChannels == 0) goto probe_parameters; info->hasDuplexSupport = true; info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ? info->maxInputChannels : info->maxOutputChannels; info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ? info->minInputChannels : info->minOutputChannels; probe_parameters: // At this point, we just need to figure out the supported data // formats and sample rates. We'll proceed by opening the device in // the direction with the maximum number of channels, or playback if // they are equal. This might limit our sample rate options, but so // be it. 
if (info->maxOutputChannels >= info->maxInputChannels) stream = SND_PCM_STREAM_PLAYBACK; else stream = SND_PCM_STREAM_CAPTURE; err = snd_pcm_open(&handle, info->name.c_str(), stream, open_mode); if (err < 0) { sprintf(message_, "RtApiAlsa: pcm (%s) won't reopen during probe: %s.", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); return; } // We have an open device ... allocate the parameter structure. err = snd_pcm_hw_params_any(handle, params); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: hardware reopen probe error (%s): %s.", info->name.c_str(), snd_strerror(err)); error(RtError::DEBUG_WARNING); return; } // Test our discrete set of sample rate values. int dir = 0; info->sampleRates.clear(); for (unsigned int i=0; isampleRates.push_back(SAMPLE_RATES[i]); } if (info->sampleRates.size() == 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: no supported sample rates found for device (%s).", info->name.c_str()); error(RtError::DEBUG_WARNING); return; } // Probe the supported data formats ... 
we don't care about endian-ness just yet snd_pcm_format_t format; info->nativeFormats = 0; format = SND_PCM_FORMAT_S8; if (snd_pcm_hw_params_test_format(handle, params, format) == 0) info->nativeFormats |= RTAUDIO_SINT8; format = SND_PCM_FORMAT_S16; if (snd_pcm_hw_params_test_format(handle, params, format) == 0) info->nativeFormats |= RTAUDIO_SINT16; format = SND_PCM_FORMAT_S24; if (snd_pcm_hw_params_test_format(handle, params, format) == 0) info->nativeFormats |= RTAUDIO_SINT24; format = SND_PCM_FORMAT_S32; if (snd_pcm_hw_params_test_format(handle, params, format) == 0) info->nativeFormats |= RTAUDIO_SINT32; format = SND_PCM_FORMAT_FLOAT; if (snd_pcm_hw_params_test_format(handle, params, format) == 0) info->nativeFormats |= RTAUDIO_FLOAT32; format = SND_PCM_FORMAT_FLOAT64; if (snd_pcm_hw_params_test_format(handle, params, format) == 0) info->nativeFormats |= RTAUDIO_FLOAT64; // Check that we have at least one supported format if (info->nativeFormats == 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: pcm device (%s) data format not supported by RtAudio.", info->name.c_str()); error(RtError::DEBUG_WARNING); return; } // That's all ... close the device and return snd_pcm_close(handle); info->probed = true; return; } bool RtApiAlsa :: probeDeviceOpen( int device, StreamMode mode, int channels, int sampleRate, RtAudioFormat format, int *bufferSize, int numberOfBuffers ) { #if defined(__RTAUDIO_DEBUG__) snd_output_t *out; snd_output_stdio_attach(&out, stderr, 0); #endif // I'm not using the "plug" interface ... too much inconsistent behavior. 
const char *name = devices_[device].name.c_str(); snd_pcm_stream_t alsa_stream; if (mode == OUTPUT) alsa_stream = SND_PCM_STREAM_PLAYBACK; else alsa_stream = SND_PCM_STREAM_CAPTURE; int err; snd_pcm_t *handle; int alsa_open_mode = SND_PCM_ASYNC; err = snd_pcm_open(&handle, name, alsa_stream, alsa_open_mode); if (err < 0) { sprintf(message_,"RtApiAlsa: pcm device (%s) won't open: %s.", name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } // Fill the parameter structure. snd_pcm_hw_params_t *hw_params; snd_pcm_hw_params_alloca(&hw_params); err = snd_pcm_hw_params_any(handle, hw_params); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: error getting parameter handle (%s): %s.", name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } #if defined(__RTAUDIO_DEBUG__) fprintf(stderr, "\nRtApiAlsa: dump hardware params just after device open:\n\n"); snd_pcm_hw_params_dump(hw_params, out); #endif // Set access ... try interleaved access first, then non-interleaved if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED) ) { err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_INTERLEAVED); } else if ( !snd_pcm_hw_params_test_access( handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED) ) { err = snd_pcm_hw_params_set_access(handle, hw_params, SND_PCM_ACCESS_RW_NONINTERLEAVED); stream_.deInterleave[mode] = true; } else { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: device (%s) access not supported by RtAudio.", name); error(RtError::DEBUG_WARNING); return FAILURE; } if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: error setting access ( (%s): %s.", name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } // Determine how to set the device format. 
stream_.userFormat = format; snd_pcm_format_t device_format = SND_PCM_FORMAT_UNKNOWN; if (format == RTAUDIO_SINT8) device_format = SND_PCM_FORMAT_S8; else if (format == RTAUDIO_SINT16) device_format = SND_PCM_FORMAT_S16; else if (format == RTAUDIO_SINT24) device_format = SND_PCM_FORMAT_S24; else if (format == RTAUDIO_SINT32) device_format = SND_PCM_FORMAT_S32; else if (format == RTAUDIO_FLOAT32) device_format = SND_PCM_FORMAT_FLOAT; else if (format == RTAUDIO_FLOAT64) device_format = SND_PCM_FORMAT_FLOAT64; if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) { stream_.deviceFormat[mode] = format; goto set_format; } // The user requested format is not natively supported by the device. device_format = SND_PCM_FORMAT_FLOAT64; if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) { stream_.deviceFormat[mode] = RTAUDIO_FLOAT64; goto set_format; } device_format = SND_PCM_FORMAT_FLOAT; if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) { stream_.deviceFormat[mode] = RTAUDIO_FLOAT32; goto set_format; } device_format = SND_PCM_FORMAT_S32; if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) { stream_.deviceFormat[mode] = RTAUDIO_SINT32; goto set_format; } device_format = SND_PCM_FORMAT_S24; if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) { stream_.deviceFormat[mode] = RTAUDIO_SINT24; goto set_format; } device_format = SND_PCM_FORMAT_S16; if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) { stream_.deviceFormat[mode] = RTAUDIO_SINT16; goto set_format; } device_format = SND_PCM_FORMAT_S8; if (snd_pcm_hw_params_test_format(handle, hw_params, device_format) == 0) { stream_.deviceFormat[mode] = RTAUDIO_SINT8; goto set_format; } // If we get here, no supported format was found. 
sprintf(message_,"RtApiAlsa: pcm device (%s) data format not supported by RtAudio.", name); snd_pcm_close(handle); error(RtError::DEBUG_WARNING); return FAILURE; set_format: err = snd_pcm_hw_params_set_format(handle, hw_params, device_format); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: error setting format (%s): %s.", name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } // Determine whether byte-swaping is necessary. stream_.doByteSwap[mode] = false; if (device_format != SND_PCM_FORMAT_S8) { err = snd_pcm_format_cpu_endian(device_format); if (err == 0) stream_.doByteSwap[mode] = true; else if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: error getting format endian-ness (%s): %s.", name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } } // Set the sample rate. err = snd_pcm_hw_params_set_rate(handle, hw_params, (unsigned int)sampleRate, 0); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: error setting sample rate (%d) on device (%s): %s.", sampleRate, name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } // Determine the number of channels for this device. We support a possible // minimum device channel number > than the value requested by the user. 
stream_.nUserChannels[mode] = channels; unsigned int value; err = snd_pcm_hw_params_get_channels_max(hw_params, &value); int device_channels = value; if (err < 0 || device_channels < channels) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: channels (%d) not supported by device (%s).", channels, name); error(RtError::DEBUG_WARNING); return FAILURE; } err = snd_pcm_hw_params_get_channels_min(hw_params, &value); if (err < 0 ) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: error getting min channels count on device (%s).", name); error(RtError::DEBUG_WARNING); return FAILURE; } device_channels = value; if (device_channels < channels) device_channels = channels; stream_.nDeviceChannels[mode] = device_channels; // Set the device channels. err = snd_pcm_hw_params_set_channels(handle, hw_params, device_channels); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: error setting channels (%d) on device (%s): %s.", device_channels, name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } // Set the buffer number, which in ALSA is referred to as the "period". int dir; unsigned int periods = numberOfBuffers; // Even though the hardware might allow 1 buffer, it won't work reliably. if (periods < 2) periods = 2; err = snd_pcm_hw_params_set_periods_near(handle, hw_params, &periods, &dir); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: error setting periods (%s): %s.", name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } // Set the buffer (or period) size. 
snd_pcm_uframes_t period_size = *bufferSize; err = snd_pcm_hw_params_set_period_size_near(handle, hw_params, &period_size, &dir); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: error setting period size (%s): %s.", name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } *bufferSize = period_size; // If attempting to setup a duplex stream, the bufferSize parameter // MUST be the same in both directions! if ( stream_.mode == OUTPUT && mode == INPUT && *bufferSize != stream_.bufferSize ) { sprintf( message_, "RtApiAlsa: error setting buffer size for duplex stream on device (%s).", name ); error(RtError::DEBUG_WARNING); return FAILURE; } stream_.bufferSize = *bufferSize; // Install the hardware configuration err = snd_pcm_hw_params(handle, hw_params); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtApiAlsa: error installing hardware configuration (%s): %s.", name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } #if defined(__RTAUDIO_DEBUG__) fprintf(stderr, "\nRtApiAlsa: dump hardware params after installation:\n\n"); snd_pcm_hw_params_dump(hw_params, out); #endif // Set the software configuration to fill buffers with zeros and prevent device stopping on xruns. 
snd_pcm_sw_params_t *sw_params = NULL; snd_pcm_sw_params_alloca( &sw_params ); snd_pcm_sw_params_current( handle, sw_params ); snd_pcm_sw_params_set_start_threshold( handle, sw_params, *bufferSize ); snd_pcm_sw_params_set_stop_threshold( handle, sw_params, 0x7fffffff ); snd_pcm_sw_params_set_silence_threshold( handle, sw_params, 0 ); snd_pcm_sw_params_set_silence_size( handle, sw_params, INT_MAX ); err = snd_pcm_sw_params( handle, sw_params ); if (err < 0) { snd_pcm_close(handle); sprintf(message_, "RtAudio: ALSA error installing software configuration (%s): %s.", name, snd_strerror(err)); error(RtError::DEBUG_WARNING); return FAILURE; } #if defined(__RTAUDIO_DEBUG__) fprintf(stderr, "\nRtApiAlsa: dump software params after installation:\n\n"); snd_pcm_sw_params_dump(sw_params, out); #endif // Allocate the ApiHandle if necessary and then save. AlsaHandle *apiInfo = 0; if ( stream_.apiHandle == 0 ) { apiInfo = (AlsaHandle *) new AlsaHandle; stream_.apiHandle = (void *) apiInfo; apiInfo->handles[0] = 0; apiInfo->handles[1] = 0; } else { apiInfo = (AlsaHandle *) stream_.apiHandle; } apiInfo->handles[mode] = handle; // Set flags for buffer conversion stream_.doConvertBuffer[mode] = false; if (stream_.userFormat != stream_.deviceFormat[mode]) stream_.doConvertBuffer[mode] = true; if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode]) stream_.doConvertBuffer[mode] = true; if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode]) stream_.doConvertBuffer[mode] = true; // Allocate necessary internal buffers if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) { long buffer_bytes; if (stream_.nUserChannels[0] >= stream_.nUserChannels[1]) buffer_bytes = stream_.nUserChannels[0]; else buffer_bytes = stream_.nUserChannels[1]; buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat); if (stream_.userBuffer) free(stream_.userBuffer); if (apiInfo->tempBuffer) free(apiInfo->tempBuffer); stream_.userBuffer = (char *) calloc(buffer_bytes, 1); 
apiInfo->tempBuffer = (char *) calloc(buffer_bytes, 1); if ( stream_.userBuffer == NULL || apiInfo->tempBuffer == NULL ) { sprintf(message_, "RtApiAlsa: error allocating user buffer memory (%s).", devices_[device].name.c_str()); goto error; } } if ( stream_.doConvertBuffer[mode] ) { long buffer_bytes; bool makeBuffer = true; if ( mode == OUTPUT ) buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]); else { // mode == INPUT buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]); if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) { long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]); if ( buffer_bytes < bytes_out ) makeBuffer = false; } } if ( makeBuffer ) { buffer_bytes *= *bufferSize; if (stream_.deviceBuffer) free(stream_.deviceBuffer); stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1); if (stream_.deviceBuffer == NULL) { sprintf(message_, "RtApiAlsa: error allocating device buffer memory (%s).", devices_[device].name.c_str()); goto error; } } } stream_.device[mode] = device; stream_.state = STREAM_STOPPED; if ( stream_.mode == OUTPUT && mode == INPUT ) { // We had already set up an output stream. stream_.mode = DUPLEX; // Link the streams if possible. apiInfo->synchronized = false; if (snd_pcm_link( apiInfo->handles[0], apiInfo->handles[1] ) == 0) apiInfo->synchronized = true; else { sprintf(message_, "RtApiAlsa: unable to synchronize input and output streams (%s).", devices_[device].name.c_str()); error(RtError::DEBUG_WARNING); } } else stream_.mode = mode; stream_.nBuffers = periods; stream_.sampleRate = sampleRate; // Setup the buffer conversion information structure. 
if ( stream_.doConvertBuffer[mode] ) { if (mode == INPUT) { // convert device to user buffer stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1]; stream_.convertInfo[mode].outJump = stream_.nUserChannels[1]; stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1]; stream_.convertInfo[mode].outFormat = stream_.userFormat; } else { // convert user to device buffer stream_.convertInfo[mode].inJump = stream_.nUserChannels[0]; stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0]; stream_.convertInfo[mode].inFormat = stream_.userFormat; stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0]; } if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump ) stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump; else stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump; // Set up the interleave/deinterleave offsets. if ( mode == INPUT && stream_.deInterleave[1] ) { for (int k=0; khandles[0]) snd_pcm_close(apiInfo->handles[0]); if (apiInfo->handles[1]) snd_pcm_close(apiInfo->handles[1]); if ( apiInfo->tempBuffer ) free(apiInfo->tempBuffer); delete apiInfo; stream_.apiHandle = 0; } if (stream_.userBuffer) { free(stream_.userBuffer); stream_.userBuffer = 0; } error(RtError::DEBUG_WARNING); return FAILURE; } void RtApiAlsa :: closeStream() { // We don't want an exception to be thrown here because this // function is called by our class destructor. So, do our own // stream check. 
if ( stream_.mode == UNINITIALIZED ) { sprintf(message_, "RtApiAlsa::closeStream(): no open stream to close!"); error(RtError::WARNING); return; } AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle; if (stream_.state == STREAM_RUNNING) { if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) snd_pcm_drop(apiInfo->handles[0]); if (stream_.mode == INPUT || stream_.mode == DUPLEX) snd_pcm_drop(apiInfo->handles[1]); stream_.state = STREAM_STOPPED; } if (stream_.callbackInfo.usingCallback) { stream_.callbackInfo.usingCallback = false; pthread_join(stream_.callbackInfo.thread, NULL); } if (apiInfo) { if (apiInfo->handles[0]) snd_pcm_close(apiInfo->handles[0]); if (apiInfo->handles[1]) snd_pcm_close(apiInfo->handles[1]); free(apiInfo->tempBuffer); delete apiInfo; stream_.apiHandle = 0; } if (stream_.userBuffer) { free(stream_.userBuffer); stream_.userBuffer = 0; } if (stream_.deviceBuffer) { free(stream_.deviceBuffer); stream_.deviceBuffer = 0; } stream_.mode = UNINITIALIZED; } // Pump a bunch of zeros into the output buffer. This is needed only when we // are doing duplex operations. bool RtApiAlsa :: primeOutputBuffer() { int err; char *buffer; int channels; snd_pcm_t **handle; RtAudioFormat format; AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle; handle = (snd_pcm_t **) apiInfo->handles; if (stream_.mode == DUPLEX) { // Setup parameters and do buffer conversion if necessary. 
if ( stream_.doConvertBuffer[0] ) { convertBuffer( stream_.deviceBuffer, apiInfo->tempBuffer, stream_.convertInfo[0] ); channels = stream_.nDeviceChannels[0]; format = stream_.deviceFormat[0]; } else { channels = stream_.nUserChannels[0]; format = stream_.userFormat; } buffer = new char[stream_.bufferSize * formatBytes(format) * channels]; bzero(buffer, stream_.bufferSize * formatBytes(format) * channels); for (int i=0; ihandles; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { state = snd_pcm_state(handle[0]); if (state != SND_PCM_STATE_PREPARED) { err = snd_pcm_prepare(handle[0]); if (err < 0) { sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.", devices_[stream_.device[0]].name.c_str(), snd_strerror(err)); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } // Reprime output buffer if needed if ( (stream_.mode == DUPLEX) && ( !primeOutputBuffer() ) ) { MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } } if ( (stream_.mode == INPUT || stream_.mode == DUPLEX) && !apiInfo->synchronized ) { state = snd_pcm_state(handle[1]); if (state != SND_PCM_STATE_PREPARED) { err = snd_pcm_prepare(handle[1]); if (err < 0) { sprintf(message_, "RtApiAlsa: error preparing pcm device (%s): %s.", devices_[stream_.device[1]].name.c_str(), snd_strerror(err)); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } } if ( (stream_.mode == DUPLEX) && ( !primeOutputBuffer() ) ) { MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } stream_.state = STREAM_RUNNING; MUTEX_UNLOCK(&stream_.mutex); } void RtApiAlsa :: stopStream() { verifyStream(); if (stream_.state == STREAM_STOPPED) return; // Change the state before the lock to improve shutdown response // when using a callback. 
stream_.state = STREAM_STOPPED; MUTEX_LOCK(&stream_.mutex); int err; AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle; snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { err = snd_pcm_drain(handle[0]); if (err < 0) { sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.", devices_[stream_.device[0]].name.c_str(), snd_strerror(err)); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } if ( (stream_.mode == INPUT || stream_.mode == DUPLEX) && !apiInfo->synchronized ) { err = snd_pcm_drain(handle[1]); if (err < 0) { sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.", devices_[stream_.device[1]].name.c_str(), snd_strerror(err)); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } MUTEX_UNLOCK(&stream_.mutex); } void RtApiAlsa :: abortStream() { verifyStream(); if (stream_.state == STREAM_STOPPED) return; // Change the state before the lock to improve shutdown response // when using a callback. 
stream_.state = STREAM_STOPPED; MUTEX_LOCK(&stream_.mutex); int err; AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle; snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { err = snd_pcm_drop(handle[0]); if (err < 0) { sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.", devices_[stream_.device[0]].name.c_str(), snd_strerror(err)); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } if ( (stream_.mode == INPUT || stream_.mode == DUPLEX) && !apiInfo->synchronized ) { err = snd_pcm_drop(handle[1]); if (err < 0) { sprintf(message_, "RtApiAlsa: error draining pcm device (%s): %s.", devices_[stream_.device[1]].name.c_str(), snd_strerror(err)); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } MUTEX_UNLOCK(&stream_.mutex); } int RtApiAlsa :: streamWillBlock() { verifyStream(); if (stream_.state == STREAM_STOPPED) return 0; MUTEX_LOCK(&stream_.mutex); int err = 0, frames = 0; AlsaHandle *apiInfo = (AlsaHandle *) stream_.apiHandle; snd_pcm_t **handle = (snd_pcm_t **) apiInfo->handles; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { err = snd_pcm_avail_update(handle[0]); if (err < 0) { sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.", devices_[stream_.device[0]].name.c_str(), snd_strerror(err)); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } } frames = err; if (stream_.mode == INPUT || stream_.mode == DUPLEX) { err = snd_pcm_avail_update(handle[1]); if (err < 0) { sprintf(message_, "RtApiAlsa: error getting available frames for device (%s): %s.", devices_[stream_.device[1]].name.c_str(), snd_strerror(err)); MUTEX_UNLOCK(&stream_.mutex); error(RtError::DRIVER_ERROR); } if (frames > err) frames = err; } frames = stream_.bufferSize - frames; if (frames < 0) frames = 0; MUTEX_UNLOCK(&stream_.mutex); return frames; } void RtApiAlsa :: tickStream() { verifyStream(); int stopStream = 0; if (stream_.state == STREAM_STOPPED) 
{ if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds return; } else if (stream_.callbackInfo.usingCallback) { RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback; stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData); } MUTEX_LOCK(&stream_.mutex); // The state might change while waiting on a mutex. if (stream_.state == STREAM_STOPPED) goto unlock; int err; char *buffer; int channels; AlsaHandle *apiInfo; snd_pcm_t **handle; RtAudioFormat format; apiInfo = (AlsaHandle *) stream_.apiHandle; handle = (snd_pcm_t **) apiInfo->handles; if ( stream_.mode == DUPLEX ) { // In duplex mode, we need to make the snd_pcm_read call before // the snd_pcm_write call in order to avoid under/over runs. So, // copy the userData to our temporary buffer. int bufferBytes; bufferBytes = stream_.bufferSize * stream_.nUserChannels[0] * formatBytes(stream_.userFormat); memcpy( apiInfo->tempBuffer, stream_.userBuffer, bufferBytes ); } if (stream_.mode == INPUT || stream_.mode == DUPLEX) { // Setup parameters. if (stream_.doConvertBuffer[1]) { buffer = stream_.deviceBuffer; channels = stream_.nDeviceChannels[1]; format = stream_.deviceFormat[1]; } else { buffer = stream_.userBuffer; channels = stream_.nUserChannels[1]; format = stream_.userFormat; } // Read samples from device in interleaved/non-interleaved format. if (stream_.deInterleave[1]) { void *bufs[channels]; size_t offset = stream_.bufferSize * formatBytes(format); for (int i=0; itempBuffer, stream_.convertInfo[0] ); else convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] ); channels = stream_.nDeviceChannels[0]; format = stream_.deviceFormat[0]; } else { if ( stream_.mode == DUPLEX ) buffer = apiInfo->tempBuffer; else buffer = stream_.userBuffer; channels = stream_.nUserChannels[0]; format = stream_.userFormat; } // Do byte swapping if necessary. 
if (stream_.doByteSwap[0]) byteSwapBuffer(buffer, stream_.bufferSize * channels, format); // Write samples to device in interleaved/non-interleaved format. if (stream_.deInterleave[0]) { void *bufs[channels]; size_t offset = stream_.bufferSize * formatBytes(format); for (int i=0; istopStream(); } void RtApiAlsa :: setStreamCallback(RtAudioCallback callback, void *userData) { verifyStream(); CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo; if ( info->usingCallback ) { sprintf(message_, "RtApiAlsa: A callback is already set for this stream!"); error(RtError::WARNING); return; } info->callback = (void *) callback; info->userData = userData; info->usingCallback = true; info->object = (void *) this; // Set the thread attributes for joinable and realtime scheduling // priority. The higher priority will only take affect if the // program is run as root or suid. pthread_attr_t attr; pthread_attr_init(&attr); pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); pthread_attr_setschedpolicy(&attr, SCHED_RR); int err = pthread_create(&info->thread, &attr, alsaCallbackHandler, &stream_.callbackInfo); pthread_attr_destroy(&attr); if (err) { info->usingCallback = false; sprintf(message_, "RtApiAlsa: error starting callback thread!"); error(RtError::THREAD_ERROR); } } void RtApiAlsa :: cancelStreamCallback() { verifyStream(); if (stream_.callbackInfo.usingCallback) { if (stream_.state == STREAM_RUNNING) stopStream(); MUTEX_LOCK(&stream_.mutex); stream_.callbackInfo.usingCallback = false; pthread_join(stream_.callbackInfo.thread, NULL); stream_.callbackInfo.thread = 0; stream_.callbackInfo.callback = NULL; stream_.callbackInfo.userData = NULL; MUTEX_UNLOCK(&stream_.mutex); } } extern "C" void *alsaCallbackHandler(void *ptr) { CallbackInfo *info = (CallbackInfo *) ptr; RtApiAlsa *object = (RtApiAlsa *) info->object; bool *usingCallback = &info->usingCallback; while ( *usingCallback ) { try { object->tickStream(); } catch (RtError &exception) { fprintf(stderr, 
"\nRtApiAlsa: callback thread error (%s) ... closing thread.\n\n", exception.getMessageString()); break; } } pthread_exit(NULL); } //******************** End of __LINUX_ALSA__ *********************// #endif #if defined(__WINDOWS_ASIO__) // ASIO API on Windows // The ASIO API is designed around a callback scheme, so this // implementation is similar to that used for OS-X CoreAudio and Linux // Jack. The primary constraint with ASIO is that it only allows // access to a single driver at a time. Thus, it is not possible to // have more than one simultaneous RtAudio stream. // // This implementation also requires a number of external ASIO files // and a few global variables. The ASIO callback scheme does not // allow for the passing of user data, so we must create a global // pointer to our callbackInfo structure. // // On unix systems, we make use of a pthread condition variable. // Since there is no equivalent in Windows, I hacked something based // on information found in // http://www.cs.wustl.edu/~schmidt/win32-cv-1.html. #include "asio/asiosys.h" #include "asio/asio.h" #include "asio/iasiothiscallresolver.h" #include "asio/asiodrivers.h" #include AsioDrivers drivers; ASIOCallbacks asioCallbacks; ASIODriverInfo driverInfo; CallbackInfo *asioCallbackInfo; struct AsioHandle { bool stopStream; ASIOBufferInfo *bufferInfos; HANDLE condition; AsioHandle() :stopStream(false), bufferInfos(0) {} }; static const char* GetAsioErrorString( ASIOError result ) { struct Messages { ASIOError value; const char*message; }; static Messages m[] = { { ASE_NotPresent, "Hardware input or output is not present or available." }, { ASE_HWMalfunction, "Hardware is malfunctioning." }, { ASE_InvalidParameter, "Invalid input parameter." }, { ASE_InvalidMode, "Invalid mode." }, { ASE_SPNotAdvancing, "Sample position not advancing." }, { ASE_NoClock, "Sample clock or rate cannot be determined or is not present." }, { ASE_NoMemory, "Not enough memory to complete the request." 
} }; for (unsigned int i = 0; i < sizeof(m)/sizeof(m[0]); ++i) if (m[i].value == result) return m[i].message; return "Unknown error."; } RtApiAsio :: RtApiAsio() { this->coInitialized = false; this->initialize(); if (nDevices_ <= 0) { sprintf(message_, "RtApiAsio: no Windows ASIO audio drivers found!"); error(RtError::NO_DEVICES_FOUND); } } RtApiAsio :: ~RtApiAsio() { if ( stream_.mode != UNINITIALIZED ) closeStream(); if ( coInitialized ) CoUninitialize(); } void RtApiAsio :: initialize(void) { // ASIO cannot run on a multi-threaded appartment. You can call CoInitialize beforehand, but it must be // for appartment threading (in which case, CoInitilialize will return S_FALSE here). coInitialized = false; HRESULT hr = CoInitialize(NULL); if ( FAILED(hr) ) { sprintf(message_,"RtApiAsio: ASIO requires a single-threaded appartment. Call CoInitializeEx(0,COINIT_APARTMENTTHREADED)"); } coInitialized = true; nDevices_ = drivers.asioGetNumDev(); if (nDevices_ <= 0) return; // Create device structures and write device driver names to each. RtApiDevice device; char name[128]; for (int i=0; iname.c_str() ) ) { sprintf(message_, "RtApiAsio: error loading driver (%s).", info->name.c_str()); error(RtError::DEBUG_WARNING); return; } ASIOError result = ASIOInit( &driverInfo ); if ( result != ASE_OK ) { sprintf(message_, "RtApiAsio: error (%s) initializing driver (%s).", GetAsioErrorString(result), info->name.c_str()); error(RtError::DEBUG_WARNING); return; } // Determine the device channel information. 
long inputChannels, outputChannels; result = ASIOGetChannels( &inputChannels, &outputChannels ); if ( result != ASE_OK ) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: error (%s) getting input/output channel count (%s).", GetAsioErrorString(result), info->name.c_str()); error(RtError::DEBUG_WARNING); return; } info->maxOutputChannels = outputChannels; if ( outputChannels > 0 ) info->minOutputChannels = 1; info->maxInputChannels = inputChannels; if ( inputChannels > 0 ) info->minInputChannels = 1; // If device opens for both playback and capture, we determine the channels. if (info->maxOutputChannels > 0 && info->maxInputChannels > 0) { info->hasDuplexSupport = true; info->maxDuplexChannels = (info->maxOutputChannels > info->maxInputChannels) ? info->maxInputChannels : info->maxOutputChannels; info->minDuplexChannels = (info->minOutputChannels > info->minInputChannels) ? info->minInputChannels : info->minOutputChannels; } // Determine the supported sample rates. info->sampleRates.clear(); for (unsigned int i=0; isampleRates.push_back( SAMPLE_RATES[i] ); } if (info->sampleRates.size() == 0) { drivers.removeCurrentDriver(); sprintf( message_, "RtApiAsio: No supported sample rates found for driver (%s).", info->name.c_str() ); error(RtError::DEBUG_WARNING); return; } // Determine supported data types ... just check first channel and assume rest are the same. 
ASIOChannelInfo channelInfo; channelInfo.channel = 0; channelInfo.isInput = true; if ( info->maxInputChannels <= 0 ) channelInfo.isInput = false; result = ASIOGetChannelInfo( &channelInfo ); if ( result != ASE_OK ) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: error (%s) getting driver (%s) channel information.", GetAsioErrorString(result), info->name.c_str()); error(RtError::DEBUG_WARNING); return; } if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) info->nativeFormats |= RTAUDIO_SINT16; else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) info->nativeFormats |= RTAUDIO_SINT32; else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) info->nativeFormats |= RTAUDIO_FLOAT32; else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) info->nativeFormats |= RTAUDIO_FLOAT64; // Check that we have at least one supported format. if (info->nativeFormats == 0) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: driver (%s) data format not supported by RtAudio.", info->name.c_str()); error(RtError::DEBUG_WARNING); return; } info->probed = true; drivers.removeCurrentDriver(); } void bufferSwitch(long index, ASIOBool processNow) { RtApiAsio *object = (RtApiAsio *) asioCallbackInfo->object; try { object->callbackEvent( index ); } catch (RtError &exception) { fprintf(stderr, "\nRtApiAsio: callback handler error (%s)!\n\n", exception.getMessageString()); return; } return; } void sampleRateChanged(ASIOSampleRate sRate) { // The ASIO documentation says that this usually only happens during // external sync. Audio processing is not stopped by the driver, // actual sample rate might not have even changed, maybe only the // sample rate status of an AES/EBU or S/PDIF digital input at the // audio device. 
RtAudio *object = (RtAudio *) asioCallbackInfo->object; try { object->stopStream(); } catch (RtError &exception) { fprintf(stderr, "\nRtApiAsio: sampleRateChanged() error (%s)!\n\n", exception.getMessageString()); return; } fprintf(stderr, "\nRtApiAsio: driver reports sample rate changed to %d ... stream stopped!!!", (int) sRate); } long asioMessages(long selector, long value, void* message, double* opt) { long ret = 0; switch(selector) { case kAsioSelectorSupported: if(value == kAsioResetRequest || value == kAsioEngineVersion || value == kAsioResyncRequest || value == kAsioLatenciesChanged // The following three were added for ASIO 2.0, you don't // necessarily have to support them. || value == kAsioSupportsTimeInfo || value == kAsioSupportsTimeCode || value == kAsioSupportsInputMonitor) ret = 1L; break; case kAsioResetRequest: // Defer the task and perform the reset of the driver during the // next "safe" situation. You cannot reset the driver right now, // as this code is called from the driver. Reset the driver is // done by completely destruct is. I.e. ASIOStop(), // ASIODisposeBuffers(), Destruction Afterwards you initialize the // driver again. fprintf(stderr, "\nRtApiAsio: driver reset requested!!!"); ret = 1L; break; case kAsioResyncRequest: // This informs the application that the driver encountered some // non-fatal data loss. It is used for synchronization purposes // of different media. Added mainly to work around the Win16Mutex // problems in Windows 95/98 with the Windows Multimedia system, // which could lose data because the Mutex was held too long by // another thread. However a driver can issue it in other // situations, too. fprintf(stderr, "\nRtApiAsio: driver resync requested!!!"); ret = 1L; break; case kAsioLatenciesChanged: // This will inform the host application that the drivers were // latencies changed. Beware, it this does not mean that the // buffer sizes have changed! You might need to update internal // delay data. 
fprintf(stderr, "\nRtApiAsio: driver latency may have changed!!!"); ret = 1L; break; case kAsioEngineVersion: // Return the supported ASIO version of the host application. If // a host application does not implement this selector, ASIO 1.0 // is assumed by the driver. ret = 2L; break; case kAsioSupportsTimeInfo: // Informs the driver whether the // asioCallbacks.bufferSwitchTimeInfo() callback is supported. // For compatibility with ASIO 1.0 drivers the host application // should always support the "old" bufferSwitch method, too. ret = 0; break; case kAsioSupportsTimeCode: // Informs the driver wether application is interested in time // code info. If an application does not need to know about time // code, the driver has less work to do. ret = 0; break; } return ret; } bool RtApiAsio :: probeDeviceOpen(int device, StreamMode mode, int channels, int sampleRate, RtAudioFormat format, int *bufferSize, int numberOfBuffers) { // For ASIO, a duplex stream MUST use the same driver. if ( mode == INPUT && stream_.mode == OUTPUT && stream_.device[0] != device ) { sprintf(message_, "RtApiAsio: duplex stream must use the same device for input and output."); error(RtError::WARNING); return FAILURE; } // Only load the driver once for duplex stream. ASIOError result; if ( mode != INPUT || stream_.mode != OUTPUT ) { if ( !drivers.loadDriver( (char *)devices_[device].name.c_str() ) ) { sprintf(message_, "RtApiAsio: error loading driver (%s).", devices_[device].name.c_str()); error(RtError::DEBUG_WARNING); return FAILURE; } result = ASIOInit( &driverInfo ); if ( result != ASE_OK ) { sprintf(message_, "RtApiAsio: error (%s) initializing driver (%s).", GetAsioErrorString(result), devices_[device].name.c_str()); error(RtError::DEBUG_WARNING); return FAILURE; } } // Check the device channel count. 
long inputChannels, outputChannels; result = ASIOGetChannels( &inputChannels, &outputChannels ); if ( result != ASE_OK ) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: error (%s) getting input/output channel count (%s).", GetAsioErrorString(result), devices_[device].name.c_str()); error(RtError::DEBUG_WARNING); return FAILURE; } if ( ( mode == OUTPUT && channels > outputChannels) || ( mode == INPUT && channels > inputChannels) ) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: driver (%s) does not support requested channel count (%d).", devices_[device].name.c_str(), channels); error(RtError::DEBUG_WARNING); return FAILURE; } stream_.nDeviceChannels[mode] = channels; stream_.nUserChannels[mode] = channels; // Verify the sample rate is supported. result = ASIOCanSampleRate( (ASIOSampleRate) sampleRate ); if ( result != ASE_OK ) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: driver (%s) does not support requested sample rate (%d).", devices_[device].name.c_str(), sampleRate); error(RtError::DEBUG_WARNING); return FAILURE; } // Set the sample rate. result = ASIOSetSampleRate( (ASIOSampleRate) sampleRate ); if ( result != ASE_OK ) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: driver (%s) error setting sample rate (%d).", devices_[device].name.c_str(), sampleRate); error(RtError::DEBUG_WARNING); return FAILURE; } // Determine the driver data type. ASIOChannelInfo channelInfo; channelInfo.channel = 0; if ( mode == OUTPUT ) channelInfo.isInput = false; else channelInfo.isInput = true; result = ASIOGetChannelInfo( &channelInfo ); if ( result != ASE_OK ) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: driver (%s) error getting data format.", devices_[device].name.c_str()); error(RtError::DEBUG_WARNING); return FAILURE; } // Assuming WINDOWS host is always little-endian. 
stream_.doByteSwap[mode] = false; stream_.userFormat = format; stream_.deviceFormat[mode] = 0; if ( channelInfo.type == ASIOSTInt16MSB || channelInfo.type == ASIOSTInt16LSB ) { stream_.deviceFormat[mode] = RTAUDIO_SINT16; if ( channelInfo.type == ASIOSTInt16MSB ) stream_.doByteSwap[mode] = true; } else if ( channelInfo.type == ASIOSTInt32MSB || channelInfo.type == ASIOSTInt32LSB ) { stream_.deviceFormat[mode] = RTAUDIO_SINT32; if ( channelInfo.type == ASIOSTInt32MSB ) stream_.doByteSwap[mode] = true; } else if ( channelInfo.type == ASIOSTFloat32MSB || channelInfo.type == ASIOSTFloat32LSB ) { stream_.deviceFormat[mode] = RTAUDIO_FLOAT32; if ( channelInfo.type == ASIOSTFloat32MSB ) stream_.doByteSwap[mode] = true; } else if ( channelInfo.type == ASIOSTFloat64MSB || channelInfo.type == ASIOSTFloat64LSB ) { stream_.deviceFormat[mode] = RTAUDIO_FLOAT64; if ( channelInfo.type == ASIOSTFloat64MSB ) stream_.doByteSwap[mode] = true; } if ( stream_.deviceFormat[mode] == 0 ) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: driver (%s) data format not supported by RtAudio.", devices_[device].name.c_str()); error(RtError::DEBUG_WARNING); return FAILURE; } // Set the buffer size. For a duplex stream, this will end up // setting the buffer size based on the input constraints, which // should be ok. long minSize, maxSize, preferSize, granularity; result = ASIOGetBufferSize( &minSize, &maxSize, &preferSize, &granularity ); if ( result != ASE_OK ) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: error (%s) on driver (%s) error getting buffer size.", GetAsioErrorString(result), devices_[device].name.c_str()); error(RtError::DEBUG_WARNING); return FAILURE; } if ( *bufferSize < minSize ) *bufferSize = minSize; else if ( *bufferSize > maxSize ) *bufferSize = maxSize; else if ( granularity == -1 ) { // Make sure bufferSize is a power of two. 
double power = log10( (double) *bufferSize ) / log10( 2.0 ); *bufferSize = (int) pow( 2.0, floor(power+0.5) ); if ( *bufferSize < minSize ) *bufferSize = minSize; else if ( *bufferSize > maxSize ) *bufferSize = maxSize; else *bufferSize = preferSize; } else if (granularity != 0) { // to an even multiple of granularity, rounding up. *bufferSize = (*bufferSize + granularity-1)/granularity*granularity; } if ( mode == INPUT && stream_.mode == OUTPUT && stream_.bufferSize != *bufferSize ) std::cerr << "Possible input/output buffersize discrepancy!" << std::endl; stream_.bufferSize = *bufferSize; stream_.nBuffers = 2; // ASIO always uses deinterleaved channels. stream_.deInterleave[mode] = true; // Allocate, if necessary, our AsioHandle structure for the stream. AsioHandle *handle = (AsioHandle *) stream_.apiHandle; if ( handle == 0 ) { handle = (AsioHandle *) calloc(1, sizeof(AsioHandle)); if ( handle == NULL ) { drivers.removeCurrentDriver(); sprintf(message_, "RtApiAsio: error allocating AsioHandle memory (%s).", devices_[device].name.c_str()); error(RtError::DEBUG_WARNING); return FAILURE; } handle->bufferInfos = 0; // Create a manual-reset event. handle->condition = CreateEvent( NULL, // no security TRUE, // manual-reset FALSE, // non-signaled initially NULL ); // unnamed stream_.apiHandle = (void *) handle; } // Create the ASIO internal buffers. Since RtAudio sets up input // and output separately, we'll have to dispose of previously // created output buffers for a duplex stream. if ( mode == INPUT && stream_.mode == OUTPUT ) { ASIODisposeBuffers(); if ( handle->bufferInfos ) free( handle->bufferInfos ); } // Allocate, initialize, and save the bufferInfos in our stream callbackInfo structure. 
int i, nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1]; handle->bufferInfos = (ASIOBufferInfo *) malloc( nChannels * sizeof(ASIOBufferInfo) ); if (handle->bufferInfos == NULL) { sprintf(message_, "RtApiAsio: error allocating bufferInfo memory (%s).", devices_[device].name.c_str()); goto error; } ASIOBufferInfo *infos; infos = handle->bufferInfos; for ( i=0; iisInput = ASIOFalse; infos->channelNum = i; infos->buffers[0] = infos->buffers[1] = 0; } for ( i=0; iisInput = ASIOTrue; infos->channelNum = i; infos->buffers[0] = infos->buffers[1] = 0; } // Set up the ASIO callback structure and create the ASIO data buffers. asioCallbacks.bufferSwitch = &bufferSwitch; asioCallbacks.sampleRateDidChange = &sampleRateChanged; asioCallbacks.asioMessage = &asioMessages; asioCallbacks.bufferSwitchTimeInfo = NULL; result = ASIOCreateBuffers( handle->bufferInfos, nChannels, stream_.bufferSize, &asioCallbacks); if ( result != ASE_OK ) { sprintf(message_, "RtApiAsio: eror (%s) on driver (%s) error creating buffers.", GetAsioErrorString(result), devices_[device].name.c_str()); goto error; } // Set flags for buffer conversion. 
// --- RtApiAsio::probeDeviceOpen (continued): decide whether format /
// channel-count / interleave conversion is needed, then size the user and
// device staging buffers accordingly.
stream_.doConvertBuffer[mode] = false;
if (stream_.userFormat != stream_.deviceFormat[mode])
  stream_.doConvertBuffer[mode] = true;
if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
  stream_.doConvertBuffer[mode] = true;
if (stream_.nUserChannels[mode] > 1 && stream_.deInterleave[mode])
  stream_.doConvertBuffer[mode] = true;

// Allocate necessary internal buffers
if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {

  // Size the user buffer for the larger of the two channel counts.
  long buffer_bytes;
  if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
    buffer_bytes = stream_.nUserChannels[0];
  else
    buffer_bytes = stream_.nUserChannels[1];

  buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
  if (stream_.userBuffer) free(stream_.userBuffer);
  stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
  if (stream_.userBuffer == NULL) {
    sprintf(message_, "RtApiAsio: error (%s) allocating user buffer memory (%s).",
            GetAsioErrorString(result), devices_[device].name.c_str());
    goto error;
  }
}

if ( stream_.doConvertBuffer[mode] ) {

  long buffer_bytes;
  bool makeBuffer = true;
  if ( mode == OUTPUT )
    buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
  else { // mode == INPUT
    buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
    if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
      // Reuse the already-allocated (output) device buffer when it is at
      // least as large as the input side needs.
      long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
      if ( buffer_bytes < bytes_out ) makeBuffer = false;
    }
  }

  if ( makeBuffer ) {
    buffer_bytes *= *bufferSize;
    if (stream_.deviceBuffer) free(stream_.deviceBuffer);
    stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.deviceBuffer == NULL) {
      sprintf(message_, "RtApiAsio: error (%s) allocating device buffer memory (%s).",
              GetAsioErrorString(result), devices_[device].name.c_str());
      goto error;
    }
  }
}

stream_.device[mode] = device;
stream_.state = STREAM_STOPPED;
if ( stream_.mode == OUTPUT && mode == INPUT ) // We had already set up an output stream.
stream_.mode = DUPLEX; else stream_.mode = mode; stream_.sampleRate = sampleRate; asioCallbackInfo = &stream_.callbackInfo; stream_.callbackInfo.object = (void *) this; // Setup the buffer conversion information structure. if ( stream_.doConvertBuffer[mode] ) { if (mode == INPUT) { // convert device to user buffer stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1]; stream_.convertInfo[mode].outJump = stream_.nUserChannels[1]; stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1]; stream_.convertInfo[mode].outFormat = stream_.userFormat; } else { // convert user to device buffer stream_.convertInfo[mode].inJump = stream_.nUserChannels[0]; stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0]; stream_.convertInfo[mode].inFormat = stream_.userFormat; stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0]; } if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump ) stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump; else stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump; // Set up the interleave/deinterleave offsets. if ( mode == INPUT && stream_.deInterleave[1] ) { for (int k=0; kcondition ); if ( handle->bufferInfos ) free( handle->bufferInfos ); free( handle ); stream_.apiHandle = 0; } if (stream_.userBuffer) { free(stream_.userBuffer); stream_.userBuffer = 0; } error(RtError::DEBUG_WARNING); return FAILURE; } void RtApiAsio :: closeStream() { // We don't want an exception to be thrown here because this // function is called by our class destructor. So, do our own // streamId check. 
  // --- RtApiAsio::closeStream (body): stop the device if running and tear
  // down all ASIO and stream resources.
  if ( stream_.mode == UNINITIALIZED ) {
    sprintf(message_, "RtApiAsio::closeStream(): no open stream to close!");
    error(RtError::WARNING);
    return;
  }

  if (stream_.state == STREAM_RUNNING)
    ASIOStop();

  ASIODisposeBuffers();
  drivers.removeCurrentDriver();

  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  if ( handle ) {
    CloseHandle( handle->condition );
    if ( handle->bufferInfos )
      free( handle->bufferInfos );
    free( handle );
    stream_.apiHandle = 0;
  }

  if (stream_.userBuffer) {
    free(stream_.userBuffer);
    stream_.userBuffer = 0;
  }

  if (stream_.deviceBuffer) {
    free(stream_.deviceBuffer);
    stream_.deviceBuffer = 0;
  }

  stream_.mode = UNINITIALIZED;
}

// Install a user callback; refuses (with a WARNING) if one is already set.
void RtApiAsio :: setStreamCallback(RtAudioCallback callback, void *userData)
{
  verifyStream();

  if ( stream_.callbackInfo.usingCallback ) {
    sprintf(message_, "RtApiAsio: A callback is already set for this stream!");
    error(RtError::WARNING);
    return;
  }

  stream_.callbackInfo.callback = (void *) callback;
  stream_.callbackInfo.userData = userData;
  stream_.callbackInfo.usingCallback = true;
}

// Remove the user callback, stopping the stream first if it is running.
void RtApiAsio :: cancelStreamCallback()
{
  verifyStream();

  if (stream_.callbackInfo.usingCallback) {

    if (stream_.state == STREAM_RUNNING)
      stopStream();

    MUTEX_LOCK(&stream_.mutex);

    stream_.callbackInfo.usingCallback = false;
    stream_.callbackInfo.userData = NULL;
    stream_.state = STREAM_STOPPED;
    stream_.callbackInfo.callback = NULL;

    MUTEX_UNLOCK(&stream_.mutex);
  }
}

// Start ASIO processing; throws DRIVER_ERROR via error() on failure.
void RtApiAsio :: startStream()
{
  verifyStream();
  if (stream_.state == STREAM_RUNNING) return;

  MUTEX_LOCK(&stream_.mutex);

  ASIOError result = ASIOStart();
  if ( result != ASE_OK ) {
    sprintf(message_, "RtApiAsio: error starting device (%s).",
            devices_[stream_.device[0]].name.c_str());
    MUTEX_UNLOCK(&stream_.mutex);
    error(RtError::DRIVER_ERROR);
  }
  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  handle->stopStream = false;
  stream_.state = STREAM_RUNNING;

  MUTEX_UNLOCK(&stream_.mutex);
}

// Stop ASIO processing; throws DRIVER_ERROR via error() on failure.
void RtApiAsio :: stopStream()
{
  verifyStream();
  if (stream_.state == STREAM_STOPPED) return;

  // Change the state before the lock to improve shutdown response
  // when using a callback.
  stream_.state = STREAM_STOPPED;

  MUTEX_LOCK(&stream_.mutex);

  ASIOError result = ASIOStop();
  if ( result != ASE_OK ) {
    sprintf(message_, "RtApiAsio: error stopping device (%s).",
            devices_[stream_.device[0]].name.c_str());
    MUTEX_UNLOCK(&stream_.mutex);
    error(RtError::DRIVER_ERROR);
  }

  MUTEX_UNLOCK(&stream_.mutex);
}

// ASIO has no separate abort notion -- identical to stopStream().
void RtApiAsio :: abortStream()
{
  stopStream();
}

// Blocking-mode tick: wait for the ASIO callback to signal that one buffer
// period has been processed.  Must not be mixed with callback mode.
void RtApiAsio :: tickStream()
{
  verifyStream();

  if (stream_.state == STREAM_STOPPED)
    return;

  if (stream_.callbackInfo.usingCallback) {
    sprintf(message_, "RtApiAsio: tickStream() should not be used when a callback function is set!");
    error(RtError::WARNING);
    return;
  }

  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;

  MUTEX_LOCK(&stream_.mutex);

  // Release the stream_mutex here and wait for the event
  // to become signaled by the callback process.
  MUTEX_UNLOCK(&stream_.mutex);
  WaitForMultipleObjects(1, &handle->condition, FALSE, INFINITE);
  ResetEvent( handle->condition );
}

// Driver bufferSwitch handler: run the user callback (if any) and move data
// between the user buffer and the per-channel ASIO device buffers.
void RtApiAsio :: callbackEvent(long bufferIndex)
{
  verifyStream();

  if (stream_.state == STREAM_STOPPED) return;

  CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo;
  AsioHandle *handle = (AsioHandle *) stream_.apiHandle;
  if ( info->usingCallback && handle->stopStream ) {
    // Check if the stream should be stopped (via the previous user
    // callback return value).  We stop the stream here, rather than
    // after the function call, so that output data can first be
    // processed.
    this->stopStream();
    return;
  }

  MUTEX_LOCK(&stream_.mutex);

  // Invoke user callback first, to get fresh output data.
if ( info->usingCallback ) { RtAudioCallback callback = (RtAudioCallback) info->callback; if ( callback(stream_.userBuffer, stream_.bufferSize, info->userData) ) handle->stopStream = true; } int bufferBytes, j; int nChannels = stream_.nDeviceChannels[0] + stream_.nDeviceChannels[1]; if ( stream_.mode == OUTPUT || stream_.mode == DUPLEX ) { bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[0]); if (stream_.doConvertBuffer[0]) { convertBuffer( stream_.deviceBuffer, stream_.userBuffer, stream_.convertInfo[0] ); if ( stream_.doByteSwap[0] ) byteSwapBuffer(stream_.deviceBuffer, stream_.bufferSize * stream_.nDeviceChannels[0], stream_.deviceFormat[0]); // Always de-interleave ASIO output data. j = 0; for ( int i=0; ibufferInfos[i].isInput != ASIOTrue ) memcpy(handle->bufferInfos[i].buffers[bufferIndex], &stream_.deviceBuffer[j++*bufferBytes], bufferBytes ); } } else { // single channel only if (stream_.doByteSwap[0]) byteSwapBuffer(stream_.userBuffer, stream_.bufferSize * stream_.nUserChannels[0], stream_.userFormat); for ( int i=0; ibufferInfos[i].isInput != ASIOTrue ) { memcpy(handle->bufferInfos[i].buffers[bufferIndex], stream_.userBuffer, bufferBytes ); break; } } } } if ( stream_.mode == INPUT || stream_.mode == DUPLEX ) { bufferBytes = stream_.bufferSize * formatBytes(stream_.deviceFormat[1]); if (stream_.doConvertBuffer[1]) { // Always interleave ASIO input data. 
j = 0; for ( int i=0; ibufferInfos[i].isInput == ASIOTrue ) memcpy(&stream_.deviceBuffer[j++*bufferBytes], handle->bufferInfos[i].buffers[bufferIndex], bufferBytes ); } if ( stream_.doByteSwap[1] ) byteSwapBuffer(stream_.deviceBuffer, stream_.bufferSize * stream_.nDeviceChannels[1], stream_.deviceFormat[1]); convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] ); } else { // single channel only for ( int i=0; ibufferInfos[i].isInput == ASIOTrue ) { memcpy(stream_.userBuffer, handle->bufferInfos[i].buffers[bufferIndex], bufferBytes ); break; } } if (stream_.doByteSwap[1]) byteSwapBuffer(stream_.userBuffer, stream_.bufferSize * stream_.nUserChannels[1], stream_.userFormat); } } if ( !info->usingCallback ) SetEvent( handle->condition ); // The following call was suggested by Malte Clasen. While the API // documentation indicates it should not be required, some device // drivers apparently do not function correctly without it. ASIOOutputReady(); MUTEX_UNLOCK(&stream_.mutex); } //******************** End of __WINDOWS_ASIO__ *********************// #endif #if defined(__WINDOWS_DS__) // Windows DirectSound API #include #include #define MINIMUM_DEVICE_BUFFER_SIZE 32768 #ifdef _MSC_VER // if Microsoft Visual C++ #pragma comment(lib,"winmm.lib") // then, auto-link winmm.lib. Otherwise, it has to be added manually. 
#endif static inline DWORD dsPointerDifference(DWORD laterPointer,DWORD earlierPointer,DWORD bufferSize) { if (laterPointer > earlierPointer) return laterPointer-earlierPointer; else return laterPointer-earlierPointer+bufferSize; } static inline DWORD dsPointerBetween(DWORD pointer, DWORD laterPointer,DWORD earlierPointer, DWORD bufferSize) { if (pointer > bufferSize) pointer -= bufferSize; if (laterPointer < earlierPointer) laterPointer += bufferSize; if (pointer < earlierPointer) pointer += bufferSize; return pointer >= earlierPointer && pointer < laterPointer; } #undef GENERATE_DEBUG_LOG // Define this to generate a debug timing log file in c:/rtaudiolog.txt" #ifdef GENERATE_DEBUG_LOG #include "mmsystem.h" #include "fstream" struct TTickRecord { DWORD currentReadPointer, safeReadPointer; DWORD currentWritePointer, safeWritePointer; DWORD readTime, writeTime; DWORD nextWritePointer, nextReadPointer; }; int currentDebugLogEntry = 0; std::vector debugLog(2000); #endif // A structure to hold various information related to the DirectSound // API implementation. struct DsHandle { void *object; void *buffer; UINT bufferPointer; DWORD dsBufferSize; DWORD dsPointerLeadTime; // the number of bytes ahead of the safe pointer to lead by. }; RtApiDs::RtDsStatistics RtApiDs::statistics; // Provides a backdoor hook to monitor for DirectSound read overruns and write underruns. RtApiDs::RtDsStatistics RtApiDs::getDsStatistics() { RtDsStatistics s = statistics; // update the calculated fields. if (s.inputFrameSize != 0) s.latency += s.readDeviceSafeLeadBytes*1.0/s.inputFrameSize / s.sampleRate; if (s.outputFrameSize != 0) s.latency += (s.writeDeviceSafeLeadBytes+ s.writeDeviceBufferLeadBytes)*1.0/s.outputFrameSize / s.sampleRate; return s; } // Declarations for utility functions, callbacks, and structures // specific to the DirectSound implementation. 
// Forward declarations for the DirectSound enumeration callbacks and helpers
// used below (definitions appear later in the file).
static bool CALLBACK deviceCountCallback(LPGUID lpguid, LPCTSTR description, LPCTSTR module, LPVOID lpContext);
static bool CALLBACK deviceInfoCallback(LPGUID lpguid, LPCTSTR description, LPCTSTR module, LPVOID lpContext);
static bool CALLBACK defaultDeviceCallback(LPGUID lpguid, LPCTSTR description, LPCTSTR module, LPVOID lpContext);
static bool CALLBACK deviceIdCallback(LPGUID lpguid, LPCTSTR description, LPCTSTR module, LPVOID lpContext);
static char* getErrorString(int code);

// Worker-thread entry point for callback-mode streams.
extern "C" unsigned __stdcall callbackHandler(void *ptr);

// Record exchanged with the DirectSound enumeration callbacks.
struct enum_info {
  std::string name;
  LPGUID id;
  bool isInput;
  bool isValid;
};

RtApiDs :: RtApiDs()
{
  // Dsound will run both-threaded.  If CoInitialize fails, then just
  // accept whatever the mainline chose for a threading model.
  coInitialized = false;
  HRESULT hr = CoInitialize(NULL);
  if ( !FAILED(hr) ) coInitialized = true;

  this->initialize();

  if (nDevices_ <= 0) {
    sprintf(message_, "RtApiDs: no Windows DirectSound audio devices found!");
    error(RtError::NO_DEVICES_FOUND);
  }
}

RtApiDs :: ~RtApiDs()
{
  if (coInitialized)
    CoUninitialize(); // balanced call.

  if ( stream_.mode != UNINITIALIZED ) closeStream();
}

int RtApiDs :: getDefaultInputDevice(void)
{
  enum_info info;

  // Enumerate through devices to find the default output.
  // NOTE(review): comment says "output" but this enumerates *capture*
  // devices -- likely a copy/paste slip in the original comment.
  HRESULT result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)defaultDeviceCallback, &info);
  if ( FAILED(result) ) {
    sprintf(message_, "RtApiDs: Error performing default input device enumeration: %s.",
            getErrorString(result));
    error(RtError::WARNING);
    return 0;
  }

  // NOTE(review): the text below is corrupted -- every span between a '<'
  // and the following '>' was stripped from this copy of the file.  The
  // missing text covered the remainder of this function and (apparently)
  // the default-output / device-count helpers, up to the body of
  // probeDeviceInfo.  Preserved verbatim; restore from a pristine
  // RtAudio 3.0.3 source before compiling.
  for ( int i=0; i info(count); for (i=0; iname; dsinfo.isValid = false;

  // Enumerate through input devices to find the id (if it exists).
  HRESULT result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
  if ( FAILED(result) ) {
    sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
            getErrorString(result));
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Do capture probe first.
  // --- RtApiDs::probeDeviceInfo (continued): probe capture capabilities.
  if ( dsinfo.isValid == false )
    goto playback_probe;

  LPDIRECTSOUNDCAPTURE input;
  result = DirectSoundCaptureCreate( dsinfo.id, &input, NULL );
  if ( FAILED(result) ) {
    sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
            info->name.c_str(), getErrorString(result));
    error(RtError::DEBUG_WARNING);
    goto playback_probe;
  }

  DSCCAPS in_caps;
  in_caps.dwSize = sizeof(in_caps);
  result = input->GetCaps( &in_caps );
  if ( FAILED(result) ) {
    input->Release();
    sprintf(message_, "RtApiDs: Could not get capture capabilities (%s): %s.",
            info->name.c_str(), getErrorString(result));
    error(RtError::DEBUG_WARNING);
    goto playback_probe;
  }

  // Get input channel information.
  info->minInputChannels = 1;
  info->maxInputChannels = in_caps.dwChannels;

  // Get sample rate and format information.
  // DSCCAPS reports fixed rate/width/channel combinations via the
  // WAVE_FORMAT_* flags, hence only 11025/22050/44100 can be detected.
  info->sampleRates.clear();
  if( in_caps.dwChannels == 2 ) {
    // Stereo capture flags.
    if( in_caps.dwFormats & WAVE_FORMAT_1S16 ) info->nativeFormats |= RTAUDIO_SINT16;
    if( in_caps.dwFormats & WAVE_FORMAT_2S16 ) info->nativeFormats |= RTAUDIO_SINT16;
    if( in_caps.dwFormats & WAVE_FORMAT_4S16 ) info->nativeFormats |= RTAUDIO_SINT16;
    if( in_caps.dwFormats & WAVE_FORMAT_1S08 ) info->nativeFormats |= RTAUDIO_SINT8;
    if( in_caps.dwFormats & WAVE_FORMAT_2S08 ) info->nativeFormats |= RTAUDIO_SINT8;
    if( in_caps.dwFormats & WAVE_FORMAT_4S08 ) info->nativeFormats |= RTAUDIO_SINT8;

    if ( info->nativeFormats & RTAUDIO_SINT16 ) {
      if( in_caps.dwFormats & WAVE_FORMAT_1S16 ) info->sampleRates.push_back( 11025 );
      if( in_caps.dwFormats & WAVE_FORMAT_2S16 ) info->sampleRates.push_back( 22050 );
      if( in_caps.dwFormats & WAVE_FORMAT_4S16 ) info->sampleRates.push_back( 44100 );
    }
    else if ( info->nativeFormats & RTAUDIO_SINT8 ) {
      if( in_caps.dwFormats & WAVE_FORMAT_1S08 ) info->sampleRates.push_back( 11025 );
      if( in_caps.dwFormats & WAVE_FORMAT_2S08 ) info->sampleRates.push_back( 22050 );
      if( in_caps.dwFormats & WAVE_FORMAT_4S08 ) info->sampleRates.push_back( 44100 );
    }
  }
  else if ( in_caps.dwChannels == 1 ) {
    // Mono capture flags.
    if( in_caps.dwFormats & WAVE_FORMAT_1M16 ) info->nativeFormats |= RTAUDIO_SINT16;
    if( in_caps.dwFormats & WAVE_FORMAT_2M16 ) info->nativeFormats |= RTAUDIO_SINT16;
    if( in_caps.dwFormats & WAVE_FORMAT_4M16 ) info->nativeFormats |= RTAUDIO_SINT16;
    if( in_caps.dwFormats & WAVE_FORMAT_1M08 ) info->nativeFormats |= RTAUDIO_SINT8;
    if( in_caps.dwFormats & WAVE_FORMAT_2M08 ) info->nativeFormats |= RTAUDIO_SINT8;
    if( in_caps.dwFormats & WAVE_FORMAT_4M08 ) info->nativeFormats |= RTAUDIO_SINT8;

    if ( info->nativeFormats & RTAUDIO_SINT16 ) {
      if( in_caps.dwFormats & WAVE_FORMAT_1M16 ) info->sampleRates.push_back( 11025 );
      if( in_caps.dwFormats & WAVE_FORMAT_2M16 ) info->sampleRates.push_back( 22050 );
      if( in_caps.dwFormats & WAVE_FORMAT_4M16 ) info->sampleRates.push_back( 44100 );
    }
    else if ( info->nativeFormats & RTAUDIO_SINT8 ) {
      if( in_caps.dwFormats & WAVE_FORMAT_1M08 ) info->sampleRates.push_back( 11025 );
      if( in_caps.dwFormats & WAVE_FORMAT_2M08 ) info->sampleRates.push_back( 22050 );
      if( in_caps.dwFormats & WAVE_FORMAT_4M08 ) info->sampleRates.push_back( 44100 );
    }
  }
  else info->minInputChannels = 0; // technically, this would be an error

  input->Release();

 playback_probe:

  dsinfo.isValid = false;

  // Enumerate through output devices to find the id (if it exists).
  result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
  if ( FAILED(result) ) {
    sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
            getErrorString(result));
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Now do playback probe.
  // --- RtApiDs::probeDeviceInfo (continued): probe playback capabilities,
  // reconcile sample rates, then validate; RtApiDs::probeDeviceOpen begins
  // further below.
  if ( dsinfo.isValid == false )
    goto check_parameters;

  LPDIRECTSOUND output;
  DSCAPS out_caps;
  result = DirectSoundCreate( dsinfo.id, &output, NULL );
  if ( FAILED(result) ) {
    sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
            info->name.c_str(), getErrorString(result));
    error(RtError::DEBUG_WARNING);
    goto check_parameters;
  }

  out_caps.dwSize = sizeof(out_caps);
  result = output->GetCaps( &out_caps );
  if ( FAILED(result) ) {
    output->Release();
    sprintf(message_, "RtApiDs: Could not get playback capabilities (%s): %s.",
            info->name.c_str(), getErrorString(result));
    error(RtError::DEBUG_WARNING);
    goto check_parameters;
  }

  // Get output channel information.
  info->minOutputChannels = 1;
  info->maxOutputChannels = ( out_caps.dwFlags & DSCAPS_PRIMARYSTEREO ) ? 2 : 1;

  // Get sample rate information.  Use capture device rate information
  // if it exists.
  if ( info->sampleRates.size() == 0 ) {
    info->sampleRates.push_back( (int) out_caps.dwMinSecondarySampleRate );
    if ( out_caps.dwMaxSecondarySampleRate > out_caps.dwMinSecondarySampleRate )
      info->sampleRates.push_back( (int) out_caps.dwMaxSecondarySampleRate );
  }
  else {
    // Check input rates against output rate range.  If there's an
    // inconsistency (such as a duplex-capable device which reports a
    // single output rate of 48000 Hz), we'll go with the output
    // rate(s) since the DirectSoundCapture API is stupid and broken.
    // Note that the probed sample rate values are NOT used when
    // opening the device.  Thanks to Tue Andersen for reporting this.
    if ( info->sampleRates.back() < (int) out_caps.dwMinSecondarySampleRate ) {
      info->sampleRates.clear();
      info->sampleRates.push_back( (int) out_caps.dwMinSecondarySampleRate );
      if ( out_caps.dwMaxSecondarySampleRate > out_caps.dwMinSecondarySampleRate )
        info->sampleRates.push_back( (int) out_caps.dwMaxSecondarySampleRate );
    }
    else {
      // Drop capture rates above the playback maximum (iterate backwards so
      // erase() doesn't shift unvisited elements) ...
      for ( int i=info->sampleRates.size()-1; i>=0; i-- ) {
        if ( (unsigned int) info->sampleRates[i] > out_caps.dwMaxSecondarySampleRate )
          info->sampleRates.erase( info->sampleRates.begin() + i );
      }
      // ... and those below the playback minimum.
      while ( info->sampleRates.size() > 0 &&
              ((unsigned int) info->sampleRates[0] < out_caps.dwMinSecondarySampleRate) ) {
        info->sampleRates.erase( info->sampleRates.begin() );
      }
    }
  }

  // Get format information.
  if ( out_caps.dwFlags & DSCAPS_PRIMARY16BIT ) info->nativeFormats |= RTAUDIO_SINT16;
  if ( out_caps.dwFlags & DSCAPS_PRIMARY8BIT ) info->nativeFormats |= RTAUDIO_SINT8;

  output->Release();

 check_parameters:
  if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 ) {
    sprintf(message_, "RtApiDs: no reported input or output channels for device (%s).",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }
  if ( info->sampleRates.size() == 0 || info->nativeFormats == 0 ) {
    sprintf(message_, "RtApiDs: no reported sample rates or data formats for device (%s).",
            info->name.c_str());
    error(RtError::DEBUG_WARNING);
    return;
  }

  // Determine duplex status.
  if (info->maxInputChannels < info->maxOutputChannels)
    info->maxDuplexChannels = info->maxInputChannels;
  else
    info->maxDuplexChannels = info->maxOutputChannels;
  if (info->minInputChannels < info->minOutputChannels)
    info->minDuplexChannels = info->minInputChannels;
  else
    info->minDuplexChannels = info->minOutputChannels;

  if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true;
  else info->hasDuplexSupport = false;

  info->probed = true;

  return;
}

// Open one direction (OUTPUT or INPUT) of a DirectSound stream on the given
// device; returns SUCCESS/FAILURE.  Called once per direction for duplex.
bool RtApiDs :: probeDeviceOpen( int device, StreamMode mode, int channels,
                                 int sampleRate, RtAudioFormat format,
                                 int *bufferSize, int numberOfBuffers)
{
  HRESULT result;
  HWND hWnd = GetForegroundWindow();
  // According to a note in PortAudio, using GetDesktopWindow()
  // instead of GetForegroundWindow() is supposed to avoid problems
  // that occur when the application's window is not the foreground
  // window.  Also, if the application window closes before the
  // DirectSound buffer, DirectSound can crash.  However, for console
  // applications, no sound was produced when using GetDesktopWindow().
  long buffer_size;
  LPVOID audioPtr;
  DWORD dataLen;
  int nBuffers;

  // Check the numberOfBuffers parameter and limit the lowest value to
  // two.  This is a judgement call and a value of two is probably too
  // low for capture, but it should work for playback.
  if (numberOfBuffers < 2)
    nBuffers = 2;
  else
    nBuffers = numberOfBuffers;

  // Define the wave format structure (16-bit PCM, srate, channels)
  WAVEFORMATEX waveFormat;
  ZeroMemory(&waveFormat, sizeof(WAVEFORMATEX));
  waveFormat.wFormatTag = WAVE_FORMAT_PCM;
  waveFormat.nChannels = channels;
  waveFormat.nSamplesPerSec = (unsigned long) sampleRate;

  // Determine the data format.
  if ( devices_[device].nativeFormats ) { // 8-bit and/or 16-bit support
    if ( format == RTAUDIO_SINT8 ) {
      if ( devices_[device].nativeFormats & RTAUDIO_SINT8 )
        waveFormat.wBitsPerSample = 8;
      else
        waveFormat.wBitsPerSample = 16;
    }
    else {
      if ( devices_[device].nativeFormats & RTAUDIO_SINT16 )
        waveFormat.wBitsPerSample = 16;
      else
        waveFormat.wBitsPerSample = 8;
    }
  }
  else {
    sprintf(message_, "RtApiDs: no reported data formats for device (%s).",
            devices_[device].name.c_str());
    error(RtError::DEBUG_WARNING);
    return FAILURE;
  }

  waveFormat.nBlockAlign = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
  waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;

  // Determine the device buffer size. By default, 32k, but we will
  // grow it to make allowances for very large software buffer sizes.
  DWORD dsBufferSize = 0;
  DWORD dsPointerLeadTime = 0;

  buffer_size = MINIMUM_DEVICE_BUFFER_SIZE; // sound cards will always *knock wood* support this

  enum_info dsinfo;
  void *ohandle = 0, *bhandle = 0;
  //  strncpy( dsinfo.name, devices_[device].name.c_str(), 64 );
  dsinfo.name = devices_[device].name;
  dsinfo.isValid = false;

  if ( mode == OUTPUT ) {

    // Lead time in bytes ahead of the safe write pointer.
    dsPointerLeadTime = numberOfBuffers * (*bufferSize) * (waveFormat.wBitsPerSample / 8) * channels;

    // If the user wants an even bigger buffer, increase the device buffer size accordingly.
    while ( dsPointerLeadTime * 2U > (DWORD)buffer_size )
      buffer_size *= 2;

    if ( devices_[device].maxOutputChannels < channels ) {
      sprintf(message_, "RtApiDs: requested channels (%d) > than supported (%d) by device (%s).",
              channels, devices_[device].maxOutputChannels, devices_[device].name.c_str());
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Enumerate through output devices to find the id (if it exists).
    // --- RtApiDs::probeDeviceOpen (continued, OUTPUT branch): create the
    // playback object, set the primary-buffer format, create and zero the
    // secondary buffer.
    result = DirectSoundEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Error performing output device id enumeration: %s.",
              getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    if ( dsinfo.isValid == false ) {
      sprintf(message_, "RtApiDs: output device (%s) id not found!",
              devices_[device].name.c_str());
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    LPGUID id = dsinfo.id;
    LPDIRECTSOUND object;
    LPDIRECTSOUNDBUFFER buffer;
    DSBUFFERDESC bufferDescription;

    result = DirectSoundCreate( id, &object, NULL );
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Could not create playback object (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Set cooperative level to DSSCL_EXCLUSIVE
    result = object->SetCooperativeLevel(hWnd, DSSCL_EXCLUSIVE);
    if ( FAILED(result) ) {
      object->Release();
      sprintf(message_, "RtApiDs: Unable to set cooperative level (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Even though we will write to the secondary buffer, we need to
    // access the primary buffer to set the correct output format
    // (since the default is 8-bit, 22 kHz!).  Setup the DS primary
    // buffer description.
    ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
    bufferDescription.dwSize = sizeof(DSBUFFERDESC);
    bufferDescription.dwFlags = DSBCAPS_PRIMARYBUFFER;
    // Obtain the primary buffer
    result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
    if ( FAILED(result) ) {
      object->Release();
      sprintf(message_, "RtApiDs: Unable to access primary buffer (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Set the primary DS buffer sound format.
    result = buffer->SetFormat(&waveFormat);
    if ( FAILED(result) ) {
      object->Release();
      sprintf(message_, "RtApiDs: Unable to set primary buffer format (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Setup the secondary DS buffer description.
    dsBufferSize = (DWORD)buffer_size;
    ZeroMemory(&bufferDescription, sizeof(DSBUFFERDESC));
    bufferDescription.dwSize = sizeof(DSBUFFERDESC);
    bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
                                  DSBCAPS_GETCURRENTPOSITION2 |
                                  DSBCAPS_LOCHARDWARE );  // Force hardware mixing
    bufferDescription.dwBufferBytes = buffer_size;
    bufferDescription.lpwfxFormat = &waveFormat;

    // Try to create the secondary DS buffer.  If that doesn't work,
    // try to use software mixing.  Otherwise, there's a problem.
    result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
    if ( FAILED(result) ) {
      bufferDescription.dwFlags = ( DSBCAPS_STICKYFOCUS |
                                    DSBCAPS_GETCURRENTPOSITION2 |
                                    DSBCAPS_LOCSOFTWARE );  // Force software mixing
      result = object->CreateSoundBuffer(&bufferDescription, &buffer, NULL);
      if ( FAILED(result) ) {
        object->Release();
        sprintf(message_, "RtApiDs: Unable to create secondary DS buffer (%s): %s.",
                devices_[device].name.c_str(), getErrorString(result));
        error(RtError::DEBUG_WARNING);
        return FAILURE;
      }
    }

    // Get the buffer size ... might be different from what we specified.
    DSBCAPS dsbcaps;
    dsbcaps.dwSize = sizeof(DSBCAPS);
    buffer->GetCaps(&dsbcaps);
    buffer_size = dsbcaps.dwBufferBytes;

    // Lock the DS buffer
    result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
    if ( FAILED(result) ) {
      object->Release();
      buffer->Release();
      sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Zero the DS buffer
    ZeroMemory(audioPtr, dataLen);

    // Unlock the DS buffer
    result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
    if ( FAILED(result) ) {
      object->Release();
      buffer->Release();
      sprintf(message_, "RtApiDs: Unable to unlock buffer(%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    ohandle = (void *) object;
    bhandle = (void *) buffer;
    stream_.nDeviceChannels[0] = channels;
  }

  if ( mode == INPUT ) {

    if ( devices_[device].maxInputChannels < channels ) {
      sprintf(message_, "RtAudioDS: device (%s) does not support %d channels.",
              devices_[device].name.c_str(), channels);
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Enumerate through input devices to find the id (if it exists).
    // --- RtApiDs::probeDeviceOpen (continued, INPUT branch): create the
    // capture object and buffer, then fall through to the common stream
    // state / conversion-buffer setup shared by both directions.
    result = DirectSoundCaptureEnumerate((LPDSENUMCALLBACK)deviceIdCallback, &dsinfo);
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Error performing input device id enumeration: %s.",
              getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    if ( dsinfo.isValid == false ) {
      sprintf(message_, "RtAudioDS: input device (%s) id not found!",
              devices_[device].name.c_str());
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    LPGUID id = dsinfo.id;
    LPDIRECTSOUNDCAPTURE object;
    LPDIRECTSOUNDCAPTUREBUFFER buffer;
    DSCBUFFERDESC bufferDescription;

    result = DirectSoundCaptureCreate( id, &object, NULL );
    if ( FAILED(result) ) {
      sprintf(message_, "RtApiDs: Could not create capture object (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Setup the secondary DS buffer description.
    dsBufferSize = buffer_size;
    ZeroMemory(&bufferDescription, sizeof(DSCBUFFERDESC));
    bufferDescription.dwSize = sizeof(DSCBUFFERDESC);
    bufferDescription.dwFlags = 0;
    bufferDescription.dwReserved = 0;
    bufferDescription.dwBufferBytes = buffer_size;
    bufferDescription.lpwfxFormat = &waveFormat;

    // Create the capture buffer.
    result = object->CreateCaptureBuffer(&bufferDescription, &buffer, NULL);
    if ( FAILED(result) ) {
      object->Release();
      sprintf(message_, "RtApiDs: Unable to create capture buffer (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Lock the capture buffer
    result = buffer->Lock(0, buffer_size, &audioPtr, &dataLen, NULL, NULL, 0);
    if ( FAILED(result) ) {
      object->Release();
      buffer->Release();
      sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    // Zero the buffer
    ZeroMemory(audioPtr, dataLen);

    // Unlock the buffer
    result = buffer->Unlock(audioPtr, dataLen, NULL, 0);
    if ( FAILED(result) ) {
      object->Release();
      buffer->Release();
      sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.",
              devices_[device].name.c_str(), getErrorString(result));
      error(RtError::DEBUG_WARNING);
      return FAILURE;
    }

    ohandle = (void *) object;
    bhandle = (void *) buffer;
    stream_.nDeviceChannels[1] = channels;
  }

  // Common setup for either direction.
  stream_.userFormat = format;
  if ( waveFormat.wBitsPerSample == 8 )
    stream_.deviceFormat[mode] = RTAUDIO_SINT8;
  else
    stream_.deviceFormat[mode] = RTAUDIO_SINT16;
  stream_.nUserChannels[mode] = channels;
  stream_.bufferSize = *bufferSize;

  // Set flags for buffer conversion
  stream_.doConvertBuffer[mode] = false;
  if (stream_.userFormat != stream_.deviceFormat[mode])
    stream_.doConvertBuffer[mode] = true;
  if (stream_.nUserChannels[mode] < stream_.nDeviceChannels[mode])
    stream_.doConvertBuffer[mode] = true;

  // Allocate necessary internal buffers
  if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) {

    // Size the user buffer for the larger of the two channel counts.
    long buffer_bytes;
    if (stream_.nUserChannels[0] >= stream_.nUserChannels[1])
      buffer_bytes = stream_.nUserChannels[0];
    else
      buffer_bytes = stream_.nUserChannels[1];

    buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat);
    if (stream_.userBuffer) free(stream_.userBuffer);
    stream_.userBuffer = (char *) calloc(buffer_bytes, 1);
    if (stream_.userBuffer == NULL) {
      sprintf(message_, "RtApiDs: error allocating user buffer memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
  }

  if ( stream_.doConvertBuffer[mode] ) {

    long buffer_bytes;
    bool makeBuffer = true;
    if ( mode == OUTPUT )
      buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
    else { // mode == INPUT
      buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]);
      if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) {
        // Reuse the existing (output) device buffer if it is big enough.
        long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]);
        if ( buffer_bytes < bytes_out ) makeBuffer = false;
      }
    }

    if ( makeBuffer ) {
      buffer_bytes *= *bufferSize;
      if (stream_.deviceBuffer) free(stream_.deviceBuffer);
      stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1);
      if (stream_.deviceBuffer == NULL) {
        sprintf(message_, "RtApiDs: error allocating device buffer memory (%s).",
                devices_[device].name.c_str());
        goto error;
      }
    }
  }

  // Allocate our DsHandle structures for the stream.
  // Index 0 = playback, index 1 = capture.
  DsHandle *handles;
  if ( stream_.apiHandle == 0 ) {
    handles = (DsHandle *) calloc(2, sizeof(DsHandle));
    if ( handles == NULL ) {
      sprintf(message_, "RtApiDs: Error allocating DsHandle memory (%s).",
              devices_[device].name.c_str());
      goto error;
    }
    handles[0].object = 0;
    handles[1].object = 0;
    stream_.apiHandle = (void *) handles;
  }
  else
    handles = (DsHandle *) stream_.apiHandle;
  handles[mode].object = ohandle;
  handles[mode].buffer = bhandle;
  handles[mode].dsBufferSize = dsBufferSize;
  handles[mode].dsPointerLeadTime = dsPointerLeadTime;

  stream_.device[mode] = device;
  stream_.state = STREAM_STOPPED;
  if ( stream_.mode == OUTPUT && mode == INPUT )
    // We had already set up an output stream.
    stream_.mode = DUPLEX;
  else
    stream_.mode = mode;
  stream_.nBuffers = nBuffers;
  stream_.sampleRate = sampleRate;

  // Setup the buffer conversion information structure.
if ( stream_.doConvertBuffer[mode] ) { if (mode == INPUT) { // convert device to user buffer stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1]; stream_.convertInfo[mode].outJump = stream_.nUserChannels[1]; stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1]; stream_.convertInfo[mode].outFormat = stream_.userFormat; } else { // convert user to device buffer stream_.convertInfo[mode].inJump = stream_.nUserChannels[0]; stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0]; stream_.convertInfo[mode].inFormat = stream_.userFormat; stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0]; } if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump ) stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump; else stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump; // Set up the interleave/deinterleave offsets. if ( mode == INPUT && stream_.deInterleave[1] ) { for (int k=0; kRelease(); object->Release(); } if (handles[1].object) { LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object; LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer; if (buffer) buffer->Release(); object->Release(); } free(handles); stream_.apiHandle = 0; } if (stream_.userBuffer) { free(stream_.userBuffer); stream_.userBuffer = 0; } error(RtError::DEBUG_WARNING); return FAILURE; } void RtApiDs :: setStreamCallback(RtAudioCallback callback, void *userData) { verifyStream(); CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo; if ( info->usingCallback ) { sprintf(message_, "RtApiDs: A callback is already set for this stream!"); error(RtError::WARNING); return; } info->callback = (void *) callback; info->userData = userData; info->usingCallback = true; info->object = (void *) this; unsigned thread_id; info->thread = _beginthreadex(NULL, 0, &callbackHandler, &stream_.callbackInfo, 0, &thread_id); if (info->thread == 0) { info->usingCallback = false; sprintf(message_, 
"RtApiDs: error starting callback thread!"); error(RtError::THREAD_ERROR); } // When spawning multiple threads in quick succession, it appears to be // necessary to wait a bit for each to initialize ... another windoism! Sleep(1); } void RtApiDs :: cancelStreamCallback() { verifyStream(); if (stream_.callbackInfo.usingCallback) { if (stream_.state == STREAM_RUNNING) stopStream(); MUTEX_LOCK(&stream_.mutex); stream_.callbackInfo.usingCallback = false; WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE ); CloseHandle( (HANDLE)stream_.callbackInfo.thread ); stream_.callbackInfo.thread = 0; stream_.callbackInfo.callback = NULL; stream_.callbackInfo.userData = NULL; MUTEX_UNLOCK(&stream_.mutex); } } void RtApiDs :: closeStream() { // We don't want an exception to be thrown here because this // function is called by our class destructor. So, do our own // streamId check. if ( stream_.mode == UNINITIALIZED ) { sprintf(message_, "RtApiDs::closeStream(): no open stream to close!"); error(RtError::WARNING); return; } if (stream_.callbackInfo.usingCallback) { stream_.callbackInfo.usingCallback = false; WaitForSingleObject( (HANDLE)stream_.callbackInfo.thread, INFINITE ); CloseHandle( (HANDLE)stream_.callbackInfo.thread ); } DsHandle *handles = (DsHandle *) stream_.apiHandle; if (handles) { if (handles[0].object) { LPDIRECTSOUND object = (LPDIRECTSOUND) handles[0].object; LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer; if (buffer) { buffer->Stop(); buffer->Release(); } object->Release(); } if (handles[1].object) { LPDIRECTSOUNDCAPTURE object = (LPDIRECTSOUNDCAPTURE) handles[1].object; LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer; if (buffer) { buffer->Stop(); buffer->Release(); } object->Release(); } free(handles); stream_.apiHandle = 0; } if (stream_.userBuffer) { free(stream_.userBuffer); stream_.userBuffer = 0; } if (stream_.deviceBuffer) { free(stream_.deviceBuffer); stream_.deviceBuffer = 0; } 
stream_.mode = UNINITIALIZED; } void RtApiDs :: startStream() { verifyStream(); if (stream_.state == STREAM_RUNNING) return; // Increase scheduler frequency on lesser windows (a side-effect of // increasing timer accuracy). On greater windows (Win2K or later), // this is already in effect. MUTEX_LOCK(&stream_.mutex); DsHandle *handles = (DsHandle *) stream_.apiHandle; timeBeginPeriod(1); memset(&statistics,0,sizeof(statistics)); statistics.sampleRate = stream_.sampleRate; statistics.writeDeviceBufferLeadBytes = handles[0].dsPointerLeadTime ; buffersRolling = false; duplexPrerollBytes = 0; if (stream_.mode == DUPLEX) { // 0.5 seconds of silence in DUPLEX mode while the devices spin up and synchronize. duplexPrerollBytes = (int)(0.5*stream_.sampleRate*formatBytes( stream_.deviceFormat[1])*stream_.nDeviceChannels[1]); } #ifdef GENERATE_DEBUG_LOG currentDebugLogEntry = 0; #endif HRESULT result; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { statistics.outputFrameSize = formatBytes( stream_.deviceFormat[0]) *stream_.nDeviceChannels[0]; LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer; result = buffer->Play( 0, 0, DSBPLAY_LOOPING ); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to start buffer (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } } if (stream_.mode == INPUT || stream_.mode == DUPLEX) { statistics.inputFrameSize = formatBytes( stream_.deviceFormat[1]) *stream_.nDeviceChannels[1]; LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer; result = buffer->Start(DSCBSTART_LOOPING ); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to start capture buffer (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } } stream_.state = STREAM_RUNNING; MUTEX_UNLOCK(&stream_.mutex); } void RtApiDs :: stopStream() { verifyStream(); if (stream_.state == STREAM_STOPPED) return; // 
Change the state before the lock to improve shutdown response // when using a callback. stream_.state = STREAM_STOPPED; MUTEX_LOCK(&stream_.mutex); timeEndPeriod(1); // revert to normal scheduler frequency on lesser windows. #ifdef GENERATE_DEBUG_LOG // Write the timing log to a .TSV file for analysis in Excel. unlink("c:/rtaudiolog.txt"); std::ofstream os("c:/rtaudiolog.txt"); os << "writeTime\treadDelay\tnextWritePointer\tnextReadPointer\tcurrentWritePointer\tsafeWritePointer\tcurrentReadPointer\tsafeReadPointer" << std::endl; for (int i = 0; i < currentDebugLogEntry ; ++i) { TTickRecord &r = debugLog[i]; os << r.writeTime-debugLog[0].writeTime << "\t" << (r.readTime-r.writeTime) << "\t" << r.nextWritePointer % BUFFER_SIZE << "\t" << r.nextReadPointer % BUFFER_SIZE << "\t" << r.currentWritePointer % BUFFER_SIZE << "\t" << r.safeWritePointer % BUFFER_SIZE << "\t" << r.currentReadPointer % BUFFER_SIZE << "\t" << r.safeReadPointer % BUFFER_SIZE << std::endl; } #endif // There is no specific DirectSound API call to "drain" a buffer // before stopping. We can hack this for playback by writing // buffers of zeroes over the entire buffer. For capture, the // concept is less clear so we'll repeat what we do in the // abortStream() case. HRESULT result; DWORD dsBufferSize; LPVOID buffer1 = NULL; LPVOID buffer2 = NULL; DWORD bufferSize1 = 0; DWORD bufferSize2 = 0; DsHandle *handles = (DsHandle *) stream_.apiHandle; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { DWORD currentPos, safePos; long buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]); LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer; DWORD nextWritePos = handles[0].bufferPointer; dsBufferSize = handles[0].dsBufferSize; DWORD dsBytesWritten = 0; // Write zeroes for at least dsBufferSize bytes. while ( dsBytesWritten < dsBufferSize ) { // Find out where the read and "safe write" pointers are. 
result = dsBuffer->GetCurrentPosition( ¤tPos, &safePos ); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } // Chase nextWritePosition. if ( currentPos < nextWritePos ) currentPos += dsBufferSize; // unwrap offset DWORD endWrite = nextWritePos + buffer_bytes; // Check whether the entire write region is behind the play pointer. while ( currentPos < endWrite ) { double millis = (endWrite - currentPos) * 900.0; millis /= ( formatBytes(stream_.deviceFormat[0]) * stream_.nDeviceChannels[0] *stream_.sampleRate); if ( millis < 1.0 ) millis = 1.0; Sleep( (DWORD) millis ); // Wake up, find out where we are now result = dsBuffer->GetCurrentPosition( ¤tPos, &safePos ); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } if ( currentPos < (DWORD)nextWritePos ) currentPos += dsBufferSize; // unwrap offset } // Lock free space in the buffer result = dsBuffer->Lock( nextWritePos, buffer_bytes, &buffer1, &bufferSize1, &buffer2, &bufferSize2, 0); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } // Zero the free space ZeroMemory( buffer1, bufferSize1 ); if (buffer2 != NULL) ZeroMemory( buffer2, bufferSize2 ); // Update our buffer offset and unlock sound buffer dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 ); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize; handles[0].bufferPointer = nextWritePos; dsBytesWritten += 
buffer_bytes; } // OK, now stop the buffer. result = dsBuffer->Stop(); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to stop buffer (%s): %s", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } // If we play again, start at the beginning of the buffer. handles[0].bufferPointer = 0; } if (stream_.mode == INPUT || stream_.mode == DUPLEX) { LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer; buffer1 = NULL; bufferSize1 = 0; result = buffer->Stop(); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } dsBufferSize = handles[1].dsBufferSize; // Lock the buffer and clear it so that if we start to play again, // we won't have old data playing. result = buffer->Lock(0, dsBufferSize, &buffer1, &bufferSize1, NULL, NULL, 0); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } // Zero the DS buffer ZeroMemory(buffer1, bufferSize1); // Unlock the DS buffer result = buffer->Unlock(buffer1, bufferSize1, NULL, 0); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } // If we start recording again, we must begin at beginning of buffer. handles[1].bufferPointer = 0; } MUTEX_UNLOCK(&stream_.mutex); } void RtApiDs :: abortStream() { verifyStream(); if (stream_.state == STREAM_STOPPED) return; // Change the state before the lock to improve shutdown response // when using a callback. stream_.state = STREAM_STOPPED; MUTEX_LOCK(&stream_.mutex); timeEndPeriod(1); // revert to normal scheduler frequency on lesser windows. 
HRESULT result; long dsBufferSize; LPVOID audioPtr; DWORD dataLen; DsHandle *handles = (DsHandle *) stream_.apiHandle; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { LPDIRECTSOUNDBUFFER buffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer; result = buffer->Stop(); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to stop buffer (%s): %s", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } dsBufferSize = handles[0].dsBufferSize; // Lock the buffer and clear it so that if we start to play again, // we won't have old data playing. result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to lock buffer (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } // Zero the DS buffer ZeroMemory(audioPtr, dataLen); // Unlock the DS buffer result = buffer->Unlock(audioPtr, dataLen, NULL, 0); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to unlock buffer (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } // If we start playing again, we must begin at beginning of buffer. handles[0].bufferPointer = 0; } if (stream_.mode == INPUT || stream_.mode == DUPLEX) { LPDIRECTSOUNDCAPTUREBUFFER buffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer; audioPtr = NULL; dataLen = 0; result = buffer->Stop(); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to stop capture buffer (%s): %s", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } dsBufferSize = handles[1].dsBufferSize; // Lock the buffer and clear it so that if we start to play again, // we won't have old data playing. 
result = buffer->Lock(0, dsBufferSize, &audioPtr, &dataLen, NULL, NULL, 0); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to lock capture buffer (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } // Zero the DS buffer ZeroMemory(audioPtr, dataLen); // Unlock the DS buffer result = buffer->Unlock(audioPtr, dataLen, NULL, 0); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to unlock capture buffer (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } // If we start recording again, we must begin at beginning of buffer. handles[1].bufferPointer = 0; } MUTEX_UNLOCK(&stream_.mutex); } int RtApiDs :: streamWillBlock() { verifyStream(); if (stream_.state == STREAM_STOPPED) return 0; MUTEX_LOCK(&stream_.mutex); int channels; int frames = 0; HRESULT result; DWORD currentPos, safePos; channels = 1; DsHandle *handles = (DsHandle *) stream_.apiHandle; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer; UINT nextWritePos = handles[0].bufferPointer; channels = stream_.nDeviceChannels[0]; DWORD dsBufferSize = handles[0].dsBufferSize; // Find out where the read and "safe write" pointers are. result = dsBuffer->GetCurrentPosition(¤tPos, &safePos); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } DWORD leadPos = safePos + handles[0].dsPointerLeadTime; if (leadPos > dsBufferSize) { leadPos -= dsBufferSize; } if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset frames = (leadPos - nextWritePos); frames /= channels * formatBytes(stream_.deviceFormat[0]); } if (stream_.mode == INPUT ) { // note that we don't block on DUPLEX input anymore. We run lockstep with the write pointer instead. 
LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer; UINT nextReadPos = handles[1].bufferPointer; channels = stream_.nDeviceChannels[1]; DWORD dsBufferSize = handles[1].dsBufferSize; // Find out where the write and "safe read" pointers are. result = dsBuffer->GetCurrentPosition(¤tPos, &safePos); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } if ( safePos < (DWORD)nextReadPos ) safePos += dsBufferSize; // unwrap offset frames = (int)(safePos - nextReadPos); frames /= channels * formatBytes(stream_.deviceFormat[1]); } frames = stream_.bufferSize - frames; if (frames < 0) frames = 0; MUTEX_UNLOCK(&stream_.mutex); return frames; } void RtApiDs :: tickStream() { verifyStream(); int stopStream = 0; if (stream_.state == STREAM_STOPPED) { if (stream_.callbackInfo.usingCallback) Sleep(50); // sleep 50 milliseconds return; } else if (stream_.callbackInfo.usingCallback) { RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback; stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData); } MUTEX_LOCK(&stream_.mutex); // The state might change while waiting on a mutex. if (stream_.state == STREAM_STOPPED) { MUTEX_UNLOCK(&stream_.mutex); return; } HRESULT result; DWORD currentWritePos, safeWritePos; DWORD currentReadPos, safeReadPos; DWORD leadPos; UINT nextWritePos; #ifdef GENERATE_DEBUG_LOG DWORD writeTime, readTime; #endif LPVOID buffer1 = NULL; LPVOID buffer2 = NULL; DWORD bufferSize1 = 0; DWORD bufferSize2 = 0; char *buffer; long buffer_bytes; DsHandle *handles = (DsHandle *) stream_.apiHandle; if (stream_.mode == DUPLEX && !buffersRolling) { assert(handles[0].dsBufferSize == handles[1].dsBufferSize); // It takes a while for the devices to get rolling. 
As a result, // there's no guarantee that the capture and write device pointers // will move in lockstep. Wait here for both devices to start // rolling, and then set our buffer pointers accordingly. // e.g. Crystal Drivers: the capture buffer starts up 5700 to 9600 // bytes later than the write buffer. // Stub: a serious risk of having a pre-emptive scheduling round // take place between the two GetCurrentPosition calls... but I'm // really not sure how to solve the problem. Temporarily boost to // Realtime priority, maybe; but I'm not sure what priority the // directsound service threads run at. We *should* be roughly // within a ms or so of correct. LPDIRECTSOUNDBUFFER dsWriteBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer; LPDIRECTSOUNDCAPTUREBUFFER dsCaptureBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer; DWORD initialWritePos, initialSafeWritePos; DWORD initialReadPos, initialSafeReadPos;; result = dsWriteBuffer->GetCurrentPosition(&initialWritePos, &initialSafeWritePos); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } result = dsCaptureBuffer->GetCurrentPosition(&initialReadPos, &initialSafeReadPos); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } while (true) { result = dsWriteBuffer->GetCurrentPosition(¤tWritePos, &safeWritePos); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } result = dsCaptureBuffer->GetCurrentPosition(¤tReadPos, &safeReadPos); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.", devices_[stream_.device[1]].name.c_str(), 
getErrorString(result)); error(RtError::DRIVER_ERROR); } if (safeWritePos != initialSafeWritePos && safeReadPos != initialSafeReadPos) { break; } Sleep(1); } assert( handles[0].dsBufferSize == handles[1].dsBufferSize ); buffersRolling = true; handles[0].bufferPointer = (safeWritePos + handles[0].dsPointerLeadTime); handles[1].bufferPointer = safeReadPos; } if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { LPDIRECTSOUNDBUFFER dsBuffer = (LPDIRECTSOUNDBUFFER) handles[0].buffer; // Setup parameters and do buffer conversion if necessary. if (stream_.doConvertBuffer[0]) { buffer = stream_.deviceBuffer; convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] ); buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[0]; buffer_bytes *= formatBytes(stream_.deviceFormat[0]); } else { buffer = stream_.userBuffer; buffer_bytes = stream_.bufferSize * stream_.nUserChannels[0]; buffer_bytes *= formatBytes(stream_.userFormat); } // No byte swapping necessary in DirectSound implementation. // Ahhh ... windoze. 16-bit data is signed but 8-bit data is // unsigned. So, we need to convert our signed 8-bit data here to // unsigned. if ( stream_.deviceFormat[0] == RTAUDIO_SINT8 ) for ( int i=0; iGetCurrentPosition(¤tWritePos, &safeWritePos); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current position (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } leadPos = safeWritePos + handles[0].dsPointerLeadTime; if ( leadPos > dsBufferSize ) leadPos -= dsBufferSize; if ( leadPos < nextWritePos ) leadPos += dsBufferSize; // unwrap offset endWrite = nextWritePos + buffer_bytes; // Check whether the entire write region is behind the play pointer. if ( leadPos >= endWrite ) break; // If we are here, then we must wait until the play pointer gets // beyond the write region. The approach here is to use the // Sleep() function to suspend operation until safePos catches // up. 
Calculate number of milliseconds to wait as: // time = distance * (milliseconds/second) * fudgefactor / // ((bytes/sample) * (samples/second)) // A "fudgefactor" less than 1 is used because it was found // that sleeping too long was MUCH worse than sleeping for // several shorter periods. double millis = (endWrite - leadPos) * 900.0; millis /= ( formatBytes(stream_.deviceFormat[0]) *stream_.nDeviceChannels[0]* stream_.sampleRate); if ( millis < 1.0 ) millis = 1.0; if ( millis > 50.0 ) { static int nOverruns = 0; ++nOverruns; } Sleep( (DWORD) millis ); } #ifdef GENERATE_DEBUG_LOG writeTime = timeGetTime(); #endif if (statistics.writeDeviceSafeLeadBytes < dsPointerDifference(safeWritePos,currentWritePos,handles[0].dsBufferSize)) { statistics.writeDeviceSafeLeadBytes = dsPointerDifference(safeWritePos,currentWritePos,handles[0].dsBufferSize); } if ( dsPointerBetween( nextWritePos, safeWritePos, currentWritePos, dsBufferSize ) || dsPointerBetween( endWrite, safeWritePos, currentWritePos, dsBufferSize ) ) { // We've strayed into the forbidden zone ... resync the read pointer. 
++statistics.numberOfWriteUnderruns; nextWritePos = safeWritePos + handles[0].dsPointerLeadTime-buffer_bytes+dsBufferSize; while (nextWritePos >= dsBufferSize) nextWritePos-= dsBufferSize; handles[0].bufferPointer = nextWritePos; endWrite = nextWritePos + buffer_bytes; } // Lock free space in the buffer result = dsBuffer->Lock( nextWritePos, buffer_bytes, &buffer1, &bufferSize1, &buffer2, &bufferSize2, 0 ); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to lock buffer during playback (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } // Copy our buffer into the DS buffer CopyMemory(buffer1, buffer, bufferSize1); if (buffer2 != NULL) CopyMemory(buffer2, buffer+bufferSize1, bufferSize2); // Update our buffer offset and unlock sound buffer dsBuffer->Unlock( buffer1, bufferSize1, buffer2, bufferSize2 ); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to unlock buffer during playback (%s): %s.", devices_[stream_.device[0]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } nextWritePos = (nextWritePos + bufferSize1 + bufferSize2) % dsBufferSize; handles[0].bufferPointer = nextWritePos; } if (stream_.mode == INPUT || stream_.mode == DUPLEX) { // Setup parameters. if (stream_.doConvertBuffer[1]) { buffer = stream_.deviceBuffer; buffer_bytes = stream_.bufferSize * stream_.nDeviceChannels[1]; buffer_bytes *= formatBytes(stream_.deviceFormat[1]); } else { buffer = stream_.userBuffer; buffer_bytes = stream_.bufferSize * stream_.nUserChannels[1]; buffer_bytes *= formatBytes(stream_.userFormat); } LPDIRECTSOUNDCAPTUREBUFFER dsBuffer = (LPDIRECTSOUNDCAPTUREBUFFER) handles[1].buffer; long nextReadPos = handles[1].bufferPointer; DWORD dsBufferSize = handles[1].dsBufferSize; // Find out where the write and "safe read" pointers are. 
result = dsBuffer->GetCurrentPosition(¤tReadPos, &safeReadPos); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset DWORD endRead = nextReadPos + buffer_bytes; // Handling depends on whether we are INPUT or DUPLEX. // If we're in INPUT mode then waiting is a good thing. If we're in DUPLEX mode, // then a wait here will drag the write pointers into the forbidden zone. // // In DUPLEX mode, rather than wait, we will back off the read pointer until // it's in a safe position. This causes dropouts, but it seems to be the only // practical way to sync up the read and write pointers reliably, given the // the very complex relationship between phase and increment of the read and write // pointers. // // In order to minimize audible dropouts in DUPLEX mode, we will // provide a pre-roll period of 0.5 seconds in which we return // zeros from the read buffer while the pointers sync up. if (stream_.mode == DUPLEX) { if (safeReadPos < endRead) { if (duplexPrerollBytes <= 0) { // pre-roll time over. Be more agressive. int adjustment = endRead-safeReadPos; ++statistics.numberOfReadOverruns; // Two cases: // large adjustments: we've probably run out of CPU cycles, so just resync exactly, // and perform fine adjustments later. // small adjustments: back off by twice as much. if (adjustment >= 2*buffer_bytes) { nextReadPos = safeReadPos-2*buffer_bytes; } else { nextReadPos = safeReadPos-buffer_bytes-adjustment; } statistics.readDeviceSafeLeadBytes = currentReadPos-nextReadPos; if (statistics.readDeviceSafeLeadBytes < 0) statistics.readDeviceSafeLeadBytes += dsBufferSize; if (nextReadPos < 0) nextReadPos += dsBufferSize; } else { // in pre=roll time. Just do it. 
nextReadPos = safeReadPos-buffer_bytes; while (nextReadPos < 0) nextReadPos += dsBufferSize; } endRead = nextReadPos + buffer_bytes; } } else { while ( safeReadPos < endRead ) { // See comments for playback. double millis = (endRead - safeReadPos) * 900.0; millis /= ( formatBytes(stream_.deviceFormat[1]) * stream_.nDeviceChannels[1] * stream_.sampleRate); if ( millis < 1.0 ) millis = 1.0; Sleep( (DWORD) millis ); // Wake up, find out where we are now result = dsBuffer->GetCurrentPosition( ¤tReadPos, &safeReadPos ); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to get current capture position (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } if ( safeReadPos < (DWORD)nextReadPos ) safeReadPos += dsBufferSize; // unwrap offset } } #ifdef GENERATE_DEBUG_LOG readTime = timeGetTime(); #endif if (statistics.readDeviceSafeLeadBytes < dsPointerDifference(currentReadPos,nextReadPos ,dsBufferSize)) { statistics.readDeviceSafeLeadBytes = dsPointerDifference(currentReadPos,nextReadPos ,dsBufferSize); } // Lock free space in the buffer result = dsBuffer->Lock (nextReadPos, buffer_bytes, &buffer1, &bufferSize1, &buffer2, &bufferSize2, 0); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: Unable to lock buffer during capture (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } if (duplexPrerollBytes <= 0) { // Copy our buffer into the DS buffer CopyMemory(buffer, buffer1, bufferSize1); if (buffer2 != NULL) CopyMemory(buffer+bufferSize1, buffer2, bufferSize2); } else { memset(buffer,0,bufferSize1); if (buffer2 != NULL) memset(buffer+bufferSize1,0,bufferSize2); duplexPrerollBytes -= bufferSize1 + bufferSize2; } // Update our buffer offset and unlock sound buffer nextReadPos = (nextReadPos + bufferSize1 + bufferSize2) % dsBufferSize; dsBuffer->Unlock (buffer1, bufferSize1, buffer2, bufferSize2); if ( FAILED(result) ) { sprintf(message_, "RtApiDs: 
Unable to unlock buffer during capture (%s): %s.", devices_[stream_.device[1]].name.c_str(), getErrorString(result)); error(RtError::DRIVER_ERROR); } handles[1].bufferPointer = nextReadPos; // No byte swapping necessary in DirectSound implementation. // If necessary, convert 8-bit data from unsigned to signed. if ( stream_.deviceFormat[1] == RTAUDIO_SINT8 ) for ( int j=0; jstopStream(); } // Definitions for utility functions and callbacks // specific to the DirectSound implementation. extern "C" unsigned __stdcall callbackHandler(void *ptr) { CallbackInfo *info = (CallbackInfo *) ptr; RtApiDs *object = (RtApiDs *) info->object; bool *usingCallback = &info->usingCallback; while ( *usingCallback ) { try { object->tickStream(); } catch (RtError &exception) { fprintf(stderr, "\nRtApiDs: callback thread error (%s) ... closing thread.\n\n", exception.getMessageString()); break; } } _endthreadex( 0 ); return 0; } static bool CALLBACK deviceCountCallback(LPGUID lpguid, LPCTSTR description, LPCTSTR module, LPVOID lpContext) { int *pointer = ((int *) lpContext); (*pointer)++; return true; } #include "tchar.h" std::string convertTChar( LPCTSTR name ) { std::string s; #if defined( UNICODE ) || defined( _UNICODE ) // Yes, this conversion doesn't make sense for two-byte characters // but RtAudio is currently written to return an std::string of // one-byte chars for the device name. 
for ( unsigned int i=0; iname.empty() ) info++; info->name = convertTChar( description ); info->id = lpguid; HRESULT hr; info->isValid = false; if (info->isInput == true) { DSCCAPS caps; LPDIRECTSOUNDCAPTURE object; hr = DirectSoundCaptureCreate( lpguid, &object, NULL ); if( hr != DS_OK ) return true; caps.dwSize = sizeof(caps); hr = object->GetCaps( &caps ); if( hr == DS_OK ) { if (caps.dwChannels > 0 && caps.dwFormats > 0) info->isValid = true; } object->Release(); } else { DSCAPS caps; LPDIRECTSOUND object; hr = DirectSoundCreate( lpguid, &object, NULL ); if( hr != DS_OK ) return true; caps.dwSize = sizeof(caps); hr = object->GetCaps( &caps ); if( hr == DS_OK ) { if ( caps.dwFlags & DSCAPS_PRIMARYMONO || caps.dwFlags & DSCAPS_PRIMARYSTEREO ) info->isValid = true; } object->Release(); } return true; } static bool CALLBACK defaultDeviceCallback(LPGUID lpguid, LPCTSTR description, LPCTSTR module, LPVOID lpContext) { enum_info *info = ((enum_info *) lpContext); if ( lpguid == NULL ) { info->name = convertTChar( description ); return false; } return true; } static bool CALLBACK deviceIdCallback(LPGUID lpguid, LPCTSTR description, LPCTSTR module, LPVOID lpContext) { enum_info *info = ((enum_info *) lpContext); std::string s = convertTChar( description ); if ( info->name == s ) { info->id = lpguid; info->isValid = true; return false; } return true; } static char* getErrorString(int code) { switch (code) { case DSERR_ALLOCATED: return "Already allocated."; case DSERR_CONTROLUNAVAIL: return "Control unavailable."; case DSERR_INVALIDPARAM: return "Invalid parameter."; case DSERR_INVALIDCALL: return "Invalid call."; case DSERR_GENERIC: return "Generic error."; case DSERR_PRIOLEVELNEEDED: return "Priority level needed"; case DSERR_OUTOFMEMORY: return "Out of memory"; case DSERR_BADFORMAT: return "The sample rate or the channel format is not supported."; case DSERR_UNSUPPORTED: return "Not supported."; case DSERR_NODRIVER: return "No driver."; case DSERR_ALREADYINITIALIZED: 
return "Already initialized."; case DSERR_NOAGGREGATION: return "No aggregation."; case DSERR_BUFFERLOST: return "Buffer lost."; case DSERR_OTHERAPPHASPRIO: return "Another application already has priority."; case DSERR_UNINITIALIZED: return "Uninitialized."; default: return "DirectSound unknown error"; } } //******************** End of __WINDOWS_DS__ *********************// #endif #if defined(__IRIX_AL__) // SGI's AL API for IRIX #include #include #include extern "C" void *callbackHandler(void * ptr); RtApiAl :: RtApiAl() { this->initialize(); if (nDevices_ <= 0) { sprintf(message_, "RtApiAl: no Irix AL audio devices found!"); error(RtError::NO_DEVICES_FOUND); } } RtApiAl :: ~RtApiAl() { // The subclass destructor gets called before the base class // destructor, so close any existing streams before deallocating // apiDeviceId memory. if ( stream_.mode != UNINITIALIZED ) closeStream(); // Free our allocated apiDeviceId memory. long *id; for ( unsigned int i=0; iapiDeviceId; resource = id[0]; if (resource > 0) { // Probe output device parameters. result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0); if (result < 0) { sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.", info->name.c_str(), alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); } else { info->maxOutputChannels = value.i; info->minOutputChannels = 1; } result = alGetParamInfo(resource, AL_RATE, &pinfo); if (result < 0) { sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.", info->name.c_str(), alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); } else { info->sampleRates.clear(); for (unsigned int k=0; k= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i ) info->sampleRates.push_back( SAMPLE_RATES[k] ); } } // The AL library supports all our formats, except 24-bit and 32-bit ints. info->nativeFormats = (RtAudioFormat) 51; } // Now get input resource ID if it exists. resource = id[1]; if (resource > 0) { // Probe input device parameters. 
result = alQueryValues(resource, AL_CHANNELS, &value, 1, 0, 0); if (result < 0) { sprintf(message_, "RtApiAl: error getting device (%s) channels: %s.", info->name.c_str(), alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); } else { info->maxInputChannels = value.i; info->minInputChannels = 1; } result = alGetParamInfo(resource, AL_RATE, &pinfo); if (result < 0) { sprintf(message_, "RtApiAl: error getting device (%s) rates: %s.", info->name.c_str(), alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); } else { // In the case of the default device, these values will // overwrite the rates determined for the output device. Since // the input device is most likely to be more limited than the // output device, this is ok. info->sampleRates.clear(); for (unsigned int k=0; k= pinfo.min.i && SAMPLE_RATES[k] <= pinfo.max.i ) info->sampleRates.push_back( SAMPLE_RATES[k] ); } } // The AL library supports all our formats, except 24-bit and 32-bit ints. info->nativeFormats = (RtAudioFormat) 51; } if ( info->maxInputChannels == 0 && info->maxOutputChannels == 0 ) return; if ( info->sampleRates.size() == 0 ) return; // Determine duplex status. if (info->maxInputChannels < info->maxOutputChannels) info->maxDuplexChannels = info->maxInputChannels; else info->maxDuplexChannels = info->maxOutputChannels; if (info->minInputChannels < info->minOutputChannels) info->minDuplexChannels = info->minInputChannels; else info->minDuplexChannels = info->minOutputChannels; if ( info->maxDuplexChannels > 0 ) info->hasDuplexSupport = true; else info->hasDuplexSupport = false; info->probed = true; return; } bool RtApiAl :: probeDeviceOpen(int device, StreamMode mode, int channels, int sampleRate, RtAudioFormat format, int *bufferSize, int numberOfBuffers) { int result, nBuffers; long resource; ALconfig al_config; ALport port; ALpv pvs[2]; long *id = (long *) devices_[device].apiDeviceId; // Get a new ALconfig structure. 
al_config = alNewConfig(); if ( !al_config ) { sprintf(message_,"RtApiAl: can't get AL config: %s.", alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); return FAILURE; } // Set the channels. result = alSetChannels(al_config, channels); if ( result < 0 ) { alFreeConfig(al_config); sprintf(message_,"RtApiAl: can't set %d channels in AL config: %s.", channels, alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); return FAILURE; } // Attempt to set the queue size. The al API doesn't provide a // means for querying the minimum/maximum buffer size of a device, // so if the specified size doesn't work, take whatever the // al_config structure returns. if ( numberOfBuffers < 1 ) nBuffers = 1; else nBuffers = numberOfBuffers; long buffer_size = *bufferSize * nBuffers; result = alSetQueueSize(al_config, buffer_size); // in sample frames if ( result < 0 ) { // Get the buffer size specified by the al_config and try that. buffer_size = alGetQueueSize(al_config); result = alSetQueueSize(al_config, buffer_size); if ( result < 0 ) { alFreeConfig(al_config); sprintf(message_,"RtApiAl: can't set buffer size (%ld) in AL config: %s.", buffer_size, alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); return FAILURE; } *bufferSize = buffer_size / nBuffers; } // Set the data format. stream_.userFormat = format; stream_.deviceFormat[mode] = format; if (format == RTAUDIO_SINT8) { result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP); result = alSetWidth(al_config, AL_SAMPLE_8); } else if (format == RTAUDIO_SINT16) { result = alSetSampFmt(al_config, AL_SAMPFMT_TWOSCOMP); result = alSetWidth(al_config, AL_SAMPLE_16); } else if (format == RTAUDIO_SINT24) { // Our 24-bit format assumes the upper 3 bytes of a 4 byte word. // The AL library uses the lower 3 bytes, so we'll need to do our // own conversion. 
result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT); stream_.deviceFormat[mode] = RTAUDIO_FLOAT32; } else if (format == RTAUDIO_SINT32) { // The AL library doesn't seem to support the 32-bit integer // format, so we'll need to do our own conversion. result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT); stream_.deviceFormat[mode] = RTAUDIO_FLOAT32; } else if (format == RTAUDIO_FLOAT32) result = alSetSampFmt(al_config, AL_SAMPFMT_FLOAT); else if (format == RTAUDIO_FLOAT64) result = alSetSampFmt(al_config, AL_SAMPFMT_DOUBLE); if ( result == -1 ) { alFreeConfig(al_config); sprintf(message_,"RtApiAl: error setting sample format in AL config: %s.", alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); return FAILURE; } if (mode == OUTPUT) { // Set our device. if (device == 0) resource = AL_DEFAULT_OUTPUT; else resource = id[0]; result = alSetDevice(al_config, resource); if ( result == -1 ) { alFreeConfig(al_config); sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.", devices_[device].name.c_str(), alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); return FAILURE; } // Open the port. port = alOpenPort("RtApiAl Output Port", "w", al_config); if( !port ) { alFreeConfig(al_config); sprintf(message_,"RtApiAl: error opening output port: %s.", alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); return FAILURE; } // Set the sample rate pvs[0].param = AL_MASTER_CLOCK; pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE; pvs[1].param = AL_RATE; pvs[1].value.ll = alDoubleToFixed((double)sampleRate); result = alSetParams(resource, pvs, 2); if ( result < 0 ) { alClosePort(port); alFreeConfig(al_config); sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.", sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); return FAILURE; } } else { // mode == INPUT // Set our device. 
if (device == 0) resource = AL_DEFAULT_INPUT; else resource = id[1]; result = alSetDevice(al_config, resource); if ( result == -1 ) { alFreeConfig(al_config); sprintf(message_,"RtApiAl: error setting device (%s) in AL config: %s.", devices_[device].name.c_str(), alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); return FAILURE; } // Open the port. port = alOpenPort("RtApiAl Input Port", "r", al_config); if( !port ) { alFreeConfig(al_config); sprintf(message_,"RtApiAl: error opening input port: %s.", alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); return FAILURE; } // Set the sample rate pvs[0].param = AL_MASTER_CLOCK; pvs[0].value.i = AL_CRYSTAL_MCLK_TYPE; pvs[1].param = AL_RATE; pvs[1].value.ll = alDoubleToFixed((double)sampleRate); result = alSetParams(resource, pvs, 2); if ( result < 0 ) { alClosePort(port); alFreeConfig(al_config); sprintf(message_,"RtApiAl: error setting sample rate (%d) for device (%s): %s.", sampleRate, devices_[device].name.c_str(), alGetErrorString(oserror())); error(RtError::DEBUG_WARNING); return FAILURE; } } alFreeConfig(al_config); stream_.nUserChannels[mode] = channels; stream_.nDeviceChannels[mode] = channels; // Save stream handle. 
ALport *handle = (ALport *) stream_.apiHandle; if ( handle == 0 ) { handle = (ALport *) calloc(2, sizeof(ALport)); if ( handle == NULL ) { sprintf(message_, "RtApiAl: Irix Al error allocating handle memory (%s).", devices_[device].name.c_str()); goto error; } stream_.apiHandle = (void *) handle; handle[0] = 0; handle[1] = 0; } handle[mode] = port; // Set flags for buffer conversion stream_.doConvertBuffer[mode] = false; if (stream_.userFormat != stream_.deviceFormat[mode]) stream_.doConvertBuffer[mode] = true; // Allocate necessary internal buffers if ( stream_.nUserChannels[0] != stream_.nUserChannels[1] ) { long buffer_bytes; if (stream_.nUserChannels[0] >= stream_.nUserChannels[1]) buffer_bytes = stream_.nUserChannels[0]; else buffer_bytes = stream_.nUserChannels[1]; buffer_bytes *= *bufferSize * formatBytes(stream_.userFormat); if (stream_.userBuffer) free(stream_.userBuffer); stream_.userBuffer = (char *) calloc(buffer_bytes, 1); if (stream_.userBuffer == NULL) { sprintf(message_, "RtApiAl: error allocating user buffer memory (%s).", devices_[device].name.c_str()); goto error; } } if ( stream_.doConvertBuffer[mode] ) { long buffer_bytes; bool makeBuffer = true; if ( mode == OUTPUT ) buffer_bytes = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]); else { // mode == INPUT buffer_bytes = stream_.nDeviceChannels[1] * formatBytes(stream_.deviceFormat[1]); if ( stream_.mode == OUTPUT && stream_.deviceBuffer ) { long bytes_out = stream_.nDeviceChannels[0] * formatBytes(stream_.deviceFormat[0]); if ( buffer_bytes < bytes_out ) makeBuffer = false; } } if ( makeBuffer ) { buffer_bytes *= *bufferSize; if (stream_.deviceBuffer) free(stream_.deviceBuffer); stream_.deviceBuffer = (char *) calloc(buffer_bytes, 1); if (stream_.deviceBuffer == NULL) { sprintf(message_, "RtApiAl: error allocating device buffer memory (%s).", devices_[device].name.c_str()); goto error; } } } stream_.device[mode] = device; stream_.state = STREAM_STOPPED; if ( stream_.mode == 
OUTPUT && mode == INPUT ) // We had already set up an output stream. stream_.mode = DUPLEX; else stream_.mode = mode; stream_.nBuffers = nBuffers; stream_.bufferSize = *bufferSize; stream_.sampleRate = sampleRate; // Setup the buffer conversion information structure. if ( stream_.doConvertBuffer[mode] ) { if (mode == INPUT) { // convert device to user buffer stream_.convertInfo[mode].inJump = stream_.nDeviceChannels[1]; stream_.convertInfo[mode].outJump = stream_.nUserChannels[1]; stream_.convertInfo[mode].inFormat = stream_.deviceFormat[1]; stream_.convertInfo[mode].outFormat = stream_.userFormat; } else { // convert user to device buffer stream_.convertInfo[mode].inJump = stream_.nUserChannels[0]; stream_.convertInfo[mode].outJump = stream_.nDeviceChannels[0]; stream_.convertInfo[mode].inFormat = stream_.userFormat; stream_.convertInfo[mode].outFormat = stream_.deviceFormat[0]; } if ( stream_.convertInfo[mode].inJump < stream_.convertInfo[mode].outJump ) stream_.convertInfo[mode].channels = stream_.convertInfo[mode].inJump; else stream_.convertInfo[mode].channels = stream_.convertInfo[mode].outJump; // Set up the interleave/deinterleave offsets. if ( mode == INPUT && stream_.deInterleave[1] ) { for (int k=0; k err) frames = err; } frames = stream_.bufferSize - frames; if (frames < 0) frames = 0; MUTEX_UNLOCK(&stream_.mutex); return frames; } void RtApiAl :: tickStream() { verifyStream(); int stopStream = 0; if (stream_.state == STREAM_STOPPED) { if (stream_.callbackInfo.usingCallback) usleep(50000); // sleep 50 milliseconds return; } else if (stream_.callbackInfo.usingCallback) { RtAudioCallback callback = (RtAudioCallback) stream_.callbackInfo.callback; stopStream = callback(stream_.userBuffer, stream_.bufferSize, stream_.callbackInfo.userData); } MUTEX_LOCK(&stream_.mutex); // The state might change while waiting on a mutex. 
if (stream_.state == STREAM_STOPPED) goto unlock; char *buffer; int channels; RtAudioFormat format; ALport *handle = (ALport *) stream_.apiHandle; if (stream_.mode == OUTPUT || stream_.mode == DUPLEX) { // Setup parameters and do buffer conversion if necessary. if (stream_.doConvertBuffer[0]) { buffer = stream_.deviceBuffer; convertBuffer( buffer, stream_.userBuffer, stream_.convertInfo[0] ); channels = stream_.nDeviceChannels[0]; format = stream_.deviceFormat[0]; } else { buffer = stream_.userBuffer; channels = stream_.nUserChannels[0]; format = stream_.userFormat; } // Do byte swapping if necessary. if (stream_.doByteSwap[0]) byteSwapBuffer(buffer, stream_.bufferSize * channels, format); // Write interleaved samples to device. alWriteFrames(handle[0], buffer, stream_.bufferSize); } if (stream_.mode == INPUT || stream_.mode == DUPLEX) { // Setup parameters. if (stream_.doConvertBuffer[1]) { buffer = stream_.deviceBuffer; channels = stream_.nDeviceChannels[1]; format = stream_.deviceFormat[1]; } else { buffer = stream_.userBuffer; channels = stream_.nUserChannels[1]; format = stream_.userFormat; } // Read interleaved samples from device. alReadFrames(handle[1], buffer, stream_.bufferSize); // Do byte swapping if necessary. if (stream_.doByteSwap[1]) byteSwapBuffer(buffer, stream_.bufferSize * channels, format); // Do buffer conversion if necessary. 
if (stream_.doConvertBuffer[1]) convertBuffer( stream_.userBuffer, stream_.deviceBuffer, stream_.convertInfo[1] ); } unlock: MUTEX_UNLOCK(&stream_.mutex); if (stream_.callbackInfo.usingCallback && stopStream) this->stopStream(); } void RtApiAl :: setStreamCallback(RtAudioCallback callback, void *userData) { verifyStream(); CallbackInfo *info = (CallbackInfo *) &stream_.callbackInfo; if ( info->usingCallback ) { sprintf(message_, "RtApiAl: A callback is already set for this stream!"); error(RtError::WARNING); return; } info->callback = (void *) callback; info->userData = userData; info->usingCallback = true; info->object = (void *) this; // Set the thread attributes for joinable and realtime scheduling // priority. The higher priority will only take affect if the // program is run as root or suid. pthread_attr_t attr; pthread_attr_init(&attr); pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_JOINABLE); pthread_attr_setschedpolicy(&attr, SCHED_RR); int err = pthread_create(&info->thread, &attr, callbackHandler, &stream_.callbackInfo); pthread_attr_destroy(&attr); if (err) { info->usingCallback = false; sprintf(message_, "RtApiAl: error starting callback thread!"); error(RtError::THREAD_ERROR); } } void RtApiAl :: cancelStreamCallback() { verifyStream(); if (stream_.callbackInfo.usingCallback) { if (stream_.state == STREAM_RUNNING) stopStream(); MUTEX_LOCK(&stream_.mutex); stream_.callbackInfo.usingCallback = false; pthread_join(stream_.callbackInfo.thread, NULL); stream_.callbackInfo.thread = 0; stream_.callbackInfo.callback = NULL; stream_.callbackInfo.userData = NULL; MUTEX_UNLOCK(&stream_.mutex); } } extern "C" void *callbackHandler(void *ptr) { CallbackInfo *info = (CallbackInfo *) ptr; RtApiAl *object = (RtApiAl *) info->object; bool *usingCallback = &info->usingCallback; while ( *usingCallback ) { try { object->tickStream(); } catch (RtError &exception) { fprintf(stderr, "\nRtApiAl: callback thread error (%s) ... 
closing thread.\n\n", exception.getMessageString()); break; } } return 0; } //******************** End of __IRIX_AL__ *********************// #endif // *************************************************** // // // Protected common (OS-independent) RtAudio methods. // // *************************************************** // // This method can be modified to control the behavior of error // message reporting and throwing. void RtApi :: error(RtError::Type type) { if (type == RtError::WARNING) { fprintf(stderr, "\n%s\n\n", message_); } else if (type == RtError::DEBUG_WARNING) { #if defined(__RTAUDIO_DEBUG__) fprintf(stderr, "\n%s\n\n", message_); #endif } else { #if defined(__RTAUDIO_DEBUG__) fprintf(stderr, "\n%s\n\n", message_); #endif throw RtError(std::string(message_), type); } } void RtApi :: verifyStream() { if ( stream_.mode == UNINITIALIZED ) { sprintf(message_, "RtAudio: stream is not open!"); error(RtError::INVALID_STREAM); } } void RtApi :: clearDeviceInfo(RtApiDevice *info) { // Don't clear the name or DEVICE_ID fields here ... they are // typically set prior to a call of this function. 
info->probed = false; info->maxOutputChannels = 0; info->maxInputChannels = 0; info->maxDuplexChannels = 0; info->minOutputChannels = 0; info->minInputChannels = 0; info->minDuplexChannels = 0; info->hasDuplexSupport = false; info->sampleRates.clear(); info->nativeFormats = 0; } void RtApi :: clearStreamInfo() { stream_.mode = UNINITIALIZED; stream_.state = STREAM_STOPPED; stream_.sampleRate = 0; stream_.bufferSize = 0; stream_.nBuffers = 0; stream_.userFormat = 0; for ( int i=0; i<2; i++ ) { stream_.device[i] = 0; stream_.doConvertBuffer[i] = false; stream_.deInterleave[i] = false; stream_.doByteSwap[i] = false; stream_.nUserChannels[i] = 0; stream_.nDeviceChannels[i] = 0; stream_.deviceFormat[i] = 0; } } int RtApi :: formatBytes(RtAudioFormat format) { if (format == RTAUDIO_SINT16) return 2; else if (format == RTAUDIO_SINT24 || format == RTAUDIO_SINT32 || format == RTAUDIO_FLOAT32) return 4; else if (format == RTAUDIO_FLOAT64) return 8; else if (format == RTAUDIO_SINT8) return 1; sprintf(message_,"RtApi: undefined format in formatBytes()."); error(RtError::WARNING); return 0; } void RtApi :: convertBuffer( char *outBuffer, char *inBuffer, ConvertInfo &info ) { // This function does format conversion, input/output channel compensation, and // data interleaving/deinterleaving. 24-bit integers are assumed to occupy // the upper three bytes of a 32-bit integer. 
// Clear our device buffer when in/out duplex device channels are different if ( outBuffer == stream_.deviceBuffer && stream_.mode == DUPLEX && stream_.nDeviceChannels[0] != stream_.nDeviceChannels[1] ) memset( outBuffer, 0, stream_.bufferSize * info.outJump * formatBytes( info.outFormat ) ); int j; if (info.outFormat == RTAUDIO_FLOAT64) { Float64 scale; Float64 *out = (Float64 *)outBuffer; if (info.inFormat == RTAUDIO_SINT8) { signed char *in = (signed char *)inBuffer; scale = 1.0 / 128.0; for (int i=0; i> 16) & 0x0000ffff); } in += info.inJump; out += info.outJump; } } else if (info.inFormat == RTAUDIO_SINT32) { Int32 *in = (Int32 *)inBuffer; for (int i=0; i> 16) & 0x0000ffff); } in += info.inJump; out += info.outJump; } } else if (info.inFormat == RTAUDIO_FLOAT32) { Float32 *in = (Float32 *)inBuffer; for (int i=0; i> 8) & 0x00ff); } in += info.inJump; out += info.outJump; } } else if (info.inFormat == RTAUDIO_SINT24) { Int32 *in = (Int32 *)inBuffer; for (int i=0; i> 24) & 0x000000ff); } in += info.inJump; out += info.outJump; } } else if (info.inFormat == RTAUDIO_SINT32) { Int32 *in = (Int32 *)inBuffer; for (int i=0; i> 24) & 0x000000ff); } in += info.inJump; out += info.outJump; } } else if (info.inFormat == RTAUDIO_FLOAT32) { Float32 *in = (Float32 *)inBuffer; for (int i=0; i