cpal-0.15.2/.cargo_vcs_info.json0000644000000001360000000000100120400ustar { "git": { "sha1": "2cae7b432320839d0db00c6cb4ca0a56319e7c77" }, "path_in_vcs": "" }cpal-0.15.2/.github/workflows/cpal.yml000064400000000000000000000230511046102023000156700ustar 00000000000000name: cpal on: [push, pull_request] jobs: clippy-test: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Update apt run: sudo apt update - name: Install alsa run: sudo apt-get install libasound2-dev - name: Install libjack run: sudo apt-get install libjack-jackd2-dev libjack-jackd2-0 - name: Install stable uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true components: clippy target: armv7-linux-androideabi - name: Run clippy uses: actions-rs/clippy-check@v1 with: token: ${{ secrets.GITHUB_TOKEN }} args: --all --all-features - name: Run clippy for Android target uses: actions-rs/clippy-check@v1 with: token: ${{ secrets.GITHUB_TOKEN }} args: --all --features asio --features oboe/fetch-prebuilt --target armv7-linux-androideabi rustfmt-check: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Install stable uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true components: rustfmt - name: Run rustfmt uses: actions-rs/cargo@v1 with: command: fmt args: --all -- --check cargo-publish: if: github.event_name == 'push' && github.ref == 'refs/heads/master' runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: Install rust uses: actions-rs/toolchain@v1 with: toolchain: stable profile: minimal override: true - name: Update apt run: sudo apt update - name: Install alsa run: sudo apt-get install libasound2-dev - name: Verify publish crate uses: katyo/publish-crates@v1 with: dry-run: true ignore-unpublished-changes: true - name: Publish crate uses: katyo/publish-crates@v1 with: ignore-unpublished-changes: true registry-token: ${{ secrets.CRATESIO_TOKEN }} ubuntu-test: runs-on: ubuntu-latest steps: - uses: 
actions/checkout@v2 - name: Update apt run: sudo apt update - name: Install alsa run: sudo apt-get install libasound2-dev - name: Install libjack run: sudo apt-get install libjack-jackd2-dev libjack-jackd2-0 - name: Install stable uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true - name: Run without features uses: actions-rs/cargo@v1 with: command: test args: --all --no-default-features --verbose - name: Run all features uses: actions-rs/cargo@v1 with: command: test args: --all --all-features --verbose linux-check-and-test-armv7: runs-on: ubuntu-latest steps: - name: Checkout sources uses: actions/checkout@v2 - name: Install stable toolchain uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable target: armv7-unknown-linux-gnueabihf override: true - name: Build image run: docker build -t cross/cpal_armv7:v1 ./ - name: Check without features for armv7 uses: actions-rs/cargo@v1 with: command: check use-cross: true args: --target armv7-unknown-linux-gnueabihf --workspace --no-default-features --verbose - name: Test without features for armv7 uses: actions-rs/cargo@v1 with: command: test use-cross: true args: --target armv7-unknown-linux-gnueabihf --workspace --no-default-features --verbose - name: Check all features for armv7 uses: actions-rs/cargo@v1 with: command: check use-cross: true args: --target armv7-unknown-linux-gnueabihf --workspace --all-features --verbose - name: Test all features for armv7 uses: actions-rs/cargo@v1 with: command: test use-cross: true args: --target armv7-unknown-linux-gnueabihf --workspace --all-features --verbose asmjs-wasm32-test: strategy: matrix: target: [asmjs-unknown-emscripten, wasm32-unknown-emscripten] runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Setup Emscripten toolchain uses: mymindstorm/setup-emsdk@v10 with: version: 2.0.9 # https://github.com/rust-lang/rust/issues/85821 - name: Install stable uses: actions-rs/toolchain@v1 with: profile: minimal 
toolchain: stable target: ${{ matrix.target }} - name: Build beep example run: cargo build --example beep --release --target ${{ matrix.target }} wasm32-bindgen-test: strategy: matrix: target: [wasm32-unknown-unknown] runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - name: Install stable uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable target: ${{ matrix.target }} - name: Build beep example run: cargo build --example beep --target ${{ matrix.target }} --features=wasm-bindgen wasm32-wasi-test: strategy: matrix: target: [wasm32-wasi] runs-on: ubuntu-latest steps: - uses: actions/checkout@v1 - name: Install stable uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable target: ${{ matrix.target }} - name: Build beep example run: cargo build --example beep --target ${{ matrix.target }} windows-test: strategy: matrix: version: [x86_64, i686] runs-on: windows-latest steps: - uses: actions/checkout@v2 - name: Install ASIO SDK env: LINK: https://www.steinberg.net/asiosdk run: | curl -L -o asio.zip $env:LINK 7z x -oasio asio.zip move asio\*\* asio\ - name: Install ASIO4ALL run: choco install asio4all - name: Install llvm and clang run: choco install llvm - name: Install stable uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable target: ${{ matrix.version }}-pc-windows-msvc override: true - name: Run without features run: cargo test --all --no-default-features --verbose - name: Run all features run: | $Env:CPAL_ASIO_DIR = "$Env:GITHUB_WORKSPACE\asio" cargo test --all --all-features --verbose macos-test: runs-on: macOS-latest steps: - uses: actions/checkout@v2 - name: Install llvm and clang run: brew install llvm - name: Install stable uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true - name: Build beep example run: cargo build --example beep - name: Run without features run: cargo test --all --no-default-features --verbose - name: Run all features run: cargo test --all 
--all-features --verbose android-check: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Install stable uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true target: armv7-linux-androideabi - name: Check android run: cargo check --example android --target armv7-linux-androideabi --features oboe/fetch-prebuilt --verbose - name: Check beep run: cargo check --example beep --target armv7-linux-androideabi --features oboe/fetch-prebuilt --verbose - name: Check enumerate run: cargo check --example enumerate --target armv7-linux-androideabi --features oboe/fetch-prebuilt --verbose - name: Check feedback run: cargo check --example feedback --target armv7-linux-androideabi --features oboe/fetch-prebuilt --verbose - name: Check record_wav run: cargo check --example record_wav --target armv7-linux-androideabi --features oboe/fetch-prebuilt --verbose android-apk-build: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Install Android targets run: | rustup target add armv7-linux-androideabi rustup target add aarch64-linux-android rustup target add i686-linux-android rustup target add x86_64-linux-android - name: Install Cargo APK run: cargo install cargo-apk - name: Build APK run: cargo apk build --example android ios-build: runs-on: macOS-latest steps: - uses: actions/checkout@v2 - name: Install llvm and clang run: brew install llvm - name: Install stable uses: actions-rs/toolchain@v1 with: profile: minimal toolchain: stable override: true - name: Add iOS targets run: rustup target add aarch64-apple-ios x86_64-apple-ios - name: Install cargo lipo run: cargo install cargo-lipo - name: Build iphonesimulator feedback example run: cd examples/ios-feedback && xcodebuild -scheme cpal-ios-example -configuration Debug -derivedDataPath build -sdk iphonesimulator wasm-beep-build: # this only confirms that the Rust source builds # and checks to prevent regressions like #721. 
# # It does not test the javascript/web integration runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Install Target run: rustup target add wasm32-unknown-unknown - name: Cargo Build working-directory: ./examples/wasm-beep run: cargo build --target wasm32-unknown-unknown cpal-0.15.2/.gitignore000064400000000000000000000001021046102023000126110ustar 00000000000000/target /Cargo.lock .cargo/ .DS_Store recorded.wav rls*.log cpal-0.15.2/CHANGELOG.md000064400000000000000000000156431046102023000124520ustar 00000000000000# Version 0.15.2 (2023-03-30) - webaudio: support multichannel output streams - Update `windows` dependency - wasapi: fix some thread panics # Version 0.15.1 (2023-03-14) - Add feature `oboe-shared-stdcxx` to enable `shared-stdcxx` on `oboe` for Android support - Remove `thiserror` dependency - Swith `mach` dependency to `mach2` # Version 0.15.0 (2023-01-29) - Update `windows-rs`, `jack`, `coreaudio-sys`, `oboe`, `alsa` dependencies - Switch to the `dasp_sample` crate for the sample trait - Switch to `web-sys` on the emscripten target - Adopt edition 2021 - Add disconnection detection on Mac OS # Version 0.14.1 (2022-10-23) - Support the 0.6.1 release of `alsa-rs` - Fix `asio` feature broken in 0.14.0 - `NetBSD` support - CI improvements # Version 0.14.0 (2022-08-22) - Switch to `windows-rs` crate - Turn `ndk-glue` into a dev-dependency and use `ndk-context` instead - Update dependencies (ndk, ndk-glue, parking_lot, once_cell, jack) # Version 0.13.5 (2022-01-28) - Faster sample format conversion - Update dependencies (ndk, oboe, ndk-glue, jack, alsa, nix) # Version 0.13.4 (2021-08-08) - wasapi: Allow both threading models and switch the default to STA - Update dependencies (core-foundation-sys, jni, rust-jack) - Alsa: improve stream setup parameters # Version 0.13.3 (2021-03-29) - Give each thread a unique name - Fix distortion regression on some alsa configs # Version 0.13.2 (2021-03-16) - Update dependencies (ndk, nix, oboe, jni, etc) # 
Version 0.13.1 (2020-11-08) - Don't panic when device is plugged out on Windows - Update `parking_lot` dependency # Version 0.13.0 (2020-10-28) - Add Android support via `oboe-rs`. - Add Android APK build an CI job. # Version 0.12.1 (2020-07-23) - Bugfix release to get the asio feature working again. # Version 0.12.0 (2020-07-09) - Large refactor removing the blocking EventLoop API. - Rename many `Format` types to `StreamConfig`: - `Format` type's `data_type` field renamed to `sample_format`. - `Shape` -> `StreamConfig` - The configuration input required to build a stream. - `Format` -> `SupportedStreamConfig` - Describes a single supported stream configuration. - `SupportedFormat` -> `SupportedStreamConfigRange` - Describes a range of supported configurations. - `Device::default_input/output_format` -> `Device::default_input/output_config`. - `Device::supported_input/output_formats` -> `Device::supported_input/output_configs`. - `Device::SupportedInput/OutputFormats` -> `Device::SupportedInput/OutputConfigs`. - `SupportedFormatsError` -> `SupportedStreamConfigsError` - `DefaultFormatError` -> `DefaultStreamConfigError` - `BuildStreamError::FormatNotSupported` -> `BuildStreamError::StreamConfigNotSupported` - Address deprecated use of `mem::uninitialized` in WASAPI. - Removed `UnknownTypeBuffer` in favour of specifying sample type. - Added `build_input/output_stream_raw` methods allowing for dynamically handling sample format type. - Added support for DragonFly platform. - Add `InputCallbackInfo` and `OutputCallbackInfo` types and update expected user data callback function signature to provide these. # Version 0.11.0 (2019-12-11) - Fix some underruns that could occur in ALSA. - Add name to `HostId`. - Use `snd_pcm_hw_params_set_buffer_time_near` rather than `set_buffer_time_max` in ALSA backend. - Remove many uses of `std::mem::uninitialized`. - Fix WASAPI capture logic. - Panic on stream ID overflow rather than returning an error. 
- Use `ringbuffer` crate in feedback example. - Move errors into a separate module. - Switch from `failure` to `thiserror` for error handling. - Add `winbase` winapi feature to solve windows compile error issues. - Lots of CI improvements. # Version 0.10.0 (2019-07-05) - core-foundation-sys and coreaudio-rs version bumps. - Add an ASIO host, available under Windows. - Introduce a new Host API, adding support for alternative audio APIs. - Remove sleep loop on macOS in favour of using a `Condvar`. - Allow users to handle stream callback errors with a new `StreamEvent` type. - Overhaul error handling throughout the crate. - Remove unnecessary Mutex from ALSA and WASAPI backends in favour of channels. - Remove `panic!` from OutputBuffer Deref impl as it is no longer necessary. # Version 0.9.0 (2019-06-06) - Better buffer handling - Fix logic error in frame/sample size - Added error handling for unknown ALSA device errors - Fix resuming a paused stream on Windows (wasapi). - Implement `default_output_format` for emscripten backend. # Version 0.8.1 (2018-03-18) - Fix the handling of non-default sample rates for coreaudio input streams. # Version 0.8.0 (2018-02-15) - Add `record_wav.rs` example. Records 3 seconds to `$CARGO_MANIFEST_DIR/recorded.wav` using default input device. - Update `enumerate.rs` example to display default input/output devices and formats. - Add input stream support to coreaudio, alsa and windows backends. - Introduce `StreamData` type for handling either input or output streams in `EventLoop::run` callback. - Add `Device::supported_{input/output}_formats` methods. - Add `Device::default_{input/output}_format` methods. - Add `default_{input/output}_device` functions. - Replace usage of `Voice` with `Stream` throughout the crate. - Remove `Endpoint` in favour of `Device` for supporting both input and output streams. # Version 0.7.0 (2018-02-04) - Rename `ChannelsCount` to `ChannelCount`. - Rename `SamplesRate` to `SampleRate`. 
- Rename the `min_samples_rate` field of `SupportedFormat` to `min_sample_rate` - Rename the `with_max_samples_rate()` method of`SupportedFormat` to `with_max_sample_rate()` - Rename the `samples_rate` field of `Format` to `sample_rate` - Changed the type of the `channels` field of the `SupportedFormat` struct from `Vec` to `ChannelCount` (an alias to `u16`) - Remove unused ChannelPosition API. - Implement `Endpoint` and `Format` Enumeration for macOS. - Implement format handling for macos `build_voice` method. # Version 0.6.0 (2017-12-11) - Changed the emscripten backend to consume less CPU. - Added improvements to the crate documentation. - Implement `pause` and `play` for ALSA backend. - Reduced the number of allocations in the CoreAudio backend. - Fixes for macOS build (#186, #189). # Version 0.5.1 (2017-10-21) - Added `Sample::to_i16()`, `Sample::to_u16()` and `Sample::from`. # Version 0.5.0 (2017-10-21) - Removed the dependency on the `futures` library. - Removed the `Voice` and `SamplesStream` types. - Added `EventLoop::build_voice`, `EventLoop::destroy_voice`, `EventLoop::play`, and `EventLoop::pause` that can be used to create, destroy, play and pause voices. - Added a `VoiceId` struct that is now used to identify a voice owned by an `EventLoop`. - Changed `EventLoop::run()` to take a callback that is called whenever a voice requires sound data. - Changed `supported_formats()` to produce a list of `SupportedFormat` instead of `Format`. A `SupportedFormat` must then be turned into a `Format` in order to build a voice. cpal-0.15.2/Cargo.lock0000644000001026600000000000100100200ustar # This file is automatically @generated by Cargo. # It is not intended for manual editing. 
version = 3 [[package]] name = "aho-corasick" version = "0.7.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc936419f96fa211c1b9166887b38e5e40b19958e5b895be7c1f93adec7071ac" dependencies = [ "memchr", ] [[package]] name = "alsa" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8512c9117059663fb5606788fbca3619e2a91dac0e3fe516242eab1fa6be5e44" dependencies = [ "alsa-sys", "bitflags", "libc", "nix", ] [[package]] name = "alsa-sys" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "db8fee663d06c4e303404ef5f40488a53e062f89ba8bfed81f42325aafad1527" dependencies = [ "libc", "pkg-config", ] [[package]] name = "ansi_term" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2" dependencies = [ "winapi", ] [[package]] name = "anstream" version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "342258dd14006105c2b75ab1bd7543a03bdf0cfc94383303ac212a04939dff6f" dependencies = [ "anstyle", "anstyle-parse", "anstyle-wincon", "concolor-override", "concolor-query", "is-terminal", "utf8parse", ] [[package]] name = "anstyle" version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23ea9e81bd02e310c216d080f6223c179012256e5151c41db88d12c88a1684d2" [[package]] name = "anstyle-parse" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7d1bb534e9efed14f3e5f44e7dd1a4f709384023a4165199a4241e18dff0116" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-wincon" version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3127af6145b149f3287bb9a0d10ad9c5692dba8c53ad48285e5bec4063834fa" dependencies = [ "anstyle", "windows-sys", ] [[package]] name = "anyhow" version = "1.0.70" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7de8ce5e0f9f8d88245311066a578d72b7af3e7088f32783804676302df237e4" [[package]] name = "asio-sys" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ca0cd2f17aa86bcd7eb6b6a423943352843695eb6adee9bd24a09a9e7568b9a" dependencies = [ "bindgen 0.56.0", "cc", "num-derive", "num-traits", "once_cell", "walkdir", ] [[package]] name = "atty" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" dependencies = [ "hermit-abi 0.1.19", "libc", "winapi", ] [[package]] name = "autocfg" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "bindgen" version = "0.56.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2da379dbebc0b76ef63ca68d8fc6e71c0f13e59432e0987e508c1820e6ab5239" dependencies = [ "bitflags", "cexpr 0.4.0", "clang-sys", "clap 2.34.0", "env_logger", "lazy_static", "lazycell", "log", "peeking_take_while", "proc-macro2", "quote", "regex", "rustc-hash", "shlex 0.1.1", "which", ] [[package]] name = "bindgen" version = "0.64.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4243e6031260db77ede97ad86c27e501d646a27ab57b59a574f725d98ab1fb4" dependencies = [ "bitflags", "cexpr 0.6.0", "clang-sys", "lazy_static", "lazycell", "peeking_take_while", "proc-macro2", "quote", "regex", "rustc-hash", "shlex 1.1.0", "syn 1.0.109", ] [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bumpalo" version = "3.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535" [[package]] name = "bytes" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" [[package]] name = "cc" version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50d30906286121d95be3d479533b458f87493b30a4b5f79a607db8f5d11aa91f" dependencies = [ "jobserver", ] [[package]] name = "cesu8" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" [[package]] name = "cexpr" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27" dependencies = [ "nom 5.1.2", ] [[package]] name = "cexpr" version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" dependencies = [ "nom 7.1.3", ] [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "clang-sys" version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f" dependencies = [ "glob", "libc", "libloading", ] [[package]] name = "clap" version = "2.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" dependencies = [ "ansi_term", "atty", "bitflags", "strsim 0.8.0", "textwrap", "unicode-width", "vec_map", ] [[package]] name = "clap" version = "4.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"046ae530c528f252094e4a77886ee1374437744b2bff1497aa898bbddbbb29b3" dependencies = [ "clap_builder", "clap_derive", "once_cell", ] [[package]] name = "clap_builder" version = "4.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "223163f58c9a40c3b0a43e1c4b50a9ce09f007ea2cb1ec258a687945b4b7929f" dependencies = [ "anstream", "anstyle", "bitflags", "clap_lex", "strsim 0.10.0", ] [[package]] name = "clap_derive" version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9644cd56d6b87dbe899ef8b053e331c0637664e9e21a33dfcdc36093f5c5c4" dependencies = [ "heck", "proc-macro2", "quote", "syn 2.0.11", ] [[package]] name = "clap_lex" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a2dd5a6fe8c6e3502f568a6353e5273bbb15193ad9a89e457b9970798efbea1" [[package]] name = "combine" version = "4.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" dependencies = [ "bytes", "memchr", ] [[package]] name = "concolor-override" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a855d4a1978dc52fb0536a04d384c2c0c1aa273597f08b77c8c4d3b2eec6037f" [[package]] name = "concolor-query" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "88d11d52c3d7ca2e6d0040212be9e4dbbcd78b6447f535b6b561f449427944cf" dependencies = [ "windows-sys", ] [[package]] name = "core-foundation-sys" version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e7ca8a5221364ef15ce201e8ed2f609fc312682a8f4e0e3d4aa5879764e0fa3b" [[package]] name = "core-foundation-sys" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc" [[package]] name = "coreaudio-rs" version = "0.11.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "cb17e2d1795b1996419648915df94bc7103c28f7b48062d7acf4652fc371b2ff" dependencies = [ "bitflags", "core-foundation-sys 0.6.2", "coreaudio-sys", ] [[package]] name = "coreaudio-sys" version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f034b2258e6c4ade2f73bf87b21047567fb913ee9550837c2316d139b0262b24" dependencies = [ "bindgen 0.64.0", ] [[package]] name = "cpal" version = "0.15.2" dependencies = [ "alsa", "anyhow", "asio-sys", "clap 4.2.1", "core-foundation-sys 0.8.3", "coreaudio-rs", "dasp_sample", "hound", "jack", "jni 0.19.0", "js-sys", "libc", "mach2", "ndk", "ndk-context", "ndk-glue", "num-traits", "oboe", "once_cell", "parking_lot", "ringbuf", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", "windows", ] [[package]] name = "crossbeam-utils" version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" dependencies = [ "cfg-if", ] [[package]] name = "darling" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a01d95850c592940db9b8194bc39f4bc0e89dee5c4265e4b1807c34a9aba453c" dependencies = [ "darling_core", "darling_macro", ] [[package]] name = "darling_core" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim 0.10.0", "syn 1.0.109", ] [[package]] name = "darling_macro" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core", "quote", "syn 1.0.109", ] [[package]] name = "dasp_sample" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"0c87e182de0887fd5361989c677c4e8f5000cd9491d6d563161a8f3a5519fc7f" [[package]] name = "env_logger" version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a19187fea3ac7e84da7dacf48de0c45d63c6a76f9490dae389aead16c243fce3" dependencies = [ "atty", "humantime", "log", "regex", "termcolor", ] [[package]] name = "errno" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50d6a0976c999d473fe89ad888d5a284e55366d9dc9038b1ba2aa15128c4afa0" dependencies = [ "errno-dragonfly", "libc", "windows-sys", ] [[package]] name = "errno-dragonfly" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" dependencies = [ "cc", "libc", ] [[package]] name = "fnv" version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "glob" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" dependencies = [ "libc", ] [[package]] name = "hermit-abi" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286" [[package]] name = 
"hound" version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d13cdbd5dbb29f9c88095bbdc2590c9cba0d0a1269b983fef6b2cdd7e9f4db1" [[package]] name = "humantime" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "ident_case" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "indexmap" version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown", ] [[package]] name = "io-lifetimes" version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09270fd4fa1111bc614ed2246c7ef56239a3063d5be0d1ec3b589c505d400aeb" dependencies = [ "hermit-abi 0.3.1", "libc", "windows-sys", ] [[package]] name = "is-terminal" version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "256017f749ab3117e93acb91063009e1f1bb56d03965b14c2c8df4eb02c524d8" dependencies = [ "hermit-abi 0.3.1", "io-lifetimes", "rustix", "windows-sys", ] [[package]] name = "jack" version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e5a18a3c2aefb354fb77111ade228b20267bdc779de84e7a4ccf7ea96b9a6cd" dependencies = [ "bitflags", "jack-sys", "lazy_static", "libc", "log", ] [[package]] name = "jack-sys" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6013b7619b95a22b576dfb43296faa4ecbe40abbdb97dfd22ead520775fc86ab" dependencies = [ "bitflags", "lazy_static", "libc", "libloading", "log", "pkg-config", ] [[package]] name = "jni" version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" dependencies = [ "cesu8", "combine", "jni-sys", "log", "thiserror", "walkdir", ] [[package]] name = "jni" version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "039022cdf4d7b1cf548d31f60ae783138e5fd42013f6271049d7df7afadef96c" dependencies = [ "cesu8", "combine", "jni-sys", "log", "thiserror", "walkdir", ] [[package]] name = "jni-sys" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "jobserver" version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" dependencies = [ "libc", ] [[package]] name = "js-sys" version = "0.3.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "445dde2150c55e483f3d8416706b97ec8e8237c307e5b7b4b8dd15e6af2a0730" dependencies = [ "wasm-bindgen", ] [[package]] name = "lazy_static" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "lazycell" version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" [[package]] name = "libc" version = "0.2.140" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99227334921fae1a979cf0bfdfcc6b3e5ce376ef57e16fb6fb3ea2ed6095f80c" [[package]] name = "libloading" version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" dependencies = [ "cfg-if", "winapi", ] [[package]] name = "linux-raw-sys" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"cd550e73688e6d578f0ac2119e32b797a327631a42f9433e59d02e139c8df60d" [[package]] name = "lock_api" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" dependencies = [ "autocfg", "scopeguard", ] [[package]] name = "log" version = "0.4.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" dependencies = [ "cfg-if", ] [[package]] name = "mach2" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d0d1830bcd151a6fc4aea1369af235b36c1528fe976b8ff678683c9995eade8" dependencies = [ "libc", ] [[package]] name = "memchr" version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "minimal-lexical" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "ndk" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "451422b7e4718271c8b5b3aadf5adedba43dc76312454b387e98fae0fc951aa0" dependencies = [ "bitflags", "jni-sys", "ndk-sys", "num_enum", "raw-window-handle", "thiserror", ] [[package]] name = "ndk-context" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "27b02d87554356db9e9a873add8782d4ea6e3e58ea071a9adb9a2e8ddb884a8b" [[package]] name = "ndk-glue" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0434fabdd2c15e0aab768ca31d5b7b333717f03cf02037d5a0a3ff3c278ed67f" dependencies = [ "libc", "log", "ndk", "ndk-context", "ndk-macro", "ndk-sys", "once_cell", "parking_lot", ] [[package]] name = "ndk-macro" version = "0.3.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "0df7ac00c4672f9d5aece54ee3347520b7e20f158656c7db2e6de01902eb7a6c" dependencies = [ "darling", "proc-macro-crate", "proc-macro2", "quote", "syn 1.0.109", ] [[package]] name = "ndk-sys" version = "0.4.1+23.1.7779620" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3cf2aae958bd232cac5069850591667ad422d263686d75b52a065f9badeee5a3" dependencies = [ "jni-sys", ] [[package]] name = "nix" version = "0.24.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069" dependencies = [ "bitflags", "cfg-if", "libc", ] [[package]] name = "nom" version = "5.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af" dependencies = [ "memchr", "version_check", ] [[package]] name = "nom" version = "7.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a" dependencies = [ "memchr", "minimal-lexical", ] [[package]] name = "num-derive" version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "876a53fff98e03a936a674b29568b0e605f06b29372c2489ff4de23f1949743d" dependencies = [ "proc-macro2", "quote", "syn 1.0.109", ] [[package]] name = "num-traits" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "578ede34cf02f8924ab9447f50c28075b4d3e5b269972345e7e0372b38c6cdcd" dependencies = [ "autocfg", ] [[package]] name = "num_enum" version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f646caf906c20226733ed5b1374287eb97e3c2a5c227ce668c1f2ce20ae57c9" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", "syn 1.0.109", ] [[package]] name = "oboe" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8868cc237ee02e2d9618539a23a8d228b9bb3fc2e7a5b11eed3831de77c395d0" dependencies = [ "jni 0.20.0", "ndk", "ndk-context", "num-derive", "num-traits", "oboe-sys", ] [[package]] name = "oboe-sys" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f44155e7fb718d3cfddcf70690b2b51ac4412f347cd9e4fbe511abe9cd7b5f2" dependencies = [ "cc", ] [[package]] name = "once_cell" version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" [[package]] name = "parking_lot" version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", "parking_lot_core", ] [[package]] name = "parking_lot_core" version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" dependencies = [ "cfg-if", "libc", "redox_syscall", "smallvec", "windows-sys", ] [[package]] name = "peeking_take_while" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099" [[package]] name = "pkg-config" version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ac9a59f73473f1b8d852421e59e64809f025994837ef743615c6d0c5b305160" [[package]] name = "proc-macro-crate" version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ "once_cell", "toml_edit", 
] [[package]] name = "proc-macro2" version = "1.0.54" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e472a104799c74b514a57226160104aa483546de37e839ec50e3c2e41dd87534" dependencies = [ "unicode-ident", ] [[package]] name = "quote" version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" dependencies = [ "proc-macro2", ] [[package]] name = "raw-window-handle" version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f851a03551ceefd30132e447f07f96cb7011d6b658374f3aed847333adb5559" [[package]] name = "redox_syscall" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ "bitflags", ] [[package]] name = "regex" version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.6.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "ringbuf" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93ca10b9c9e53ac855a2d6953bce34cef6edbac32c4b13047a4d59d67299420a" dependencies = [ "crossbeam-utils", ] [[package]] name = "rustc-hash" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustix" version = "0.37.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e78cc525325c06b4a7ff02db283472f3c042b7ff0c391f96c6d5ac6f4f91b75" dependencies = [ "bitflags", "errno", "io-lifetimes", 
"libc", "linux-raw-sys", "windows-sys", ] [[package]] name = "same-file" version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" dependencies = [ "winapi-util", ] [[package]] name = "scopeguard" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "shlex" version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fdf1b9db47230893d76faad238fd6097fd6d6a9245cd7a4d90dbd639536bbd2" [[package]] name = "shlex" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3" [[package]] name = "smallvec" version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] name = "strsim" version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" [[package]] name = "strsim" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "syn" version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "syn" version = "2.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21e3787bb71465627110e7d87ed4faaa36c1f61042ee67badb9e2ef173accc40" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "termcolor" version = "1.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" dependencies = [ "winapi-util", ] [[package]] name = "textwrap" version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" dependencies = [ "unicode-width", ] [[package]] name = "thiserror" version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", "syn 2.0.11", ] [[package]] name = "toml_datetime" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3ab8ed2edee10b50132aed5f331333428b011c99402b5a534154ed15746f9622" [[package]] name = "toml_edit" version = "0.19.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "239410c8609e8125456927e6707163a3b1fdb40561e4b803bc041f466ccfdc13" dependencies = [ "indexmap", "toml_datetime", "winnow", ] [[package]] name = "unicode-ident" version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5464a87b239f13a63a501f2701565754bae92d243d4bb7eb12f6d57d2269bf4" [[package]] name = "unicode-width" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "utf8parse" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "vec_map" version = "0.8.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "version_check" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" [[package]] name = "walkdir" version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" dependencies = [ "same-file", "winapi-util", ] [[package]] name = "wasm-bindgen" version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "31f8dcbc21f30d9b8f2ea926ecb58f6b91192c17e9d33594b3df58b2007ca53b" dependencies = [ "cfg-if", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95ce90fd5bcc06af55a641a86428ee4229e44e07033963a2290a8e241607ccb9" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", "syn 1.0.109", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" version = "0.4.34" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f219e0d211ba40266969f6dbdd90636da12f75bee4fc9d6c23d1260dadb51454" dependencies = [ "cfg-if", "js-sys", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c21f77c0bedc37fd5dc21f897894a5ca01e7bb159884559461862ae90c0b4c5" dependencies = [ "quote", "wasm-bindgen-macro-support", ] [[package]] name = "wasm-bindgen-macro-support" version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" dependencies = [ "proc-macro2", "quote", "syn 1.0.109", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = 
"wasm-bindgen-shared" version = "0.2.84" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0046fef7e28c3804e5e38bfa31ea2a0f73905319b677e57ebe37e49358989b5d" [[package]] name = "web-sys" version = "0.3.61" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e33b99f4b23ba3eec1a53ac264e35a755f00e966e0065077d6027c0f575b0b97" dependencies = [ "js-sys", "wasm-bindgen", ] [[package]] name = "which" version = "3.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d011071ae14a2f6671d0b74080ae0cd8ebf3a6f8c9589a2cd45f23126fe29724" dependencies = [ "libc", ] [[package]] name = "winapi" version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" dependencies = [ "winapi-i686-pc-windows-gnu", "winapi-x86_64-pc-windows-gnu", ] [[package]] name = "winapi-i686-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" dependencies = [ "winapi", ] [[package]] name = "winapi-x86_64-pc-windows-gnu" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows" version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cdacb41e6a96a052c6cb63a144f24900236121c6f63f4f8219fef5977ecb0c25" dependencies = [ "windows-targets", ] [[package]] name = "windows-sys" version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" dependencies = [ 
"windows-targets", ] [[package]] name = "windows-targets" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" [[package]] name = "windows_aarch64_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" [[package]] name = "windows_i686_gnu" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" [[package]] name = "windows_i686_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" [[package]] name = "windows_x86_64_gnu" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" [[package]] name = "windows_x86_64_msvc" version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" [[package]] name = "winnow" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ae8970b36c66498d8ff1d66685dc86b91b29db0c7739899012f63a63814b4b28" dependencies = [ "memchr", ] cpal-0.15.2/Cargo.toml0000644000000116350000000000100100440ustar # THIS FILE IS AUTOMATICALLY GENERATED BY CARGO # # When uploading crates to the registry Cargo will automatically # "normalize" Cargo.toml files for maximal compatibility # with all versions of Cargo and also rewrite `path` dependencies # to registry (e.g., crates.io) dependencies. # # If you are reading this file be aware that the original Cargo.toml # will likely look very different (and much more reasonable). # See Cargo.toml.orig for the original contents. [package] edition = "2021" name = "cpal" version = "0.15.2" description = "Low-level cross-platform audio I/O library in pure Rust." documentation = "https://docs.rs/cpal" readme = "README.md" keywords = [ "audio", "sound", ] license = "Apache-2.0" repository = "https://github.com/rustaudio/cpal" [[example]] name = "android" crate-type = ["cdylib"] path = "examples/android.rs" [[example]] name = "beep" [[example]] name = "enumerate" [[example]] name = "feedback" [[example]] name = "record_wav" [[example]] name = "synth_tones" [dependencies.dasp_sample] version = "0.11" [dev-dependencies.anyhow] version = "1.0" [dev-dependencies.clap] version = "4.0" features = ["derive"] [dev-dependencies.hound] version = "3.5" [dev-dependencies.ringbuf] version = "0.3" [features] asio = [ "asio-sys", "num-traits", ] oboe-shared-stdcxx = ["oboe/shared-stdcxx"] [target."cfg(all(target_arch = \"wasm32\", target_os = \"unknown\"))".dependencies.js-sys] version = "0.3.35" [target."cfg(all(target_arch = \"wasm32\", target_os = \"unknown\"))".dependencies.wasm-bindgen] version = "0.2.58" optional = true [target."cfg(all(target_arch = \"wasm32\", target_os = \"unknown\"))".dependencies.web-sys] version = "0.3.35" features = [ "AudioContext", "AudioContextOptions", "AudioBuffer", "AudioBufferSourceNode", "AudioNode", "AudioDestinationNode", "Window", "AudioContextState", ] 
[target."cfg(any(target_os = \"linux\", target_os = \"dragonfly\", target_os = \"freebsd\", target_os = \"netbsd\"))".dependencies.alsa] version = "0.7" [target."cfg(any(target_os = \"linux\", target_os = \"dragonfly\", target_os = \"freebsd\", target_os = \"netbsd\"))".dependencies.jack] version = "0.11" optional = true [target."cfg(any(target_os = \"linux\", target_os = \"dragonfly\", target_os = \"freebsd\", target_os = \"netbsd\"))".dependencies.libc] version = "0.2" [target."cfg(any(target_os = \"linux\", target_os = \"dragonfly\", target_os = \"freebsd\", target_os = \"netbsd\"))".dependencies.parking_lot] version = "0.12" [target."cfg(any(target_os = \"macos\", target_os = \"ios\"))".dependencies.core-foundation-sys] version = "0.8.2" [target."cfg(any(target_os = \"macos\", target_os = \"ios\"))".dependencies.mach2] version = "0.4" [target."cfg(any(target_os = \"macos\", target_os = \"ios\"))".dependencies.parking_lot] version = "0.12" [target."cfg(target_os = \"android\")".dependencies.jni] version = "0.19" [target."cfg(target_os = \"android\")".dependencies.ndk] version = "0.7" [target."cfg(target_os = \"android\")".dependencies.ndk-context] version = "0.1" [target."cfg(target_os = \"android\")".dependencies.oboe] version = "0.5" features = ["java-interface"] [target."cfg(target_os = \"android\")".dev-dependencies.ndk-glue] version = "0.7" [target."cfg(target_os = \"emscripten\")".dependencies.js-sys] version = "0.3.35" [target."cfg(target_os = \"emscripten\")".dependencies.wasm-bindgen] version = "0.2.58" [target."cfg(target_os = \"emscripten\")".dependencies.wasm-bindgen-futures] version = "0.4.33" [target."cfg(target_os = \"emscripten\")".dependencies.web-sys] version = "0.3.35" features = [ "AudioContext", "AudioContextOptions", "AudioBuffer", "AudioBufferSourceNode", "AudioNode", "AudioDestinationNode", "Window", "AudioContextState", ] [target."cfg(target_os = \"ios\")".dependencies.coreaudio-rs] version = "0.11" features = [ "audio_unit", 
"core_audio", "audio_toolbox", ] default-features = false [target."cfg(target_os = \"macos\")".dependencies.coreaudio-rs] version = "0.11" features = [ "audio_unit", "core_audio", ] default-features = false [target."cfg(target_os = \"windows\")".dependencies.asio-sys] version = "0.2" optional = true [target."cfg(target_os = \"windows\")".dependencies.num-traits] version = "0.2.6" optional = true [target."cfg(target_os = \"windows\")".dependencies.once_cell] version = "1.12" [target."cfg(target_os = \"windows\")".dependencies.parking_lot] version = "0.12" [target."cfg(target_os = \"windows\")".dependencies.windows] version = "0.46.0" features = [ "Win32_Media_Audio", "Win32_Foundation", "Win32_System_Com", "Win32_Devices_Properties", "Win32_Media_KernelStreaming", "Win32_System_Com_StructuredStorage", "Win32_System_Ole", "Win32_System_Threading", "Win32_Security", "Win32_System_SystemServices", "Win32_System_WindowsProgramming", "Win32_Media_Multimedia", "Win32_UI_Shell_PropertiesSystem", ] cpal-0.15.2/Cargo.toml.orig000064400000000000000000000062251046102023000135240ustar 00000000000000[package] name = "cpal" version = "0.15.2" description = "Low-level cross-platform audio I/O library in pure Rust." repository = "https://github.com/rustaudio/cpal" documentation = "https://docs.rs/cpal" license = "Apache-2.0" keywords = ["audio", "sound"] edition = "2021" [features] asio = ["asio-sys", "num-traits"] # Only available on Windows. See README for setup instructions. oboe-shared-stdcxx = ["oboe/shared-stdcxx"] # Only available on Android. See README for what it does. 
[dependencies] dasp_sample = "0.11" [dev-dependencies] anyhow = "1.0" hound = "3.5" ringbuf = "0.3" clap = { version = "4.0", features = ["derive"] } [target.'cfg(target_os = "android")'.dev-dependencies] ndk-glue = "0.7" [target.'cfg(target_os = "windows")'.dependencies] windows = { version = "0.46.0", features = ["Win32_Media_Audio", "Win32_Foundation", "Win32_System_Com", "Win32_Devices_Properties", "Win32_Media_KernelStreaming", "Win32_System_Com_StructuredStorage", "Win32_System_Ole", "Win32_System_Threading", "Win32_Security", "Win32_System_SystemServices", "Win32_System_WindowsProgramming", "Win32_Media_Multimedia", "Win32_UI_Shell_PropertiesSystem"]} asio-sys = { version = "0.2", path = "asio-sys", optional = true } num-traits = { version = "0.2.6", optional = true } parking_lot = "0.12" once_cell = "1.12" [target.'cfg(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd"))'.dependencies] alsa = "0.7" libc = "0.2" parking_lot = "0.12" jack = { version = "0.11", optional = true } [target.'cfg(any(target_os = "macos", target_os = "ios"))'.dependencies] core-foundation-sys = "0.8.2" # For linking to CoreFoundation.framework and handling device name `CFString`s. mach2 = "0.4" # For access to mach_timebase type. 
parking_lot = "0.12" [target.'cfg(target_os = "macos")'.dependencies] coreaudio-rs = { version = "0.11", default-features = false, features = ["audio_unit", "core_audio"] } [target.'cfg(target_os = "ios")'.dependencies] coreaudio-rs = { version = "0.11", default-features = false, features = ["audio_unit", "core_audio", "audio_toolbox"] } [target.'cfg(target_os = "emscripten")'.dependencies] wasm-bindgen = { version = "0.2.58" } wasm-bindgen-futures = "0.4.33" js-sys = { version = "0.3.35" } web-sys = { version = "0.3.35", features = [ "AudioContext", "AudioContextOptions", "AudioBuffer", "AudioBufferSourceNode", "AudioNode", "AudioDestinationNode", "Window", "AudioContextState"] } [target.'cfg(all(target_arch = "wasm32", target_os = "unknown"))'.dependencies] wasm-bindgen = { version = "0.2.58", optional = true } js-sys = { version = "0.3.35" } web-sys = { version = "0.3.35", features = [ "AudioContext", "AudioContextOptions", "AudioBuffer", "AudioBufferSourceNode", "AudioNode", "AudioDestinationNode", "Window", "AudioContextState"] } [target.'cfg(target_os = "android")'.dependencies] oboe = { version = "0.5", features = [ "java-interface" ] } ndk = "0.7" ndk-context = "0.1" jni = "0.19" [[example]] name = "android" path = "examples/android.rs" crate-type = ["cdylib"] [[example]] name = "beep" [[example]] name = "enumerate" [[example]] name = "feedback" [[example]] name = "record_wav" [[example]] name = "synth_tones" cpal-0.15.2/Cross.toml000064400000000000000000000002231046102023000126130ustar 00000000000000[target.armv7-unknown-linux-gnueabihf] image = "cross/cpal_armv7:v1" [target.armv7-unknown-linux-gnueabihf.env] passthrough = [ "RUSTFLAGS", ]cpal-0.15.2/Dockerfile000064400000000000000000000005111046102023000126170ustar 00000000000000FROM rustembedded/cross:armv7-unknown-linux-gnueabihf ENV PKG_CONFIG_ALLOW_CROSS 1 ENV PKG_CONFIG_PATH /usr/lib/arm-linux-gnueabihf/pkgconfig/ RUN dpkg --add-architecture armhf && \ apt-get update && \ apt-get install 
libasound2-dev:armhf -y && \ apt-get install libjack-jackd2-dev:armhf libjack-jackd2-0:armhf -y \cpal-0.15.2/LICENSE000064400000000000000000000261351046102023000116440ustar 00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the 
Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. 
Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. 
To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. cpal-0.15.2/README.md000064400000000000000000000117201046102023000121100ustar 00000000000000# CPAL - Cross-Platform Audio Library [![Actions Status](https://github.com/RustAudio/cpal/workflows/cpal/badge.svg)](https://github.com/RustAudio/cpal/actions) [![Crates.io](https://img.shields.io/crates/v/cpal.svg)](https://crates.io/crates/cpal) [![docs.rs](https://docs.rs/cpal/badge.svg)](https://docs.rs/cpal/) Low-level library for audio input and output in pure Rust. This library currently supports the following: - Enumerate supported audio hosts. - Enumerate all available audio devices. - Get the current default input and output devices. - Enumerate known supported input and output stream formats for a device. - Get the current default input and output stream formats for a device. - Build and run input and output PCM streams on a chosen device with a given stream format. 
Currently, supported hosts include: - Linux (via ALSA or JACK) - Windows (via WASAPI by default, see ASIO instructions below) - macOS (via CoreAudio) - iOS (via CoreAudio) - Android (via Oboe) - Emscripten Note that on Linux, the ALSA development files are required. These are provided as part of the `libasound2-dev` package on Debian and Ubuntu distributions and `alsa-lib-devel` on Fedora. ## Compiling for Web Assembly If you are interested in using CPAL with WASM, please see [this guide](https://github.com/RustAudio/cpal/wiki/Setting-up-a-new-CPAL-WASM-project) in our Wiki which walks through setting up a new project from scratch. ## Feature flags for audio backends Some audio backends are optional and will only be compiled with a [feature flag](https://doc.rust-lang.org/cargo/reference/features.html). - JACK (on Linux): `jack` - ASIO (on Windows): `asio` Oboe can either use a shared or static runtime. The static runtime is used by default, but activating the `oboe-shared-stdcxx` feature makes it use the shared runtime, which requires `libc++_shared.so` from the Android NDK to be present during execution. ## ASIO on Windows [ASIO](https://en.wikipedia.org/wiki/Audio_Stream_Input/Output) is an audio driver protocol by Steinberg. While it is available on multiple operating systems, it is most commonly used on Windows to work around limitations of WASAPI including access to large numbers of channels and lower-latency audio processing. CPAL allows for using the ASIO SDK as the audio host on Windows instead of WASAPI. To do so, follow these steps: 1. **Download the ASIO SDK** `.zip` from [this link](https://www.steinberg.net/en/company/developers.html). The version as of writing this is 2.3.1. 2. Extract the files and place the directory somewhere you are happy for it to stay (e.g. `~/.asio`). 3. Assign the full path of the directory (that contains the `readme`, `changes`, `ASIO SDK 2.3` pdf, etc) to the `CPAL_ASIO_DIR` environment variable. 
This is necessary for the `asio-sys` build script to build and bind to the SDK. 4. `bindgen`, the library used to generate bindings to the C++ SDK, requires clang. **Download and install LLVM** from [here](http://releases.llvm.org/download.html) under the "Pre-Built Binaries" section. The version as of writing this is 7.0.0. 5. Add the LLVM `bin` directory to a `LIBCLANG_PATH` environment variable. If you installed LLVM to the default directory, this should work in the command prompt: ``` setx LIBCLANG_PATH "C:\Program Files\LLVM\bin" ``` 6. If you don't have any ASIO devices or drivers available, you can [**download and install ASIO4ALL**](http://www.asio4all.org/). Be sure to enable the "offline" feature during installation despite what the installer says about it being useless. 7. **Loading VCVARS**. `rust-bindgen` uses the C++ tool-chain when generating bindings to the ASIO SDK. As a result, it is necessary to load some environment variables in the command prompt that we used to build our project. On 64-bit machines run: ``` "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" amd64 ``` On 32-bit machines run: ``` "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x86 ``` Note that, depending on your version of Visual Studio, this script might be in a slightly different location. 8. Select the ASIO host at the start of our program with the following code: ```rust let host; #[cfg(target_os = "windows")] { host = cpal::host_from_id(cpal::HostId::Asio).expect("failed to initialise ASIO host"); } ``` If you run into compilations errors produced by `asio-sys` or `bindgen`, make sure that `CPAL_ASIO_DIR` is set correctly and try `cargo clean`. 9. 
Make sure to enable the `asio` feature when building CPAL: ``` cargo build --features "asio" ``` or if you are using CPAL as a dependency in a downstream project, enable the feature like this: ```toml cpal = { version = "*", features = ["asio"] } ``` In the future we would like to work on automating this process to make it easier, but we are not familiar enough with the ASIO license to do so yet. *Updated as of ASIO version 2.3.3.* cpal-0.15.2/build.rs000064400000000000000000000005401046102023000122740ustar 00000000000000use std::env; const CPAL_ASIO_DIR: &str = "CPAL_ASIO_DIR"; fn main() { println!("cargo:rerun-if-env-changed={}", CPAL_ASIO_DIR); // If ASIO directory isn't set silently return early // otherwise set the asio config flag match env::var(CPAL_ASIO_DIR) { Err(_) => {} Ok(_) => println!("cargo:rustc-cfg=asio"), }; } cpal-0.15.2/examples/android.rs000064400000000000000000000056061046102023000144430ustar 00000000000000#![allow(dead_code)] extern crate anyhow; extern crate cpal; use cpal::{ traits::{DeviceTrait, HostTrait, StreamTrait}, SizedSample, }; use cpal::{FromSample, Sample}; #[cfg_attr(target_os = "android", ndk_glue::main(backtrace = "full"))] fn main() { let host = cpal::default_host(); let device = host .default_output_device() .expect("failed to find output device"); let config = device.default_output_config().unwrap(); match config.sample_format() { cpal::SampleFormat::I8 => run::(&device, &config.into()).unwrap(), cpal::SampleFormat::I16 => run::(&device, &config.into()).unwrap(), // cpal::SampleFormat::I24 => run::(&device, &config.into()).unwrap(), cpal::SampleFormat::I32 => run::(&device, &config.into()).unwrap(), // cpal::SampleFormat::I48 => run::(&device, &config.into()).unwrap(), cpal::SampleFormat::I64 => run::(&device, &config.into()).unwrap(), cpal::SampleFormat::U8 => run::(&device, &config.into()).unwrap(), cpal::SampleFormat::U16 => run::(&device, &config.into()).unwrap(), // cpal::SampleFormat::U24 => run::(&device, 
&config.into()).unwrap(), cpal::SampleFormat::U32 => run::(&device, &config.into()).unwrap(), // cpal::SampleFormat::U48 => run::(&device, &config.into()).unwrap(), cpal::SampleFormat::U64 => run::(&device, &config.into()).unwrap(), cpal::SampleFormat::F32 => run::(&device, &config.into()).unwrap(), cpal::SampleFormat::F64 => run::(&device, &config.into()).unwrap(), sample_format => panic!("Unsupported sample format '{sample_format}'"), } } fn run(device: &cpal::Device, config: &cpal::StreamConfig) -> Result<(), anyhow::Error> where T: SizedSample + FromSample, { let sample_rate = config.sample_rate.0 as f32; let channels = config.channels as usize; // Produce a sinusoid of maximum amplitude. let mut sample_clock = 0f32; let mut next_value = move || { sample_clock = (sample_clock + 1.0) % sample_rate; (sample_clock * 440.0 * 2.0 * std::f32::consts::PI / sample_rate).sin() }; let err_fn = |err| eprintln!("an error occurred on stream: {}", err); let stream = device.build_output_stream( config, move |data: &mut [T], _: &cpal::OutputCallbackInfo| { write_data(data, channels, &mut next_value) }, err_fn, None, )?; stream.play()?; std::thread::sleep(std::time::Duration::from_millis(1000)); Ok(()) } fn write_data(output: &mut [T], channels: usize, next_sample: &mut dyn FnMut() -> f32) where T: Sample + FromSample, { for frame in output.chunks_mut(channels) { let value: T = T::from_sample(next_sample()); for sample in frame.iter_mut() { *sample = value; } } } cpal-0.15.2/examples/beep.rs000064400000000000000000000107171046102023000137350ustar 00000000000000use anyhow; use clap::Parser; use cpal::{ traits::{DeviceTrait, HostTrait, StreamTrait}, FromSample, Sample, SizedSample, }; #[derive(Parser, Debug)] #[command(version, about = "CPAL beep example", long_about = None)] struct Opt { /// The audio device to use #[arg(short, long, default_value_t = String::from("default"))] device: String, /// Use the JACK host #[cfg(all( any( target_os = "linux", target_os = "dragonfly", 
target_os = "freebsd", target_os = "netbsd" ), feature = "jack" ))] #[arg(short, long)] #[allow(dead_code)] jack: bool, } fn main() -> anyhow::Result<()> { let opt = Opt::parse(); // Conditionally compile with jack if the feature is specified. #[cfg(all( any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" ), feature = "jack" ))] // Manually check for flags. Can be passed through cargo with -- e.g. // cargo run --release --example beep --features jack -- --jack let host = if opt.jack { cpal::host_from_id(cpal::available_hosts() .into_iter() .find(|id| *id == cpal::HostId::Jack) .expect( "make sure --features jack is specified. only works on OSes where jack is available", )).expect("jack host unavailable") } else { cpal::default_host() }; #[cfg(any( not(any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" )), not(feature = "jack") ))] let host = cpal::default_host(); let device = if opt.device == "default" { host.default_output_device() } else { host.output_devices()? 
.find(|x| x.name().map(|y| y == opt.device).unwrap_or(false)) } .expect("failed to find output device"); println!("Output device: {}", device.name()?); let config = device.default_output_config().unwrap(); println!("Default output config: {:?}", config); match config.sample_format() { cpal::SampleFormat::I8 => run::(&device, &config.into()), cpal::SampleFormat::I16 => run::(&device, &config.into()), // cpal::SampleFormat::I24 => run::(&device, &config.into()), cpal::SampleFormat::I32 => run::(&device, &config.into()), // cpal::SampleFormat::I48 => run::(&device, &config.into()), cpal::SampleFormat::I64 => run::(&device, &config.into()), cpal::SampleFormat::U8 => run::(&device, &config.into()), cpal::SampleFormat::U16 => run::(&device, &config.into()), // cpal::SampleFormat::U24 => run::(&device, &config.into()), cpal::SampleFormat::U32 => run::(&device, &config.into()), // cpal::SampleFormat::U48 => run::(&device, &config.into()), cpal::SampleFormat::U64 => run::(&device, &config.into()), cpal::SampleFormat::F32 => run::(&device, &config.into()), cpal::SampleFormat::F64 => run::(&device, &config.into()), sample_format => panic!("Unsupported sample format '{sample_format}'"), } } pub fn run(device: &cpal::Device, config: &cpal::StreamConfig) -> Result<(), anyhow::Error> where T: SizedSample + FromSample, { let sample_rate = config.sample_rate.0 as f32; let channels = config.channels as usize; // Produce a sinusoid of maximum amplitude. 
let mut sample_clock = 0f32; let mut next_value = move || { sample_clock = (sample_clock + 1.0) % sample_rate; (sample_clock * 440.0 * 2.0 * std::f32::consts::PI / sample_rate).sin() }; let err_fn = |err| eprintln!("an error occurred on stream: {}", err); let stream = device.build_output_stream( config, move |data: &mut [T], _: &cpal::OutputCallbackInfo| { write_data(data, channels, &mut next_value) }, err_fn, None, )?; stream.play()?; std::thread::sleep(std::time::Duration::from_millis(1000)); Ok(()) } fn write_data(output: &mut [T], channels: usize, next_sample: &mut dyn FnMut() -> f32) where T: Sample + FromSample, { for frame in output.chunks_mut(channels) { let value: T = T::from_sample(next_sample()); for sample in frame.iter_mut() { *sample = value; } } } cpal-0.15.2/examples/enumerate.rs000064400000000000000000000053601046102023000150050ustar 00000000000000extern crate anyhow; extern crate cpal; use cpal::traits::{DeviceTrait, HostTrait}; fn main() -> Result<(), anyhow::Error> { println!("Supported hosts:\n {:?}", cpal::ALL_HOSTS); let available_hosts = cpal::available_hosts(); println!("Available hosts:\n {:?}", available_hosts); for host_id in available_hosts { println!("{}", host_id.name()); let host = cpal::host_from_id(host_id)?; let default_in = host.default_input_device().map(|e| e.name().unwrap()); let default_out = host.default_output_device().map(|e| e.name().unwrap()); println!(" Default Input Device:\n {:?}", default_in); println!(" Default Output Device:\n {:?}", default_out); let devices = host.devices()?; println!(" Devices: "); for (device_index, device) in devices.enumerate() { println!(" {}. 
\"{}\"", device_index + 1, device.name()?); // Input configs if let Ok(conf) = device.default_input_config() { println!(" Default input stream config:\n {:?}", conf); } let input_configs = match device.supported_input_configs() { Ok(f) => f.collect(), Err(e) => { println!(" Error getting supported input configs: {:?}", e); Vec::new() } }; if !input_configs.is_empty() { println!(" All supported input stream configs:"); for (config_index, config) in input_configs.into_iter().enumerate() { println!( " {}.{}. {:?}", device_index + 1, config_index + 1, config ); } } // Output configs if let Ok(conf) = device.default_output_config() { println!(" Default output stream config:\n {:?}", conf); } let output_configs = match device.supported_output_configs() { Ok(f) => f.collect(), Err(e) => { println!(" Error getting supported output configs: {:?}", e); Vec::new() } }; if !output_configs.is_empty() { println!(" All supported output stream configs:"); for (config_index, config) in output_configs.into_iter().enumerate() { println!( " {}.{}. {:?}", device_index + 1, config_index + 1, config ); } } } } Ok(()) } cpal-0.15.2/examples/feedback.rs000064400000000000000000000131001046102023000145330ustar 00000000000000//! Feeds back the input stream directly into the output stream. //! //! Assumes that the input and output devices can use the same stream configuration and that they //! support the f32 sample format. //! //! Uses a delay of `LATENCY_MS` milliseconds in case the default input and output streams are not //! precisely synchronised. 
use clap::Parser; use cpal::traits::{DeviceTrait, HostTrait, StreamTrait}; use ringbuf::HeapRb; #[derive(Parser, Debug)] #[command(version, about = "CPAL feedback example", long_about = None)] struct Opt { /// The input audio device to use #[arg(short, long, value_name = "IN", default_value_t = String::from("default"))] input_device: String, /// The output audio device to use #[arg(short, long, value_name = "OUT", default_value_t = String::from("default"))] output_device: String, /// Specify the delay between input and output #[arg(short, long, value_name = "DELAY_MS", default_value_t = 150.0)] latency: f32, /// Use the JACK host #[cfg(all( any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" ), feature = "jack" ))] #[arg(short, long)] #[allow(dead_code)] jack: bool, } fn main() -> anyhow::Result<()> { let opt = Opt::parse(); // Conditionally compile with jack if the feature is specified. #[cfg(all( any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" ), feature = "jack" ))] // Manually check for flags. Can be passed through cargo with -- e.g. // cargo run --release --example beep --features jack -- --jack let host = if opt.jack { cpal::host_from_id(cpal::available_hosts() .into_iter() .find(|id| *id == cpal::HostId::Jack) .expect( "make sure --features jack is specified. only works on OSes where jack is available", )).expect("jack host unavailable") } else { cpal::default_host() }; #[cfg(any( not(any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" )), not(feature = "jack") ))] let host = cpal::default_host(); // Find devices. let input_device = if opt.input_device == "default" { host.default_input_device() } else { host.input_devices()? 
.find(|x| x.name().map(|y| y == opt.input_device).unwrap_or(false)) } .expect("failed to find input device"); let output_device = if opt.output_device == "default" { host.default_output_device() } else { host.output_devices()? .find(|x| x.name().map(|y| y == opt.output_device).unwrap_or(false)) } .expect("failed to find output device"); println!("Using input device: \"{}\"", input_device.name()?); println!("Using output device: \"{}\"", output_device.name()?); // We'll try and use the same configuration between streams to keep it simple. let config: cpal::StreamConfig = input_device.default_input_config()?.into(); // Create a delay in case the input and output devices aren't synced. let latency_frames = (opt.latency / 1_000.0) * config.sample_rate.0 as f32; let latency_samples = latency_frames as usize * config.channels as usize; // The buffer to share samples let ring = HeapRb::::new(latency_samples * 2); let (mut producer, mut consumer) = ring.split(); // Fill the samples with 0.0 equal to the length of the delay. for _ in 0..latency_samples { // The ring buffer has twice as much space as necessary to add latency here, // so this should never fail producer.push(0.0).unwrap(); } let input_data_fn = move |data: &[f32], _: &cpal::InputCallbackInfo| { let mut output_fell_behind = false; for &sample in data { if producer.push(sample).is_err() { output_fell_behind = true; } } if output_fell_behind { eprintln!("output stream fell behind: try increasing latency"); } }; let output_data_fn = move |data: &mut [f32], _: &cpal::OutputCallbackInfo| { let mut input_fell_behind = false; for sample in data { *sample = match consumer.pop() { Some(s) => s, None => { input_fell_behind = true; 0.0 } }; } if input_fell_behind { eprintln!("input stream fell behind: try increasing latency"); } }; // Build streams. 
println!( "Attempting to build both streams with f32 samples and `{:?}`.", config ); let input_stream = input_device.build_input_stream(&config, input_data_fn, err_fn, None)?; let output_stream = output_device.build_output_stream(&config, output_data_fn, err_fn, None)?; println!("Successfully built streams."); // Play the streams. println!( "Starting the input and output streams with `{}` milliseconds of latency.", opt.latency ); input_stream.play()?; output_stream.play()?; // Run for 3 seconds before closing. println!("Playing for 3 seconds... "); std::thread::sleep(std::time::Duration::from_secs(3)); drop(input_stream); drop(output_stream); println!("Done!"); Ok(()) } fn err_fn(err: cpal::StreamError) { eprintln!("an error occurred on stream: {}", err); } cpal-0.15.2/examples/record_wav.rs000064400000000000000000000125271046102023000151560ustar 00000000000000//! Records a WAV file (roughly 3 seconds long) using the default input device and config. //! //! The input data is recorded to "$CARGO_MANIFEST_DIR/recorded.wav". use clap::Parser; use cpal::traits::{DeviceTrait, HostTrait, StreamTrait}; use cpal::{FromSample, Sample}; use std::fs::File; use std::io::BufWriter; use std::sync::{Arc, Mutex}; #[derive(Parser, Debug)] #[command(version, about = "CPAL record_wav example", long_about = None)] struct Opt { /// The audio device to use #[arg(short, long, default_value_t = String::from("default"))] device: String, /// Use the JACK host #[cfg(all( any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" ), feature = "jack" ))] #[arg(short, long)] #[allow(dead_code)] jack: bool, } fn main() -> Result<(), anyhow::Error> { let opt = Opt::parse(); // Conditionally compile with jack if the feature is specified. #[cfg(all( any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" ), feature = "jack" ))] // Manually check for flags. Can be passed through cargo with -- e.g. 
// cargo run --release --example beep --features jack -- --jack let host = if opt.jack { cpal::host_from_id(cpal::available_hosts() .into_iter() .find(|id| *id == cpal::HostId::Jack) .expect( "make sure --features jack is specified. only works on OSes where jack is available", )).expect("jack host unavailable") } else { cpal::default_host() }; #[cfg(any( not(any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" )), not(feature = "jack") ))] let host = cpal::default_host(); // Set up the input device and stream with the default input config. let device = if opt.device == "default" { host.default_input_device() } else { host.input_devices()? .find(|x| x.name().map(|y| y == opt.device).unwrap_or(false)) } .expect("failed to find input device"); println!("Input device: {}", device.name()?); let config = device .default_input_config() .expect("Failed to get default input config"); println!("Default input config: {:?}", config); // The WAV file we're recording to. const PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/recorded.wav"); let spec = wav_spec_from_config(&config); let writer = hound::WavWriter::create(PATH, spec)?; let writer = Arc::new(Mutex::new(Some(writer))); // A flag to indicate that recording is in progress. println!("Begin recording..."); // Run the input stream on a separate thread. 
let writer_2 = writer.clone(); let err_fn = move |err| { eprintln!("an error occurred on stream: {}", err); }; let stream = match config.sample_format() { cpal::SampleFormat::I8 => device.build_input_stream( &config.into(), move |data, _: &_| write_input_data::(data, &writer_2), err_fn, None, )?, cpal::SampleFormat::I16 => device.build_input_stream( &config.into(), move |data, _: &_| write_input_data::(data, &writer_2), err_fn, None, )?, cpal::SampleFormat::I32 => device.build_input_stream( &config.into(), move |data, _: &_| write_input_data::(data, &writer_2), err_fn, None, )?, cpal::SampleFormat::F32 => device.build_input_stream( &config.into(), move |data, _: &_| write_input_data::(data, &writer_2), err_fn, None, )?, sample_format => { return Err(anyhow::Error::msg(format!( "Unsupported sample format '{sample_format}'" ))) } }; stream.play()?; // Let recording go for roughly three seconds. std::thread::sleep(std::time::Duration::from_secs(3)); drop(stream); writer.lock().unwrap().take().unwrap().finalize()?; println!("Recording {} complete!", PATH); Ok(()) } fn sample_format(format: cpal::SampleFormat) -> hound::SampleFormat { if format.is_float() { hound::SampleFormat::Float } else { hound::SampleFormat::Int } } fn wav_spec_from_config(config: &cpal::SupportedStreamConfig) -> hound::WavSpec { hound::WavSpec { channels: config.channels() as _, sample_rate: config.sample_rate().0 as _, bits_per_sample: (config.sample_format().sample_size() * 8) as _, sample_format: sample_format(config.sample_format()), } } type WavWriterHandle = Arc>>>>; fn write_input_data(input: &[T], writer: &WavWriterHandle) where T: Sample, U: Sample + hound::Sample + FromSample, { if let Ok(mut guard) = writer.try_lock() { if let Some(writer) = guard.as_mut() { for &sample in input.iter() { let sample: U = U::from_sample(sample); writer.write_sample(sample).ok(); } } } } cpal-0.15.2/examples/synth_tones.rs000064400000000000000000000106201046102023000153700ustar 00000000000000/* This 
example expose parameter to pass generator of sample. Good starting point for integration of cpal into your application. */ extern crate anyhow; extern crate clap; extern crate cpal; use cpal::{ traits::{DeviceTrait, HostTrait, StreamTrait}, SizedSample, }; use cpal::{FromSample, Sample}; fn main() -> anyhow::Result<()> { let stream = stream_setup_for(sample_next)?; stream.play()?; std::thread::sleep(std::time::Duration::from_millis(3000)); Ok(()) } fn sample_next(o: &mut SampleRequestOptions) -> f32 { o.tick(); o.tone(440.) * 0.1 + o.tone(880.) * 0.1 // combination of several tones } pub struct SampleRequestOptions { pub sample_rate: f32, pub sample_clock: f32, pub nchannels: usize, } impl SampleRequestOptions { fn tone(&self, freq: f32) -> f32 { (self.sample_clock * freq * 2.0 * std::f32::consts::PI / self.sample_rate).sin() } fn tick(&mut self) { self.sample_clock = (self.sample_clock + 1.0) % self.sample_rate; } } pub fn stream_setup_for(on_sample: F) -> Result where F: FnMut(&mut SampleRequestOptions) -> f32 + std::marker::Send + 'static + Copy, { let (_host, device, config) = host_device_setup()?; match config.sample_format() { cpal::SampleFormat::I8 => stream_make::(&device, &config.into(), on_sample), cpal::SampleFormat::I16 => stream_make::(&device, &config.into(), on_sample), // cpal::SampleFormat::I24 => stream_make::(&device, &config.into(), on_sample), cpal::SampleFormat::I32 => stream_make::(&device, &config.into(), on_sample), // cpal::SampleFormat::I48 => stream_make::(&device, &config.into(), on_sample), cpal::SampleFormat::I64 => stream_make::(&device, &config.into(), on_sample), cpal::SampleFormat::U8 => stream_make::(&device, &config.into(), on_sample), cpal::SampleFormat::U16 => stream_make::(&device, &config.into(), on_sample), // cpal::SampleFormat::U24 => stream_make::(&device, &config.into(), on_sample), cpal::SampleFormat::U32 => stream_make::(&device, &config.into(), on_sample), // cpal::SampleFormat::U48 => stream_make::(&device, 
&config.into(), on_sample), cpal::SampleFormat::U64 => stream_make::(&device, &config.into(), on_sample), cpal::SampleFormat::F32 => stream_make::(&device, &config.into(), on_sample), cpal::SampleFormat::F64 => stream_make::(&device, &config.into(), on_sample), sample_format => Err(anyhow::Error::msg(format!( "Unsupported sample format '{sample_format}'" ))), } } pub fn host_device_setup( ) -> Result<(cpal::Host, cpal::Device, cpal::SupportedStreamConfig), anyhow::Error> { let host = cpal::default_host(); let device = host .default_output_device() .ok_or_else(|| anyhow::Error::msg("Default output device is not available"))?; println!("Output device : {}", device.name()?); let config = device.default_output_config()?; println!("Default output config : {:?}", config); Ok((host, device, config)) } pub fn stream_make( device: &cpal::Device, config: &cpal::StreamConfig, on_sample: F, ) -> Result where T: SizedSample + FromSample, F: FnMut(&mut SampleRequestOptions) -> f32 + std::marker::Send + 'static + Copy, { let sample_rate = config.sample_rate.0 as f32; let sample_clock = 0f32; let nchannels = config.channels as usize; let mut request = SampleRequestOptions { sample_rate, sample_clock, nchannels, }; let err_fn = |err| eprintln!("Error building output sound stream: {}", err); let stream = device.build_output_stream( config, move |output: &mut [T], _: &cpal::OutputCallbackInfo| { on_window(output, &mut request, on_sample) }, err_fn, None, )?; Ok(stream) } fn on_window(output: &mut [T], request: &mut SampleRequestOptions, mut on_sample: F) where T: Sample + FromSample, F: FnMut(&mut SampleRequestOptions) -> f32 + std::marker::Send + 'static, { for frame in output.chunks_mut(request.nchannels) { let value: T = T::from_sample(on_sample(request)); for sample in frame.iter_mut() { *sample = value; } } } cpal-0.15.2/src/error.rs000064400000000000000000000244401046102023000131220ustar 00000000000000use std::error::Error; use std::fmt::{Display, Formatter}; /// The requested 
host, although supported on this platform, is unavailable. #[derive(Copy, Clone, Debug)] pub struct HostUnavailable; impl Display for HostUnavailable { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { f.write_str("the requested host is unavailable") } } impl Error for HostUnavailable {} /// Some error has occurred that is specific to the backend from which it was produced. /// /// This error is often used as a catch-all in cases where: /// /// - It is unclear exactly what error might be produced by the backend API. /// - It does not make sense to add a variant to the enclosing error type. /// - No error was expected to occur at all, but we return an error to avoid the possibility of a /// `panic!` caused by some unforeseen or unknown reason. /// /// **Note:** If you notice a `BackendSpecificError` that you believe could be better handled in a /// cross-platform manner, please create an issue or submit a pull request with a patch that adds /// the necessary error variant to the appropriate error enum. #[derive(Clone, Debug)] pub struct BackendSpecificError { pub description: String, } impl Display for BackendSpecificError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { write!( f, "A backend-specific error has occurred: {}", self.description ) } } impl Error for BackendSpecificError {} /// An error that might occur while attempting to enumerate the available devices on a system. #[derive(Clone, Debug)] pub enum DevicesError { /// See the [`BackendSpecificError`] docs for more information about this error variant. BackendSpecific { err: BackendSpecificError }, } impl Display for DevicesError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Self::BackendSpecific { err } => err.fmt(f), } } } impl Error for DevicesError {} impl From for DevicesError { fn from(err: BackendSpecificError) -> Self { Self::BackendSpecific { err } } } /// An error that may occur while attempting to retrieve a device name. 
#[derive(Clone, Debug)] pub enum DeviceNameError { /// See the [`BackendSpecificError`] docs for more information about this error variant. BackendSpecific { err: BackendSpecificError }, } impl Display for DeviceNameError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Self::BackendSpecific { err } => err.fmt(f), } } } impl Error for DeviceNameError {} impl From for DeviceNameError { fn from(err: BackendSpecificError) -> Self { Self::BackendSpecific { err } } } /// Error that can happen when enumerating the list of supported formats. #[derive(Debug)] pub enum SupportedStreamConfigsError { /// The device no longer exists. This can happen if the device is disconnected while the /// program is running. DeviceNotAvailable, /// We called something the C-Layer did not understand InvalidArgument, /// See the [`BackendSpecificError`] docs for more information about this error variant. BackendSpecific { err: BackendSpecificError }, } impl Display for SupportedStreamConfigsError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Self::BackendSpecific { err } => err.fmt(f), Self::DeviceNotAvailable => f.write_str("The requested device is no longer available. For example, it has been unplugged."), Self::InvalidArgument => f.write_str("Invalid argument passed to the backend. For example, this happens when trying to read capture capabilities when the device does not support it.") } } } impl Error for SupportedStreamConfigsError {} impl From for SupportedStreamConfigsError { fn from(err: BackendSpecificError) -> Self { Self::BackendSpecific { err } } } /// May occur when attempting to request the default input or output stream format from a [`Device`](crate::Device). #[derive(Debug)] pub enum DefaultStreamConfigError { /// The device no longer exists. This can happen if the device is disconnected while the /// program is running. DeviceNotAvailable, /// Returned if e.g. 
the default input format was requested on an output-only audio device. StreamTypeNotSupported, /// See the [`BackendSpecificError`] docs for more information about this error variant. BackendSpecific { err: BackendSpecificError }, } impl Display for DefaultStreamConfigError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Self::BackendSpecific { err } => err.fmt(f), DefaultStreamConfigError::DeviceNotAvailable => f.write_str( "The requested device is no longer available. For example, it has been unplugged.", ), DefaultStreamConfigError::StreamTypeNotSupported => { f.write_str("The requested stream type is not supported by the device.") } } } } impl Error for DefaultStreamConfigError {} impl From for DefaultStreamConfigError { fn from(err: BackendSpecificError) -> Self { Self::BackendSpecific { err } } } /// Error that can happen when creating a [`Stream`](crate::Stream). #[derive(Debug)] pub enum BuildStreamError { /// The device no longer exists. This can happen if the device is disconnected while the /// program is running. DeviceNotAvailable, /// The specified stream configuration is not supported. StreamConfigNotSupported, /// We called something the C-Layer did not understand /// /// On ALSA device functions called with a feature they do not support will yield this. E.g. /// Trying to use capture capabilities on an output only format yields this. InvalidArgument, /// Occurs if adding a new Stream ID would cause an integer overflow. StreamIdOverflow, /// See the [`BackendSpecificError`] docs for more information about this error variant. BackendSpecific { err: BackendSpecificError }, } impl Display for BuildStreamError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Self::BackendSpecific { err } => err.fmt(f), BuildStreamError::DeviceNotAvailable => f.write_str( "The requested device is no longer available. 
For example, it has been unplugged.", ), BuildStreamError::StreamConfigNotSupported => { f.write_str("The requested stream configuration is not supported by the device.") } BuildStreamError::InvalidArgument => f.write_str( "The requested device does not support this capability (invalid argument)", ), BuildStreamError::StreamIdOverflow => { f.write_str("Adding a new stream ID would cause an overflow") } } } } impl Error for BuildStreamError {} impl From for BuildStreamError { fn from(err: BackendSpecificError) -> Self { Self::BackendSpecific { err } } } /// Errors that might occur when calling [`Stream::play()`](crate::traits::StreamTrait::play). /// /// As of writing this, only macOS may immediately return an error while calling this method. This /// is because both the alsa and wasapi backends only enqueue these commands and do not process /// them immediately. #[derive(Debug)] pub enum PlayStreamError { /// The device associated with the stream is no longer available. DeviceNotAvailable, /// See the [`BackendSpecificError`] docs for more information about this error variant. BackendSpecific { err: BackendSpecificError }, } impl Display for PlayStreamError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Self::BackendSpecific { err } => err.fmt(f), PlayStreamError::DeviceNotAvailable => { f.write_str("the device associated with the stream is no longer available") } } } } impl Error for PlayStreamError {} impl From for PlayStreamError { fn from(err: BackendSpecificError) -> Self { Self::BackendSpecific { err } } } /// Errors that might occur when calling [`Stream::pause()`](crate::traits::StreamTrait::pause). /// /// As of writing this, only macOS may immediately return an error while calling this method. This /// is because both the alsa and wasapi backends only enqueue these commands and do not process /// them immediately. #[derive(Debug)] pub enum PauseStreamError { /// The device associated with the stream is no longer available. 
DeviceNotAvailable, /// See the [`BackendSpecificError`] docs for more information about this error variant. BackendSpecific { err: BackendSpecificError }, } impl Display for PauseStreamError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Self::BackendSpecific { err } => err.fmt(f), PauseStreamError::DeviceNotAvailable => { f.write_str("the device associated with the stream is no longer available") } } } } impl Error for PauseStreamError {} impl From for PauseStreamError { fn from(err: BackendSpecificError) -> Self { Self::BackendSpecific { err } } } /// Errors that might occur while a stream is running. #[derive(Debug)] pub enum StreamError { /// The device no longer exists. This can happen if the device is disconnected while the /// program is running. DeviceNotAvailable, /// See the [`BackendSpecificError`] docs for more information about this error variant. BackendSpecific { err: BackendSpecificError }, } impl Display for StreamError { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { match self { Self::BackendSpecific { err } => err.fmt(f), StreamError::DeviceNotAvailable => f.write_str( "The requested device is no longer available. For example, it has been unplugged.", ), } } } impl Error for StreamError {} impl From for StreamError { fn from(err: BackendSpecificError) -> Self { Self::BackendSpecific { err } } } cpal-0.15.2/src/host/alsa/enumerate.rs000064400000000000000000000034161046102023000156530ustar 00000000000000use super::alsa; use super::parking_lot::Mutex; use super::{Device, DeviceHandles}; use crate::{BackendSpecificError, DevicesError}; /// ALSA's implementation for `Devices`. 
pub struct Devices { hint_iter: alsa::device_name::HintIter, } impl Devices { pub fn new() -> Result { Ok(Devices { hint_iter: alsa::device_name::HintIter::new_str(None, "pcm")?, }) } } unsafe impl Send for Devices {} unsafe impl Sync for Devices {} impl Iterator for Devices { type Item = Device; fn next(&mut self) -> Option { loop { match self.hint_iter.next() { None => return None, Some(hint) => { let name = match hint.name { None => continue, // Ignoring the `null` device. Some(name) if name == "null" => continue, Some(name) => name, }; if let Ok(handles) = DeviceHandles::open(&name) { return Some(Device { name, handles: Mutex::new(handles), }); } } } } } } #[inline] pub fn default_input_device() -> Option { Some(Device { name: "default".to_owned(), handles: Mutex::new(Default::default()), }) } #[inline] pub fn default_output_device() -> Option { Some(Device { name: "default".to_owned(), handles: Mutex::new(Default::default()), }) } impl From for DevicesError { fn from(err: alsa::Error) -> Self { let err: BackendSpecificError = err.into(); err.into() } } cpal-0.15.2/src/host/alsa/mod.rs000064400000000000000000001136101046102023000144430ustar 00000000000000extern crate alsa; extern crate libc; extern crate parking_lot; use self::alsa::poll::Descriptors; use self::parking_lot::Mutex; use crate::traits::{DeviceTrait, HostTrait, StreamTrait}; use crate::{ BackendSpecificError, BufferSize, BuildStreamError, ChannelCount, Data, DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError, }; use std::cmp; use std::convert::TryInto; use std::sync::Arc; use std::thread::{self, JoinHandle}; use std::time::Duration; use std::vec::IntoIter as VecIntoIter; pub use self::enumerate::{default_input_device, default_output_device, Devices}; pub type 
SupportedInputConfigs = VecIntoIter; pub type SupportedOutputConfigs = VecIntoIter; mod enumerate; /// The default linux, dragonfly, freebsd and netbsd host type. #[derive(Debug)] pub struct Host; impl Host { pub fn new() -> Result { Ok(Host) } } impl HostTrait for Host { type Devices = Devices; type Device = Device; fn is_available() -> bool { // Assume ALSA is always available on linux/dragonfly/freebsd/netbsd. true } fn devices(&self) -> Result { Devices::new() } fn default_input_device(&self) -> Option { default_input_device() } fn default_output_device(&self) -> Option { default_output_device() } } impl DeviceTrait for Device { type SupportedInputConfigs = SupportedInputConfigs; type SupportedOutputConfigs = SupportedOutputConfigs; type Stream = Stream; fn name(&self) -> Result { Device::name(self) } fn supported_input_configs( &self, ) -> Result { Device::supported_input_configs(self) } fn supported_output_configs( &self, ) -> Result { Device::supported_output_configs(self) } fn default_input_config(&self) -> Result { Device::default_input_config(self) } fn default_output_config(&self) -> Result { Device::default_output_config(self) } fn build_input_stream_raw( &self, conf: &StreamConfig, sample_format: SampleFormat, data_callback: D, error_callback: E, timeout: Option, ) -> Result where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let stream_inner = self.build_stream_inner(conf, sample_format, alsa::Direction::Capture)?; let stream = Stream::new_input( Arc::new(stream_inner), data_callback, error_callback, timeout, ); Ok(stream) } fn build_output_stream_raw( &self, conf: &StreamConfig, sample_format: SampleFormat, data_callback: D, error_callback: E, timeout: Option, ) -> Result where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let stream_inner = self.build_stream_inner(conf, sample_format, alsa::Direction::Playback)?; let stream = Stream::new_output( 
Arc::new(stream_inner), data_callback, error_callback, timeout, ); Ok(stream) } } struct TriggerSender(libc::c_int); struct TriggerReceiver(libc::c_int); impl TriggerSender { fn wakeup(&self) { let buf = 1u64; let ret = unsafe { libc::write(self.0, &buf as *const u64 as *const _, 8) }; assert_eq!(ret, 8); } } impl TriggerReceiver { fn clear_pipe(&self) { let mut out = 0u64; let ret = unsafe { libc::read(self.0, &mut out as *mut u64 as *mut _, 8) }; assert_eq!(ret, 8); } } fn trigger() -> (TriggerSender, TriggerReceiver) { let mut fds = [0, 0]; match unsafe { libc::pipe(fds.as_mut_ptr()) } { 0 => (TriggerSender(fds[1]), TriggerReceiver(fds[0])), _ => panic!("Could not create pipe"), } } impl Drop for TriggerSender { fn drop(&mut self) { unsafe { libc::close(self.0); } } } impl Drop for TriggerReceiver { fn drop(&mut self) { unsafe { libc::close(self.0); } } } #[derive(Default)] struct DeviceHandles { playback: Option, capture: Option, } impl DeviceHandles { /// Create `DeviceHandles` for `name` and try to open a handle for both /// directions. Returns `Ok` if either direction is opened successfully. fn open(name: &str) -> Result { let mut handles = Self::default(); let playback_err = handles.try_open(name, alsa::Direction::Playback).err(); let capture_err = handles.try_open(name, alsa::Direction::Capture).err(); if let Some(err) = capture_err.and(playback_err) { Err(err) } else { Ok(handles) } } /// Get a mutable reference to the `Option` for a specific `stream_type`. /// If the `Option` is `None`, the `alsa::PCM` will be opened and placed in /// the `Option` before returning. If `handle_mut()` returns `Ok` the contained /// `Option` is guaranteed to be `Some(..)`. 
fn try_open( &mut self, name: &str, stream_type: alsa::Direction, ) -> Result<&mut Option, alsa::Error> { let handle = match stream_type { alsa::Direction::Playback => &mut self.playback, alsa::Direction::Capture => &mut self.capture, }; if handle.is_none() { *handle = Some(alsa::pcm::PCM::new(name, stream_type, true)?); } Ok(handle) } /// Get a mutable reference to the `alsa::PCM` handle for a specific `stream_type`. /// If the handle is not yet opened, it will be opened and stored in `self`. fn get_mut( &mut self, name: &str, stream_type: alsa::Direction, ) -> Result<&mut alsa::PCM, alsa::Error> { Ok(self.try_open(name, stream_type)?.as_mut().unwrap()) } /// Take ownership of the `alsa::PCM` handle for a specific `stream_type`. /// If the handle is not yet opened, it will be opened and returned. fn take(&mut self, name: &str, stream_type: alsa::Direction) -> Result { Ok(self.try_open(name, stream_type)?.take().unwrap()) } } pub struct Device { name: String, handles: Mutex, } impl Device { fn build_stream_inner( &self, conf: &StreamConfig, sample_format: SampleFormat, stream_type: alsa::Direction, ) -> Result { let handle_result = self .handles .lock() .take(&self.name, stream_type) .map_err(|e| (e, e.errno())); let handle = match handle_result { Err((_, alsa::nix::errno::Errno::EBUSY)) => { return Err(BuildStreamError::DeviceNotAvailable) } Err((_, alsa::nix::errno::Errno::EINVAL)) => { return Err(BuildStreamError::InvalidArgument) } Err((e, _)) => return Err(e.into()), Ok(handle) => handle, }; let can_pause = set_hw_params_from_format(&handle, conf, sample_format)?; let period_len = set_sw_params_from_format(&handle, conf, stream_type)?; handle.prepare()?; let num_descriptors = handle.count(); if num_descriptors == 0 { let description = "poll descriptor count for stream was 0".to_string(); let err = BackendSpecificError { description }; return Err(err.into()); } // Check to see if we can retrieve valid timestamps from the device. 
// Related: https://bugs.freedesktop.org/show_bug.cgi?id=88503 let ts = handle.status()?.get_htstamp(); let creation_instant = match (ts.tv_sec, ts.tv_nsec) { (0, 0) => Some(std::time::Instant::now()), _ => None, }; if let alsa::Direction::Capture = stream_type { handle.start()?; } let stream_inner = StreamInner { channel: handle, sample_format, num_descriptors, conf: conf.clone(), period_len, can_pause, creation_instant, }; Ok(stream_inner) } #[inline] fn name(&self) -> Result { Ok(self.name.clone()) } fn supported_configs( &self, stream_t: alsa::Direction, ) -> Result, SupportedStreamConfigsError> { let mut guard = self.handles.lock(); let handle_result = guard .get_mut(&self.name, stream_t) .map_err(|e| (e, e.errno())); let handle = match handle_result { Err((_, alsa::nix::errno::Errno::ENOENT)) | Err((_, alsa::nix::errno::Errno::EBUSY)) => { return Err(SupportedStreamConfigsError::DeviceNotAvailable) } Err((_, alsa::nix::errno::Errno::EINVAL)) => { return Err(SupportedStreamConfigsError::InvalidArgument) } Err((e, _)) => return Err(e.into()), Ok(handle) => handle, }; let hw_params = alsa::pcm::HwParams::any(handle)?; // TODO: check endianness const FORMATS: [(SampleFormat, alsa::pcm::Format); 8] = [ (SampleFormat::I8, alsa::pcm::Format::S8), (SampleFormat::U8, alsa::pcm::Format::U8), (SampleFormat::I16, alsa::pcm::Format::S16LE), //SND_PCM_FORMAT_S16_BE, (SampleFormat::U16, alsa::pcm::Format::U16LE), //SND_PCM_FORMAT_U16_BE, //SND_PCM_FORMAT_S24_LE, //SND_PCM_FORMAT_S24_BE, //SND_PCM_FORMAT_U24_LE, //SND_PCM_FORMAT_U24_BE, (SampleFormat::I32, alsa::pcm::Format::S32LE), //SND_PCM_FORMAT_S32_BE, (SampleFormat::U32, alsa::pcm::Format::U32LE), //SND_PCM_FORMAT_U32_BE, (SampleFormat::F32, alsa::pcm::Format::FloatLE), //SND_PCM_FORMAT_FLOAT_BE, (SampleFormat::F64, alsa::pcm::Format::Float64LE), //SND_PCM_FORMAT_FLOAT64_BE, //SND_PCM_FORMAT_IEC958_SUBFRAME_LE, //SND_PCM_FORMAT_IEC958_SUBFRAME_BE, //SND_PCM_FORMAT_MU_LAW, //SND_PCM_FORMAT_A_LAW, 
//SND_PCM_FORMAT_IMA_ADPCM, //SND_PCM_FORMAT_MPEG, //SND_PCM_FORMAT_GSM, //SND_PCM_FORMAT_SPECIAL, //SND_PCM_FORMAT_S24_3LE, //SND_PCM_FORMAT_S24_3BE, //SND_PCM_FORMAT_U24_3LE, //SND_PCM_FORMAT_U24_3BE, //SND_PCM_FORMAT_S20_3LE, //SND_PCM_FORMAT_S20_3BE, //SND_PCM_FORMAT_U20_3LE, //SND_PCM_FORMAT_U20_3BE, //SND_PCM_FORMAT_S18_3LE, //SND_PCM_FORMAT_S18_3BE, //SND_PCM_FORMAT_U18_3LE, //SND_PCM_FORMAT_U18_3BE, ]; let mut supported_formats = Vec::new(); for &(sample_format, alsa_format) in FORMATS.iter() { if hw_params.test_format(alsa_format).is_ok() { supported_formats.push(sample_format); } } let min_rate = hw_params.get_rate_min()?; let max_rate = hw_params.get_rate_max()?; let sample_rates = if min_rate == max_rate || hw_params.test_rate(min_rate + 1).is_ok() { vec![(min_rate, max_rate)] } else { const RATES: [libc::c_uint; 13] = [ 5512, 8000, 11025, 16000, 22050, 32000, 44100, 48000, 64000, 88200, 96000, 176400, 192000, ]; let mut rates = Vec::new(); for &rate in RATES.iter() { if hw_params.test_rate(rate).is_ok() { rates.push((rate, rate)); } } if rates.is_empty() { vec![(min_rate, max_rate)] } else { rates } }; let min_channels = hw_params.get_channels_min()?; let max_channels = hw_params.get_channels_max()?; let max_channels = cmp::min(max_channels, 32); // TODO: limiting to 32 channels or too much stuff is returned let supported_channels = (min_channels..max_channels + 1) .filter_map(|num| { if hw_params.test_channels(num).is_ok() { Some(num as ChannelCount) } else { None } }) .collect::>(); let min_buffer_size = hw_params.get_buffer_size_min()?; let max_buffer_size = hw_params.get_buffer_size_max()?; let buffer_size_range = SupportedBufferSize::Range { min: min_buffer_size as u32, max: max_buffer_size as u32, }; let mut output = Vec::with_capacity( supported_formats.len() * supported_channels.len() * sample_rates.len(), ); for &sample_format in supported_formats.iter() { for &channels in supported_channels.iter() { for &(min_rate, max_rate) in 
sample_rates.iter() { output.push(SupportedStreamConfigRange { channels, min_sample_rate: SampleRate(min_rate as u32), max_sample_rate: SampleRate(max_rate as u32), buffer_size: buffer_size_range.clone(), sample_format, }); } } } Ok(output.into_iter()) } fn supported_input_configs( &self, ) -> Result { self.supported_configs(alsa::Direction::Capture) } fn supported_output_configs( &self, ) -> Result { self.supported_configs(alsa::Direction::Playback) } // ALSA does not offer default stream formats, so instead we compare all supported formats by // the `SupportedStreamConfigRange::cmp_default_heuristics` order and select the greatest. fn default_config( &self, stream_t: alsa::Direction, ) -> Result { let mut formats: Vec<_> = { match self.supported_configs(stream_t) { Err(SupportedStreamConfigsError::DeviceNotAvailable) => { return Err(DefaultStreamConfigError::DeviceNotAvailable); } Err(SupportedStreamConfigsError::InvalidArgument) => { // this happens sometimes when querying for input and output capabilities, but // the device supports only one return Err(DefaultStreamConfigError::StreamTypeNotSupported); } Err(SupportedStreamConfigsError::BackendSpecific { err }) => { return Err(err.into()); } Ok(fmts) => fmts.collect(), } }; formats.sort_by(|a, b| a.cmp_default_heuristics(b)); match formats.into_iter().last() { Some(f) => { let min_r = f.min_sample_rate; let max_r = f.max_sample_rate; let mut format = f.with_max_sample_rate(); const HZ_44100: SampleRate = SampleRate(44_100); if min_r <= HZ_44100 && HZ_44100 <= max_r { format.sample_rate = HZ_44100; } Ok(format) } None => Err(DefaultStreamConfigError::StreamTypeNotSupported), } } fn default_input_config(&self) -> Result { self.default_config(alsa::Direction::Capture) } fn default_output_config(&self) -> Result { self.default_config(alsa::Direction::Playback) } } struct StreamInner { // The ALSA channel. 
channel: alsa::pcm::PCM, // When converting between file descriptors and `snd_pcm_t`, this is the number of // file descriptors that this `snd_pcm_t` uses. num_descriptors: usize, // Format of the samples. sample_format: SampleFormat, // The configuration used to open this stream. conf: StreamConfig, // Minimum number of samples to put in the buffer. period_len: usize, #[allow(dead_code)] // Whether or not the hardware supports pausing the stream. // TODO: We need an API to expose this. See #197, #284. can_pause: bool, // In the case that the device does not return valid timestamps via `get_htstamp`, this field // will be `Some` and will contain an `Instant` representing the moment the stream was created. // // If this field is `Some`, then the stream will use the duration since this instant as a // source for timestamps. // // If this field is `None` then the elapsed duration between `get_trigger_htstamp` and // `get_htstamp` is used. creation_instant: Option, } // Assume that the ALSA library is built with thread safe option. unsafe impl Sync for StreamInner {} #[derive(Debug, Eq, PartialEq)] enum StreamType { Input, Output, } pub struct Stream { /// The high-priority audio processing thread calling callbacks. /// Option used for moving out in destructor. thread: Option>, /// Handle to the underlying stream for playback controls. inner: Arc, /// Used to signal to stop processing. 
trigger: TriggerSender, } struct StreamWorkerContext { descriptors: Vec, buffer: Vec, poll_timeout: i32, } impl StreamWorkerContext { fn new(poll_timeout: &Option) -> Self { let poll_timeout: i32 = if let Some(d) = poll_timeout { d.as_millis().try_into().unwrap() } else { -1 }; Self { descriptors: Vec::new(), buffer: Vec::new(), poll_timeout, } } } fn input_stream_worker( rx: TriggerReceiver, stream: &StreamInner, data_callback: &mut (dyn FnMut(&Data, &InputCallbackInfo) + Send + 'static), error_callback: &mut (dyn FnMut(StreamError) + Send + 'static), timeout: Option, ) { let mut ctxt = StreamWorkerContext::new(&timeout); loop { let flow = poll_descriptors_and_prepare_buffer(&rx, stream, &mut ctxt).unwrap_or_else(|err| { error_callback(err.into()); PollDescriptorsFlow::Continue }); match flow { PollDescriptorsFlow::Continue => { continue; } PollDescriptorsFlow::XRun => { if let Err(err) = stream.channel.prepare() { error_callback(err.into()); } continue; } PollDescriptorsFlow::Return => return, PollDescriptorsFlow::Ready { status, avail_frames: _, delay_frames, stream_type, } => { assert_eq!( stream_type, StreamType::Input, "expected input stream, but polling descriptors indicated output", ); if let Err(err) = process_input( stream, &mut ctxt.buffer, status, delay_frames, data_callback, ) { error_callback(err.into()); } } } } } fn output_stream_worker( rx: TriggerReceiver, stream: &StreamInner, data_callback: &mut (dyn FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static), error_callback: &mut (dyn FnMut(StreamError) + Send + 'static), timeout: Option, ) { let mut ctxt = StreamWorkerContext::new(&timeout); loop { let flow = poll_descriptors_and_prepare_buffer(&rx, stream, &mut ctxt).unwrap_or_else(|err| { error_callback(err.into()); PollDescriptorsFlow::Continue }); match flow { PollDescriptorsFlow::Continue => continue, PollDescriptorsFlow::XRun => { if let Err(err) = stream.channel.prepare() { error_callback(err.into()); } continue; } 
PollDescriptorsFlow::Return => return, PollDescriptorsFlow::Ready { status, avail_frames, delay_frames, stream_type, } => { assert_eq!( stream_type, StreamType::Output, "expected output stream, but polling descriptors indicated input", ); if let Err(err) = process_output( stream, &mut ctxt.buffer, status, avail_frames, delay_frames, data_callback, error_callback, ) { error_callback(err.into()); } } } } } enum PollDescriptorsFlow { Continue, Return, Ready { stream_type: StreamType, status: alsa::pcm::Status, avail_frames: usize, delay_frames: usize, }, XRun, } // This block is shared between both input and output stream worker functions. fn poll_descriptors_and_prepare_buffer( rx: &TriggerReceiver, stream: &StreamInner, ctxt: &mut StreamWorkerContext, ) -> Result { let StreamWorkerContext { ref mut descriptors, ref mut buffer, ref poll_timeout, } = *ctxt; descriptors.clear(); // Add the self-pipe for signaling termination. descriptors.push(libc::pollfd { fd: rx.0, events: libc::POLLIN, revents: 0, }); // Add ALSA polling fds. let len = descriptors.len(); descriptors.resize( stream.num_descriptors + len, libc::pollfd { fd: 0, events: 0, revents: 0, }, ); let filled = stream.channel.fill(&mut descriptors[len..])?; debug_assert_eq!(filled, stream.num_descriptors); // Don't timeout, wait forever. let res = alsa::poll::poll(descriptors, *poll_timeout)?; if res == 0 { let description = String::from("`alsa::poll()` spuriously returned"); return Err(BackendSpecificError { description }); } if descriptors[0].revents != 0 { // The stream has been requested to be destroyed. rx.clear_pipe(); return Ok(PollDescriptorsFlow::Return); } let stream_type = match stream.channel.revents(&descriptors[1..])? 
{ alsa::poll::Flags::OUT => StreamType::Output, alsa::poll::Flags::IN => StreamType::Input, _ => { // Nothing to process, poll again return Ok(PollDescriptorsFlow::Continue); } }; let status = stream.channel.status()?; let avail_frames = match stream.channel.avail() { Err(err) if err.errno() == alsa::nix::errno::Errno::EPIPE => { return Ok(PollDescriptorsFlow::XRun) } res => res, }? as usize; let delay_frames = match status.get_delay() { // Buffer underrun. TODO: Notify the user. d if d < 0 => 0, d => d as usize, }; let available_samples = avail_frames * stream.conf.channels as usize; // Only go on if there is at least `stream.period_len` samples. if available_samples < stream.period_len { return Ok(PollDescriptorsFlow::Continue); } // Prepare the data buffer. let buffer_size = stream.sample_format.sample_size() * available_samples; buffer.resize(buffer_size, 0u8); Ok(PollDescriptorsFlow::Ready { stream_type, status, avail_frames, delay_frames, }) } // Read input data from ALSA and deliver it to the user. fn process_input( stream: &StreamInner, buffer: &mut [u8], status: alsa::pcm::Status, delay_frames: usize, data_callback: &mut (dyn FnMut(&Data, &InputCallbackInfo) + Send + 'static), ) -> Result<(), BackendSpecificError> { stream.channel.io_bytes().readi(buffer)?; let sample_format = stream.sample_format; let data = buffer.as_mut_ptr() as *mut (); let len = buffer.len() / sample_format.sample_size(); let data = unsafe { Data::from_parts(data, len, sample_format) }; let callback = stream_timestamp(&status, stream.creation_instant)?; let delay_duration = frames_to_duration(delay_frames, stream.conf.sample_rate); let capture = callback .sub(delay_duration) .expect("`capture` is earlier than representation supported by `StreamInstant`"); let timestamp = crate::InputStreamTimestamp { callback, capture }; let info = crate::InputCallbackInfo { timestamp }; data_callback(&data, &info); Ok(()) } // Request data from the user's function and write it via ALSA. 
// // Returns `true` fn process_output( stream: &StreamInner, buffer: &mut [u8], status: alsa::pcm::Status, available_frames: usize, delay_frames: usize, data_callback: &mut (dyn FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static), error_callback: &mut dyn FnMut(StreamError), ) -> Result<(), BackendSpecificError> { { // We're now sure that we're ready to write data. let sample_format = stream.sample_format; let data = buffer.as_mut_ptr() as *mut (); let len = buffer.len() / sample_format.sample_size(); let mut data = unsafe { Data::from_parts(data, len, sample_format) }; let callback = stream_timestamp(&status, stream.creation_instant)?; let delay_duration = frames_to_duration(delay_frames, stream.conf.sample_rate); let playback = callback .add(delay_duration) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); let timestamp = crate::OutputStreamTimestamp { callback, playback }; let info = crate::OutputCallbackInfo { timestamp }; data_callback(&mut data, &info); } loop { match stream.channel.io_bytes().writei(buffer) { Err(err) if err.errno() == alsa::nix::errno::Errno::EPIPE => { // buffer underrun // TODO: Notify the user of this. let _ = stream.channel.try_recover(err, false); } Err(err) => { error_callback(err.into()); continue; } Ok(result) if result != available_frames => { let description = format!( "unexpected number of frames written: expected {}, \ result {} (this should never happen)", available_frames, result, ); error_callback(BackendSpecificError { description }.into()); continue; } _ => { break; } } } Ok(()) } // Use the elapsed duration since the start of the stream. // // This ensures positive values that are compatible with our `StreamInstant` representation. 
fn stream_timestamp( status: &alsa::pcm::Status, creation_instant: Option, ) -> Result { match creation_instant { None => { let trigger_ts = status.get_trigger_htstamp(); let ts = status.get_htstamp(); let nanos = timespec_diff_nanos(ts, trigger_ts); if nanos < 0 { panic!( "get_htstamp `{:?}` was earlier than get_trigger_htstamp `{:?}`", ts, trigger_ts ); } Ok(crate::StreamInstant::from_nanos(nanos)) } Some(creation) => { let now = std::time::Instant::now(); let duration = now.duration_since(creation); let instant = crate::StreamInstant::from_nanos_i128(duration.as_nanos() as i128) .expect("stream duration has exceeded `StreamInstant` representation"); Ok(instant) } } } // Adapted from `timestamp2ns` here: // https://fossies.org/linux/alsa-lib/test/audio_time.c fn timespec_to_nanos(ts: libc::timespec) -> i64 { ts.tv_sec as i64 * 1_000_000_000 + ts.tv_nsec as i64 } // Adapted from `timediff` here: // https://fossies.org/linux/alsa-lib/test/audio_time.c fn timespec_diff_nanos(a: libc::timespec, b: libc::timespec) -> i64 { timespec_to_nanos(a) - timespec_to_nanos(b) } // Convert the given duration in frames at the given sample rate to a `std::time::Duration`. fn frames_to_duration(frames: usize, rate: crate::SampleRate) -> std::time::Duration { let secsf = frames as f64 / rate.0 as f64; let secs = secsf as u64; let nanos = ((secsf - secs as f64) * 1_000_000_000.0) as u32; std::time::Duration::new(secs, nanos) } impl Stream { fn new_input( inner: Arc, mut data_callback: D, mut error_callback: E, timeout: Option, ) -> Stream where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let (tx, rx) = trigger(); // Clone the handle for passing into worker thread. 
let stream = inner.clone(); let thread = thread::Builder::new() .name("cpal_alsa_in".to_owned()) .spawn(move || { input_stream_worker( rx, &stream, &mut data_callback, &mut error_callback, timeout, ); }) .unwrap(); Stream { thread: Some(thread), inner, trigger: tx, } } fn new_output( inner: Arc, mut data_callback: D, mut error_callback: E, timeout: Option, ) -> Stream where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let (tx, rx) = trigger(); // Clone the handle for passing into worker thread. let stream = inner.clone(); let thread = thread::Builder::new() .name("cpal_alsa_out".to_owned()) .spawn(move || { output_stream_worker( rx, &stream, &mut data_callback, &mut error_callback, timeout, ); }) .unwrap(); Stream { thread: Some(thread), inner, trigger: tx, } } } impl Drop for Stream { fn drop(&mut self) { self.trigger.wakeup(); self.thread.take().unwrap().join().unwrap(); } } impl StreamTrait for Stream { fn play(&self) -> Result<(), PlayStreamError> { self.inner.channel.pause(false).ok(); Ok(()) } fn pause(&self) -> Result<(), PauseStreamError> { self.inner.channel.pause(true).ok(); Ok(()) } } fn set_hw_params_from_format( pcm_handle: &alsa::pcm::PCM, config: &StreamConfig, sample_format: SampleFormat, ) -> Result { let hw_params = alsa::pcm::HwParams::any(pcm_handle)?; hw_params.set_access(alsa::pcm::Access::RWInterleaved)?; let sample_format = if cfg!(target_endian = "big") { match sample_format { SampleFormat::I8 => alsa::pcm::Format::S8, SampleFormat::I16 => alsa::pcm::Format::S16BE, // SampleFormat::I24 => alsa::pcm::Format::S24BE, SampleFormat::I32 => alsa::pcm::Format::S32BE, // SampleFormat::I48 => alsa::pcm::Format::S48BE, // SampleFormat::I64 => alsa::pcm::Format::S64BE, SampleFormat::U8 => alsa::pcm::Format::U8, SampleFormat::U16 => alsa::pcm::Format::U16BE, // SampleFormat::U24 => alsa::pcm::Format::U24BE, SampleFormat::U32 => alsa::pcm::Format::U32BE, // SampleFormat::U48 => 
alsa::pcm::Format::U48BE, // SampleFormat::U64 => alsa::pcm::Format::U64BE, SampleFormat::F32 => alsa::pcm::Format::FloatBE, SampleFormat::F64 => alsa::pcm::Format::Float64BE, sample_format => { return Err(BackendSpecificError { description: format!( "Sample format '{}' is not supported by this backend", sample_format ), }) } } } else { match sample_format { SampleFormat::I8 => alsa::pcm::Format::S8, SampleFormat::I16 => alsa::pcm::Format::S16LE, // SampleFormat::I24 => alsa::pcm::Format::S24LE, SampleFormat::I32 => alsa::pcm::Format::S32LE, // SampleFormat::I48 => alsa::pcm::Format::S48LE, // SampleFormat::I64 => alsa::pcm::Format::S64LE, SampleFormat::U8 => alsa::pcm::Format::U8, SampleFormat::U16 => alsa::pcm::Format::U16LE, // SampleFormat::U24 => alsa::pcm::Format::U24LE, SampleFormat::U32 => alsa::pcm::Format::U32LE, // SampleFormat::U48 => alsa::pcm::Format::U48LE, // SampleFormat::U64 => alsa::pcm::Format::U64LE, SampleFormat::F32 => alsa::pcm::Format::FloatLE, SampleFormat::F64 => alsa::pcm::Format::Float64LE, sample_format => { return Err(BackendSpecificError { description: format!( "Sample format '{}' is not supported by this backend", sample_format ), }) } } }; hw_params.set_format(sample_format)?; hw_params.set_rate(config.sample_rate.0, alsa::ValueOr::Nearest)?; hw_params.set_channels(config.channels as u32)?; match config.buffer_size { BufferSize::Fixed(v) => { hw_params.set_period_size_near((v / 4) as alsa::pcm::Frames, alsa::ValueOr::Nearest)?; hw_params.set_buffer_size(v as alsa::pcm::Frames)?; } BufferSize::Default => { // These values together represent a moderate latency and wakeup interval. 
// Without them, we are at the mercy of the device hw_params.set_period_time_near(25_000, alsa::ValueOr::Nearest)?; hw_params.set_buffer_time_near(100_000, alsa::ValueOr::Nearest)?; } } pcm_handle.hw_params(&hw_params)?; Ok(hw_params.can_pause()) } fn set_sw_params_from_format( pcm_handle: &alsa::pcm::PCM, config: &StreamConfig, stream_type: alsa::Direction, ) -> Result { let sw_params = pcm_handle.sw_params_current()?; let period_len = { let (buffer, period) = pcm_handle.get_params()?; if buffer == 0 { return Err(BackendSpecificError { description: "initialization resulted in a null buffer".to_string(), }); } sw_params.set_avail_min(period as alsa::pcm::Frames)?; let start_threshold = match stream_type { alsa::Direction::Playback => buffer - period, // For capture streams, the start threshold is irrelevant and ignored, // because build_stream_inner() starts the stream before process_input() // reads from it. Set it anyway I guess, since it's better than leaving // it at an unspecified default value. alsa::Direction::Capture => 1, }; sw_params.set_start_threshold(start_threshold.try_into().unwrap())?; period as usize * config.channels as usize }; sw_params.set_tstamp_mode(true)?; sw_params.set_tstamp_type(alsa::pcm::TstampType::MonotonicRaw)?; // tstamp_type param cannot be changed after the device is opened. // The default tstamp_type value on most Linux systems is "monotonic", // let's try to use it if setting the tstamp_type fails. 
if pcm_handle.sw_params(&sw_params).is_err() { sw_params.set_tstamp_type(alsa::pcm::TstampType::Monotonic)?; pcm_handle.sw_params(&sw_params)?; } Ok(period_len) } impl From for BackendSpecificError { fn from(err: alsa::Error) -> Self { BackendSpecificError { description: err.to_string(), } } } impl From for BuildStreamError { fn from(err: alsa::Error) -> Self { let err: BackendSpecificError = err.into(); err.into() } } impl From for SupportedStreamConfigsError { fn from(err: alsa::Error) -> Self { let err: BackendSpecificError = err.into(); err.into() } } impl From for PlayStreamError { fn from(err: alsa::Error) -> Self { let err: BackendSpecificError = err.into(); err.into() } } impl From for PauseStreamError { fn from(err: alsa::Error) -> Self { let err: BackendSpecificError = err.into(); err.into() } } impl From for StreamError { fn from(err: alsa::Error) -> Self { let err: BackendSpecificError = err.into(); err.into() } } cpal-0.15.2/src/host/asio/device.rs000064400000000000000000000176031046102023000151430ustar 00000000000000use std; pub type SupportedInputConfigs = std::vec::IntoIter; pub type SupportedOutputConfigs = std::vec::IntoIter; use super::parking_lot::Mutex; use super::sys; use crate::BackendSpecificError; use crate::DefaultStreamConfigError; use crate::DeviceNameError; use crate::DevicesError; use crate::SampleFormat; use crate::SampleRate; use crate::SupportedBufferSize; use crate::SupportedStreamConfig; use crate::SupportedStreamConfigRange; use crate::SupportedStreamConfigsError; use std::hash::{Hash, Hasher}; use std::sync::Arc; /// A ASIO Device pub struct Device { /// The driver represented by this device. pub driver: Arc, // Input and/or Output stream. // A driver can only have one of each. // They need to be created at the same time. pub asio_streams: Arc>, } /// All available devices. 
pub struct Devices { asio: Arc, drivers: std::vec::IntoIter, } impl PartialEq for Device { fn eq(&self, other: &Self) -> bool { self.driver.name() == other.driver.name() } } impl Eq for Device {} impl Hash for Device { fn hash(&self, state: &mut H) { self.driver.name().hash(state); } } impl Device { pub fn name(&self) -> Result { Ok(self.driver.name().to_string()) } /// Gets the supported input configs. /// TODO currently only supports the default. /// Need to find all possible configs. pub fn supported_input_configs( &self, ) -> Result { // Retrieve the default config for the total supported channels and supported sample // format. let f = match self.default_input_config() { Err(_) => return Err(SupportedStreamConfigsError::DeviceNotAvailable), Ok(f) => f, }; // Collect a config for every combination of supported sample rate and number of channels. let mut supported_configs = vec![]; for &rate in crate::COMMON_SAMPLE_RATES { if !self .driver .can_sample_rate(rate.0.into()) .ok() .unwrap_or(false) { continue; } for channels in 1..f.channels + 1 { supported_configs.push(SupportedStreamConfigRange { channels, min_sample_rate: rate, max_sample_rate: rate, buffer_size: f.buffer_size.clone(), sample_format: f.sample_format.clone(), }) } } Ok(supported_configs.into_iter()) } /// Gets the supported output configs. /// TODO currently only supports the default. /// Need to find all possible configs. pub fn supported_output_configs( &self, ) -> Result { // Retrieve the default config for the total supported channels and supported sample // format. let f = match self.default_output_config() { Err(_) => return Err(SupportedStreamConfigsError::DeviceNotAvailable), Ok(f) => f, }; // Collect a config for every combination of supported sample rate and number of channels. 
let mut supported_configs = vec![]; for &rate in crate::COMMON_SAMPLE_RATES { if !self .driver .can_sample_rate(rate.0.into()) .ok() .unwrap_or(false) { continue; } for channels in 1..f.channels + 1 { supported_configs.push(SupportedStreamConfigRange { channels, min_sample_rate: rate, max_sample_rate: rate, buffer_size: f.buffer_size.clone(), sample_format: f.sample_format.clone(), }) } } Ok(supported_configs.into_iter()) } /// Returns the default input config pub fn default_input_config(&self) -> Result { let channels = self.driver.channels().map_err(default_config_err)?.ins as u16; let sample_rate = SampleRate(self.driver.sample_rate().map_err(default_config_err)? as _); let (min, max) = self.driver.buffersize_range().map_err(default_config_err)?; let buffer_size = SupportedBufferSize::Range { min: min as u32, max: max as u32, }; // Map th ASIO sample type to a CPAL sample type let data_type = self.driver.input_data_type().map_err(default_config_err)?; let sample_format = convert_data_type(&data_type) .ok_or(DefaultStreamConfigError::StreamTypeNotSupported)?; Ok(SupportedStreamConfig { channels, sample_rate, buffer_size, sample_format, }) } /// Returns the default output config pub fn default_output_config(&self) -> Result { let channels = self.driver.channels().map_err(default_config_err)?.outs as u16; let sample_rate = SampleRate(self.driver.sample_rate().map_err(default_config_err)? 
as _); let (min, max) = self.driver.buffersize_range().map_err(default_config_err)?; let buffer_size = SupportedBufferSize::Range { min: min as u32, max: max as u32, }; let data_type = self.driver.output_data_type().map_err(default_config_err)?; let sample_format = convert_data_type(&data_type) .ok_or(DefaultStreamConfigError::StreamTypeNotSupported)?; Ok(SupportedStreamConfig { channels, sample_rate, buffer_size, sample_format, }) } } impl Devices { pub fn new(asio: Arc) -> Result { let drivers = asio.driver_names().into_iter(); Ok(Devices { asio, drivers }) } } impl Iterator for Devices { type Item = Device; /// Load drivers and return device fn next(&mut self) -> Option { loop { match self.drivers.next() { Some(name) => match self.asio.load_driver(&name) { Ok(driver) => { let driver = Arc::new(driver); let asio_streams = Arc::new(Mutex::new(sys::AsioStreams { input: None, output: None, })); return Some(Device { driver, asio_streams, }); } Err(_) => continue, }, None => return None, } } } } pub(crate) fn convert_data_type(ty: &sys::AsioSampleType) -> Option { let fmt = match *ty { sys::AsioSampleType::ASIOSTInt16MSB => SampleFormat::I16, sys::AsioSampleType::ASIOSTInt16LSB => SampleFormat::I16, sys::AsioSampleType::ASIOSTFloat32MSB => SampleFormat::F32, sys::AsioSampleType::ASIOSTFloat32LSB => SampleFormat::F32, sys::AsioSampleType::ASIOSTInt32MSB => SampleFormat::I32, sys::AsioSampleType::ASIOSTInt32LSB => SampleFormat::I32, _ => return None, }; Some(fmt) } fn default_config_err(e: sys::AsioError) -> DefaultStreamConfigError { match e { sys::AsioError::NoDrivers | sys::AsioError::HardwareMalfunction => { DefaultStreamConfigError::DeviceNotAvailable } sys::AsioError::NoRate => DefaultStreamConfigError::StreamTypeNotSupported, err => { let description = format!("{}", err); BackendSpecificError { description }.into() } } } cpal-0.15.2/src/host/asio/mod.rs000064400000000000000000000074161046102023000144640ustar 00000000000000extern crate asio_sys as sys; extern 
crate parking_lot; use crate::traits::{DeviceTrait, HostTrait, StreamTrait}; use crate::{ BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat, StreamConfig, StreamError, SupportedStreamConfig, SupportedStreamConfigsError, }; pub use self::device::{Device, Devices, SupportedInputConfigs, SupportedOutputConfigs}; pub use self::stream::Stream; use std::sync::Arc; use std::time::Duration; mod device; mod stream; /// The host for ASIO. #[derive(Debug)] pub struct Host { asio: Arc, } impl Host { pub fn new() -> Result { let asio = Arc::new(sys::Asio::new()); let host = Host { asio }; Ok(host) } } impl HostTrait for Host { type Devices = Devices; type Device = Device; fn is_available() -> bool { true //unimplemented!("check how to do this using asio-sys") } fn devices(&self) -> Result { Devices::new(self.asio.clone()) } fn default_input_device(&self) -> Option { // ASIO has no concept of a default device, so just use the first. self.input_devices().ok().and_then(|mut ds| ds.next()) } fn default_output_device(&self) -> Option { // ASIO has no concept of a default device, so just use the first. 
self.output_devices().ok().and_then(|mut ds| ds.next()) } } impl DeviceTrait for Device { type SupportedInputConfigs = SupportedInputConfigs; type SupportedOutputConfigs = SupportedOutputConfigs; type Stream = Stream; fn name(&self) -> Result { Device::name(self) } fn supported_input_configs( &self, ) -> Result { Device::supported_input_configs(self) } fn supported_output_configs( &self, ) -> Result { Device::supported_output_configs(self) } fn default_input_config(&self) -> Result { Device::default_input_config(self) } fn default_output_config(&self) -> Result { Device::default_output_config(self) } fn build_input_stream_raw( &self, config: &StreamConfig, sample_format: SampleFormat, data_callback: D, error_callback: E, timeout: Option, ) -> Result where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { Device::build_input_stream_raw( self, config, sample_format, data_callback, error_callback, timeout, ) } fn build_output_stream_raw( &self, config: &StreamConfig, sample_format: SampleFormat, data_callback: D, error_callback: E, timeout: Option, ) -> Result where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { Device::build_output_stream_raw( self, config, sample_format, data_callback, error_callback, timeout, ) } } impl StreamTrait for Stream { fn play(&self) -> Result<(), PlayStreamError> { Stream::play(self) } fn pause(&self) -> Result<(), PauseStreamError> { Stream::pause(self) } } cpal-0.15.2/src/host/asio/stream.rs000064400000000000000000000646431046102023000152050ustar 00000000000000extern crate asio_sys as sys; extern crate num_traits; use self::num_traits::PrimInt; use super::parking_lot::Mutex; use super::Device; use crate::{ BackendSpecificError, BufferSize, BuildStreamError, Data, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat, SizedSample, StreamConfig, StreamError, }; use std; use std::sync::atomic::{AtomicBool, 
Ordering}; use std::sync::Arc; use std::time::Duration; // Used to keep track of whether or not the current asio stream buffer requires // being silencing before summing audio. #[derive(Default)] struct SilenceAsioBuffer { first: bool, second: bool, } pub struct Stream { playing: Arc, // Ensure the `Driver` does not terminate until the last stream is dropped. driver: Arc, asio_streams: Arc>, callback_id: sys::CallbackId, } impl Stream { pub fn play(&self) -> Result<(), PlayStreamError> { self.playing.store(true, Ordering::SeqCst); Ok(()) } pub fn pause(&self) -> Result<(), PauseStreamError> { self.playing.store(false, Ordering::SeqCst); Ok(()) } } impl Device { pub fn build_input_stream_raw( &self, config: &StreamConfig, sample_format: SampleFormat, mut data_callback: D, _error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let stream_type = self.driver.input_data_type().map_err(build_stream_err)?; // Ensure that the desired sample type is supported. let expected_sample_format = super::device::convert_data_type(&stream_type) .ok_or(BuildStreamError::StreamConfigNotSupported)?; if sample_format != expected_sample_format { return Err(BuildStreamError::StreamConfigNotSupported); } let num_channels = config.channels.clone(); let buffer_size = self.get_or_create_input_stream(config, sample_format)?; let cpal_num_samples = buffer_size * num_channels as usize; // Create the buffer depending on the size of the data type. let len_bytes = cpal_num_samples * sample_format.sample_size(); let mut interleaved = vec![0u8; len_bytes]; let stream_playing = Arc::new(AtomicBool::new(false)); let playing = Arc::clone(&stream_playing); let asio_streams = self.asio_streams.clone(); // Set the input callback. // This is most performance critical part of the ASIO bindings. 
let config = config.clone(); let callback_id = self.driver.add_callback(move |callback_info| unsafe { // If not playing return early. if !playing.load(Ordering::SeqCst) { return; } // There is 0% chance of lock contention the host only locks when recreating streams. let stream_lock = asio_streams.lock(); let ref asio_stream = match stream_lock.input { Some(ref asio_stream) => asio_stream, None => return, }; /// 1. Write from the ASIO buffer to the interleaved CPAL buffer. /// 2. Deliver the CPAL buffer to the user callback. unsafe fn process_input_callback( data_callback: &mut D, interleaved: &mut [u8], asio_stream: &sys::AsioStream, asio_info: &sys::CallbackInfo, sample_rate: crate::SampleRate, from_endianness: F, ) where A: SizedSample, D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, F: Fn(A) -> A, { // 1. Write the ASIO channels to the CPAL buffer. let interleaved: &mut [A] = cast_slice_mut(interleaved); let n_frames = asio_stream.buffer_size as usize; let n_channels = interleaved.len() / n_frames; let buffer_index = asio_info.buffer_index as usize; for ch_ix in 0..n_channels { let asio_channel = asio_channel_slice::(asio_stream, buffer_index, ch_ix); for (frame, s_asio) in interleaved.chunks_mut(n_channels).zip(asio_channel) { frame[ch_ix] = from_endianness(*s_asio); } } // 2. Deliver the interleaved buffer to the callback. 
let data = interleaved.as_mut_ptr() as *mut (); let len = interleaved.len(); let data = Data::from_parts(data, len, A::FORMAT); let callback = system_time_to_stream_instant(asio_info.system_time); let delay = frames_to_duration(n_frames, sample_rate); let capture = callback .sub(delay) .expect("`capture` occurs before origin of alsa `StreamInstant`"); let timestamp = crate::InputStreamTimestamp { callback, capture }; let info = InputCallbackInfo { timestamp }; data_callback(&data, &info); } match (&stream_type, sample_format) { (&sys::AsioSampleType::ASIOSTInt16LSB, SampleFormat::I16) => { process_input_callback::( &mut data_callback, &mut interleaved, asio_stream, callback_info, config.sample_rate, from_le, ); } (&sys::AsioSampleType::ASIOSTInt16MSB, SampleFormat::I16) => { process_input_callback::( &mut data_callback, &mut interleaved, asio_stream, callback_info, config.sample_rate, from_be, ); } // TODO: Handle endianness conversion for floats? We currently use the `PrimInt` // trait for the `to_le` and `to_be` methods, but this does not support floats. (&sys::AsioSampleType::ASIOSTFloat32LSB, SampleFormat::F32) | (&sys::AsioSampleType::ASIOSTFloat32MSB, SampleFormat::F32) => { process_input_callback::( &mut data_callback, &mut interleaved, asio_stream, callback_info, config.sample_rate, std::convert::identity::, ); } (&sys::AsioSampleType::ASIOSTInt32LSB, SampleFormat::I32) => { process_input_callback::( &mut data_callback, &mut interleaved, asio_stream, callback_info, config.sample_rate, from_le, ); } (&sys::AsioSampleType::ASIOSTInt32MSB, SampleFormat::I32) => { process_input_callback::( &mut data_callback, &mut interleaved, asio_stream, callback_info, config.sample_rate, from_be, ); } // TODO: Handle endianness conversion for floats? We currently use the `PrimInt` // trait for the `to_le` and `to_be` methods, but this does not support floats. 
(&sys::AsioSampleType::ASIOSTFloat64LSB, SampleFormat::F64) | (&sys::AsioSampleType::ASIOSTFloat64MSB, SampleFormat::F64) => { process_input_callback::( &mut data_callback, &mut interleaved, asio_stream, callback_info, config.sample_rate, std::convert::identity::, ); } unsupported_format_pair => unreachable!( "`build_input_stream_raw` should have returned with unsupported \ format {:?}", unsupported_format_pair ), } }); let driver = self.driver.clone(); let asio_streams = self.asio_streams.clone(); // Immediately start the device? self.driver.start().map_err(build_stream_err)?; Ok(Stream { playing: stream_playing, driver, asio_streams, callback_id, }) } pub fn build_output_stream_raw( &self, config: &StreamConfig, sample_format: SampleFormat, mut data_callback: D, _error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let stream_type = self.driver.output_data_type().map_err(build_stream_err)?; // Ensure that the desired sample type is supported. let expected_sample_format = super::device::convert_data_type(&stream_type) .ok_or(BuildStreamError::StreamConfigNotSupported)?; if sample_format != expected_sample_format { return Err(BuildStreamError::StreamConfigNotSupported); } let num_channels = config.channels.clone(); let buffer_size = self.get_or_create_output_stream(config, sample_format)?; let cpal_num_samples = buffer_size * num_channels as usize; // Create buffers depending on data type. let len_bytes = cpal_num_samples * sample_format.sample_size(); let mut interleaved = vec![0u8; len_bytes]; let mut silence_asio_buffer = SilenceAsioBuffer::default(); let stream_playing = Arc::new(AtomicBool::new(false)); let playing = Arc::clone(&stream_playing); let asio_streams = self.asio_streams.clone(); let config = config.clone(); let callback_id = self.driver.add_callback(move |callback_info| unsafe { // If not playing, return early. 
if !playing.load(Ordering::SeqCst) { return; } // There is 0% chance of lock contention the host only locks when recreating streams. let stream_lock = asio_streams.lock(); let ref asio_stream = match stream_lock.output { Some(ref asio_stream) => asio_stream, None => return, }; // Silence the ASIO buffer that is about to be used. // // This checks if any other callbacks have already silenced the buffer associated with // the current `buffer_index`. // // If not, we will silence it and set the opposite buffer half to unsilenced. let silence = match callback_info.buffer_index { 0 if !silence_asio_buffer.first => { silence_asio_buffer.first = true; silence_asio_buffer.second = false; true } 0 => false, 1 if !silence_asio_buffer.second => { silence_asio_buffer.second = true; silence_asio_buffer.first = false; true } 1 => false, _ => unreachable!("ASIO uses a double-buffer so there should only be 2"), }; /// 1. Render the given callback to the given buffer of interleaved samples. /// 2. If required, silence the ASIO buffer. /// 3. Finally, write the interleaved data to the non-interleaved ASIO buffer, /// performing endianness conversions as necessary. unsafe fn process_output_callback( data_callback: &mut D, interleaved: &mut [u8], silence_asio_buffer: bool, asio_stream: &sys::AsioStream, asio_info: &sys::CallbackInfo, sample_rate: crate::SampleRate, to_endianness: F, ) where A: SizedSample + std::ops::Add, D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, F: Fn(A) -> A, { // 1. Render interleaved buffer from callback. 
let interleaved: &mut [A] = cast_slice_mut(interleaved); let data = interleaved.as_mut_ptr() as *mut (); let len = interleaved.len(); let mut data = Data::from_parts(data, len, A::FORMAT); let callback = system_time_to_stream_instant(asio_info.system_time); let n_frames = asio_stream.buffer_size as usize; let delay = frames_to_duration(n_frames, sample_rate); let playback = callback .add(delay) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); let timestamp = crate::OutputStreamTimestamp { callback, playback }; let info = OutputCallbackInfo { timestamp }; data_callback(&mut data, &info); // 2. Silence ASIO channels if necessary. let n_channels = interleaved.len() / n_frames; let buffer_index = asio_info.buffer_index as usize; if silence_asio_buffer { for ch_ix in 0..n_channels { let asio_channel = asio_channel_slice_mut::(asio_stream, buffer_index, ch_ix); asio_channel .iter_mut() .for_each(|s| *s = to_endianness(A::EQUILIBRIUM)); } } // 3. Write interleaved samples to ASIO channels, one channel at a time. for ch_ix in 0..n_channels { let asio_channel = asio_channel_slice_mut::(asio_stream, buffer_index, ch_ix); for (frame, s_asio) in interleaved.chunks(n_channels).zip(asio_channel) { *s_asio = *s_asio + to_endianness(A::from_sample(frame[ch_ix])); } } } match (sample_format, &stream_type) { (SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt16LSB) => { process_output_callback::( &mut data_callback, &mut interleaved, silence, asio_stream, callback_info, config.sample_rate, to_le, ); } (SampleFormat::I16, &sys::AsioSampleType::ASIOSTInt16MSB) => { process_output_callback::( &mut data_callback, &mut interleaved, silence, asio_stream, callback_info, config.sample_rate, to_be, ); } // TODO: Handle endianness conversion for floats? We currently use the `PrimInt` // trait for the `to_le` and `to_be` methods, but this does not support floats. 
(SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat32LSB) | (SampleFormat::F32, &sys::AsioSampleType::ASIOSTFloat32MSB) => { process_output_callback::( &mut data_callback, &mut interleaved, silence, asio_stream, callback_info, config.sample_rate, std::convert::identity::, ); } (SampleFormat::I32, &sys::AsioSampleType::ASIOSTInt32LSB) => { process_output_callback::( &mut data_callback, &mut interleaved, silence, asio_stream, callback_info, config.sample_rate, to_le, ); } (SampleFormat::I32, &sys::AsioSampleType::ASIOSTInt32MSB) => { process_output_callback::( &mut data_callback, &mut interleaved, silence, asio_stream, callback_info, config.sample_rate, to_be, ); } // TODO: Handle endianness conversion for floats? We currently use the `PrimInt` // trait for the `to_le` and `to_be` methods, but this does not support floats. (SampleFormat::F64, &sys::AsioSampleType::ASIOSTFloat64LSB) | (SampleFormat::F64, &sys::AsioSampleType::ASIOSTFloat64MSB) => { process_output_callback::( &mut data_callback, &mut interleaved, silence, asio_stream, callback_info, config.sample_rate, std::convert::identity::, ); } unsupported_format_pair => unreachable!( "`build_output_stream_raw` should have returned with unsupported \ format {:?}", unsupported_format_pair ), } }); let driver = self.driver.clone(); let asio_streams = self.asio_streams.clone(); // Immediately start the device? self.driver.start().map_err(build_stream_err)?; Ok(Stream { playing: stream_playing, driver, asio_streams, callback_id, }) } /// Create a new CPAL Input Stream. /// /// If there is no existing ASIO Input Stream it will be created. /// /// On success, the buffer size of the stream is returned. 
fn get_or_create_input_stream( &self, config: &StreamConfig, sample_format: SampleFormat, ) -> Result { match self.default_input_config() { Ok(f) => { let num_asio_channels = f.channels; check_config(&self.driver, config, sample_format, num_asio_channels) } Err(_) => Err(BuildStreamError::StreamConfigNotSupported), }?; let num_channels = config.channels as usize; let ref mut streams = *self.asio_streams.lock(); let buffer_size = match config.buffer_size { BufferSize::Fixed(v) => Some(v as i32), BufferSize::Default => None, }; // Either create a stream if thers none or had back the // size of the current one. match streams.input { Some(ref input) => Ok(input.buffer_size as usize), None => { let output = streams.output.take(); self.driver .prepare_input_stream(output, num_channels, buffer_size) .map(|new_streams| { let bs = match new_streams.input { Some(ref inp) => inp.buffer_size as usize, None => unreachable!(), }; *streams = new_streams; bs }) .map_err(|ref e| { println!("Error preparing stream: {}", e); BuildStreamError::DeviceNotAvailable }) } } } /// Create a new CPAL Output Stream. /// /// If there is no existing ASIO Output Stream it will be created. fn get_or_create_output_stream( &self, config: &StreamConfig, sample_format: SampleFormat, ) -> Result { match self.default_output_config() { Ok(f) => { let num_asio_channels = f.channels; check_config(&self.driver, config, sample_format, num_asio_channels) } Err(_) => Err(BuildStreamError::StreamConfigNotSupported), }?; let num_channels = config.channels as usize; let ref mut streams = *self.asio_streams.lock(); let buffer_size = match config.buffer_size { BufferSize::Fixed(v) => Some(v as i32), BufferSize::Default => None, }; // Either create a stream if thers none or had back the // size of the current one. 
match streams.output { Some(ref output) => Ok(output.buffer_size as usize), None => { let input = streams.input.take(); self.driver .prepare_output_stream(input, num_channels, buffer_size) .map(|new_streams| { let bs = match new_streams.output { Some(ref out) => out.buffer_size as usize, None => unreachable!(), }; *streams = new_streams; bs }) .map_err(|ref e| { println!("Error preparing stream: {}", e); BuildStreamError::DeviceNotAvailable }) } } } } impl Drop for Stream { fn drop(&mut self) { self.driver.remove_callback(self.callback_id); } } fn asio_ns_to_double(val: sys::bindings::asio_import::ASIOTimeStamp) -> f64 { let two_raised_to_32 = 4294967296.0; val.lo as f64 + val.hi as f64 * two_raised_to_32 } /// Asio retrieves system time via `timeGetTime` which returns the time in milliseconds. fn system_time_to_stream_instant( system_time: sys::bindings::asio_import::ASIOTimeStamp, ) -> crate::StreamInstant { let systime_ns = asio_ns_to_double(system_time); let secs = systime_ns as i64 / 1_000_000_000; let nanos = (systime_ns as i64 - secs * 1_000_000_000) as u32; crate::StreamInstant::new(secs, nanos) } /// Convert the given duration in frames at the given sample rate to a `std::time::Duration`. fn frames_to_duration(frames: usize, rate: crate::SampleRate) -> std::time::Duration { let secsf = frames as f64 / rate.0 as f64; let secs = secsf as u64; let nanos = ((secsf - secs as f64) * 1_000_000_000.0) as u32; std::time::Duration::new(secs, nanos) } /// Check whether or not the desired config is supported by the stream. /// /// Checks sample rate, data type and then finally the number of channels. fn check_config( driver: &sys::Driver, config: &StreamConfig, sample_format: SampleFormat, num_asio_channels: u16, ) -> Result<(), BuildStreamError> { let StreamConfig { channels, sample_rate, buffer_size: _, } = config; // Try and set the sample rate to what the user selected. 
    let sample_rate = sample_rate.0.into();
    // Only change the driver's rate when it differs, and only if the driver
    // reports that it can handle the requested rate.
    if sample_rate != driver.sample_rate().map_err(build_stream_err)? {
        if driver
            .can_sample_rate(sample_rate)
            .map_err(build_stream_err)?
        {
            driver
                .set_sample_rate(sample_rate)
                .map_err(build_stream_err)?;
        } else {
            return Err(BuildStreamError::StreamConfigNotSupported);
        }
    }
    // unsigned formats are not supported by asio
    match sample_format {
        SampleFormat::I16 | SampleFormat::I32 | SampleFormat::F32 => (),
        _ => return Err(BuildStreamError::StreamConfigNotSupported),
    }
    // More channels than the hardware exposes cannot be satisfied.
    if *channels > num_asio_channels {
        return Err(BuildStreamError::StreamConfigNotSupported);
    }
    Ok(())
}

/// Cast a byte slice into a mutable slice of desired type.
///
/// Safety: it's up to the caller to ensure that the input slice has valid bit representations.
unsafe fn cast_slice_mut(v: &mut [u8]) -> &mut [T] {
    // The byte length must be an exact multiple of the target element size.
    debug_assert!(v.len() % std::mem::size_of::() == 0);
    std::slice::from_raw_parts_mut(v.as_mut_ptr() as *mut T, v.len() / std::mem::size_of::())
}

/// Helper function to convert to little endianness.
fn to_le(t: T) -> T {
    t.to_le()
}

/// Helper function to convert to big endianness.
fn to_be(t: T) -> T {
    t.to_be()
}

/// Helper function to convert from little endianness.
fn from_le(t: T) -> T {
    T::from_le(t)
}

/// Helper function to convert from big endianness.
fn from_be(t: T) -> T {
    T::from_be(t)
}

/// Shorthand for retrieving the asio buffer slice associated with a channel.
///
/// Safety: it's up to the user to ensure that this function is not called multiple times for the
/// same channel.
unsafe fn asio_channel_slice(
    asio_stream: &sys::AsioStream,
    buffer_index: usize,
    channel_index: usize,
) -> &[T] {
    // Immutable view implemented in terms of the mutable accessor below.
    asio_channel_slice_mut(asio_stream, buffer_index, channel_index)
}

/// Shorthand for retrieving the asio buffer slice associated with a channel.
///
/// Safety: it's up to the user to ensure that this function is not called multiple times for the
/// same channel.
unsafe fn asio_channel_slice_mut(
    asio_stream: &sys::AsioStream,
    buffer_index: usize,
    channel_index: usize,
) -> &mut [T] {
    // ASIO double-buffers per channel: `buffer_index` selects the half currently
    // handed to us by the driver.
    let buff_ptr: *mut T =
        asio_stream.buffer_infos[channel_index].buffers[buffer_index as usize] as *mut _;
    std::slice::from_raw_parts_mut(buff_ptr, asio_stream.buffer_size as usize)
}

/// Map low-level ASIO driver errors onto CPAL's `BuildStreamError` variants.
fn build_stream_err(e: sys::AsioError) -> BuildStreamError {
    match e {
        sys::AsioError::NoDrivers | sys::AsioError::HardwareMalfunction => {
            BuildStreamError::DeviceNotAvailable
        }
        sys::AsioError::InvalidInput | sys::AsioError::BadMode => BuildStreamError::InvalidArgument,
        // Everything else is surfaced as a backend-specific error string.
        err => {
            let description = format!("{}", err);
            BackendSpecificError { description }.into()
        }
    }
}
cpal-0.15.2/src/host/coreaudio/ios/enumerate.rs000064400000000000000000000016161046102023000174770ustar 00000000000000use std::vec::IntoIter as VecIntoIter;

use crate::DevicesError;
use crate::SupportedStreamConfigRange;

use super::Device;

pub type SupportedInputConfigs = ::std::vec::IntoIter;
pub type SupportedOutputConfigs = ::std::vec::IntoIter;

// TODO: Support enumerating earpiece vs headset vs speaker etc?
pub struct Devices(VecIntoIter);

impl Devices {
    pub fn new() -> Result {
        Ok(Self::default())
    }
}

impl Default for Devices {
    fn default() -> Devices {
        // iOS exposes only the single default device here.
        Devices(vec![Device].into_iter())
    }
}

impl Iterator for Devices {
    type Item = Device;

    #[inline]
    fn next(&mut self) -> Option {
        self.0.next()
    }
}

#[inline]
pub fn default_input_device() -> Option {
    Some(Device)
}

#[inline]
pub fn default_output_device() -> Option {
    Some(Device)
}
cpal-0.15.2/src/host/coreaudio/ios/mod.rs000064400000000000000000000343021046102023000162670ustar 00000000000000//!
//! coreaudio on iOS looks a bit different from macOS. A lot of configuration needs to use
//! the AVAudioSession objc API which doesn't exist on macOS.
//!
//! TODO:
//! - Use AVAudioSession to enumerate buffer size / sample rate / number of channels and set
//!   buffer size.
//!
extern crate core_foundation_sys;
extern crate coreaudio;

use std::cell::RefCell;

use self::coreaudio::audio_unit::render_callback::data;
use self::coreaudio::audio_unit::{render_callback, AudioUnit, Element, Scope};
use self::coreaudio::sys::{
    kAudioOutputUnitProperty_EnableIO, kAudioUnitProperty_StreamFormat, AudioBuffer,
    AudioStreamBasicDescription,
};

use super::{asbd_from_config, frames_to_duration, host_time_to_stream_instant};
use crate::traits::{DeviceTrait, HostTrait, StreamTrait};
use crate::{
    BackendSpecificError, BufferSize, BuildStreamError, Data, DefaultStreamConfigError,
    DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError,
    PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedBufferSize,
    SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError,
};

use self::enumerate::{
    default_input_device, default_output_device, Devices, SupportedInputConfigs,
    SupportedOutputConfigs,
};
use std::slice;
use std::time::Duration;

pub mod enumerate;

// These days the default of iOS is now F32 and no longer I16
const SUPPORTED_SAMPLE_FORMAT: SampleFormat = SampleFormat::F32;

/// The single (default) iOS audio device.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Device;

pub struct Host;

impl Host {
    pub fn new() -> Result {
        Ok(Host)
    }
}

impl HostTrait for Host {
    type Devices = Devices;
    type Device = Device;

    fn is_available() -> bool {
        true
    }

    fn devices(&self) -> Result {
        Devices::new()
    }

    fn default_input_device(&self) -> Option {
        default_input_device()
    }

    fn default_output_device(&self) -> Option {
        default_output_device()
    }
}

impl Device {
    #[inline]
    fn name(&self) -> Result {
        // There is only one device on iOS; give it a fixed name.
        Ok("Default Device".to_owned())
    }

    #[inline]
    fn supported_input_configs(
        &self,
    ) -> Result {
        // TODO: query AVAudioSession for parameters, some values like sample rate and buffer size
        // probably need to actually be set to see if it works, but channels can be enumerated.
        let asbd: AudioStreamBasicDescription = default_input_asbd()?;
        let stream_config = stream_config_from_asbd(asbd);
        // Input: exactly one config, mirroring the hardware's default stream format.
        Ok(vec![SupportedStreamConfigRange {
            channels: stream_config.channels,
            min_sample_rate: stream_config.sample_rate,
            max_sample_rate: stream_config.sample_rate,
            buffer_size: stream_config.buffer_size.clone(),
            sample_format: SUPPORTED_SAMPLE_FORMAT,
        }]
        .into_iter())
    }

    #[inline]
    fn supported_output_configs(
        &self,
    ) -> Result {
        // TODO: query AVAudioSession for parameters, some values like sample rate and buffer size
        // probably need to actually be set to see if it works, but channels can be enumerated.
        let asbd: AudioStreamBasicDescription = default_output_asbd()?;
        let stream_config = stream_config_from_asbd(asbd);

        // Output: one config per channel count, from mono up to the hardware's
        // channels-per-frame.
        let configs: Vec<_> = (1..=asbd.mChannelsPerFrame as u16)
            .map(|channels| SupportedStreamConfigRange {
                channels,
                min_sample_rate: stream_config.sample_rate,
                max_sample_rate: stream_config.sample_rate,
                buffer_size: stream_config.buffer_size.clone(),
                sample_format: SUPPORTED_SAMPLE_FORMAT,
            })
            .collect();
        Ok(configs.into_iter())
    }

    #[inline]
    fn default_input_config(&self) -> Result {
        let asbd: AudioStreamBasicDescription = default_input_asbd()?;
        let stream_config = stream_config_from_asbd(asbd);
        Ok(stream_config)
    }

    #[inline]
    fn default_output_config(&self) -> Result {
        let asbd: AudioStreamBasicDescription = default_output_asbd()?;
        let stream_config = stream_config_from_asbd(asbd);
        Ok(stream_config)
    }
}

impl DeviceTrait for Device {
    type SupportedInputConfigs = SupportedInputConfigs;
    type SupportedOutputConfigs = SupportedOutputConfigs;
    type Stream = Stream;

    #[inline]
    fn name(&self) -> Result {
        Device::name(self)
    }

    #[inline]
    fn supported_input_configs(
        &self,
    ) -> Result {
        Device::supported_input_configs(self)
    }

    #[inline]
    fn supported_output_configs(
        &self,
    ) -> Result {
        Device::supported_output_configs(self)
    }

    #[inline]
    fn default_input_config(&self) -> Result {
        Device::default_input_config(self)
    }

    #[inline]
    fn default_output_config(&self) -> Result {
        Device::default_output_config(self)
    }

    fn build_input_stream_raw(
        &self,
        config: &StreamConfig,
        sample_format: SampleFormat,
        mut data_callback: D,
        mut error_callback: E,
        _timeout: Option,
    ) -> Result
    where
        D: FnMut(&Data, &InputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static,
    {
        // The scope and element for working with a device's input stream.
        let scope = Scope::Output;
        let element = Element::Input;

        // Recording requires reconfiguring IO on the audio unit, which must be
        // done while it is uninitialized.
        let mut audio_unit = create_audio_unit()?;
        audio_unit.uninitialize()?;
        configure_for_recording(&mut audio_unit)?;
        audio_unit.initialize()?;

        // Set the stream in interleaved mode.
        let asbd = asbd_from_config(config, sample_format);
        audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;

        // Set the buffersize — fixed buffer sizes are not supported on this backend.
        match config.buffer_size {
            BufferSize::Fixed(_) => {
                return Err(BuildStreamError::StreamConfigNotSupported);
            }
            BufferSize::Default => (),
        }

        // Register the callback that is being called by coreaudio whenever it needs data to be
        // fed to the audio buffer.
        let bytes_per_channel = sample_format.sample_size();
        let sample_rate = config.sample_rate;
        type Args = render_callback::Args;
        audio_unit.set_input_callback(move |args: Args| unsafe {
            let ptr = (*args.data.data).mBuffers.as_ptr() as *const AudioBuffer;
            let len = (*args.data.data).mNumberBuffers as usize;
            let buffers: &[AudioBuffer] = slice::from_raw_parts(ptr, len);

            // There is only 1 buffer when using interleaved channels
            let AudioBuffer {
                mNumberChannels: channels,
                mDataByteSize: data_byte_size,
                mData: data,
            } = buffers[0];

            let data = data as *mut ();
            let len = (data_byte_size as usize / bytes_per_channel) as usize;
            let data = Data::from_parts(data, len, sample_format);

            // TODO: Need a better way to get delay, for now we assume a double-buffer offset.
            let callback = match host_time_to_stream_instant(args.time_stamp.mHostTime) {
                Err(err) => {
                    error_callback(err.into());
                    return Err(());
                }
                Ok(cb) => cb,
            };
            // Capture time is estimated as one buffer-duration before the callback time.
            let buffer_frames = len / channels as usize;
            let delay = frames_to_duration(buffer_frames, sample_rate);
            let capture = callback
                .sub(delay)
                .expect("`capture` occurs before origin of alsa `StreamInstant`");
            let timestamp = crate::InputStreamTimestamp { callback, capture };

            let info = InputCallbackInfo { timestamp };
            data_callback(&data, &info);
            Ok(())
        })?;

        audio_unit.start()?;

        Ok(Stream::new(StreamInner {
            playing: true,
            audio_unit,
        }))
    }

    /// Create an output stream.
    fn build_output_stream_raw(
        &self,
        config: &StreamConfig,
        sample_format: SampleFormat,
        mut data_callback: D,
        mut error_callback: E,
        _timeout: Option,
    ) -> Result
    where
        D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static,
    {
        // Fixed buffer sizes are not supported on this backend.
        match config.buffer_size {
            BufferSize::Fixed(_) => {
                return Err(BuildStreamError::StreamConfigNotSupported);
            }
            BufferSize::Default => (),
        };

        let mut audio_unit = create_audio_unit()?;

        // The scope and element for working with a device's output stream.
        let scope = Scope::Input;
        let element = Element::Output;

        // Set the stream in interleaved mode.
        let asbd = asbd_from_config(config, sample_format);
        audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;

        // Register the callback that is being called by coreaudio whenever it needs data to be
        // fed to the audio buffer.
        let bytes_per_channel = sample_format.sample_size();
        let sample_rate = config.sample_rate;
        type Args = render_callback::Args;
        audio_unit.set_render_callback(move |args: Args| unsafe {
            // If `run()` is currently running, then a callback will be available from this list.
            // Otherwise, we just fill the buffer with zeroes and return.
            let AudioBuffer {
                mNumberChannels: channels,
                mDataByteSize: data_byte_size,
                mData: data,
            } = (*args.data.data).mBuffers[0];

            let data = data as *mut ();
            let len = (data_byte_size as usize / bytes_per_channel) as usize;
            let mut data = Data::from_parts(data, len, sample_format);

            let callback = match host_time_to_stream_instant(args.time_stamp.mHostTime) {
                Err(err) => {
                    error_callback(err.into());
                    return Err(());
                }
                Ok(cb) => cb,
            };
            // TODO: Need a better way to get delay, for now we assume a double-buffer offset.
            let buffer_frames = len / channels as usize;
            let delay = frames_to_duration(buffer_frames, sample_rate);
            let playback = callback
                .add(delay)
                .expect("`playback` occurs beyond representation supported by `StreamInstant`");
            let timestamp = crate::OutputStreamTimestamp { callback, playback };

            let info = OutputCallbackInfo { timestamp };
            data_callback(&mut data, &info);
            Ok(())
        })?;

        audio_unit.start()?;

        Ok(Stream::new(StreamInner {
            playing: true,
            audio_unit,
        }))
    }
}

pub struct Stream {
    inner: RefCell,
}

impl Stream {
    fn new(inner: StreamInner) -> Self {
        Self {
            inner: RefCell::new(inner),
        }
    }
}

impl StreamTrait for Stream {
    fn play(&self) -> Result<(), PlayStreamError> {
        let mut stream = self.inner.borrow_mut();

        // Starting is idempotent: only act when currently paused.
        if !stream.playing {
            if let Err(e) = stream.audio_unit.start() {
                let description = format!("{}", e);
                let err = BackendSpecificError { description };
                return Err(err.into());
            }
            stream.playing = true;
        }
        Ok(())
    }

    fn pause(&self) -> Result<(), PauseStreamError> {
        let mut stream = self.inner.borrow_mut();

        // Pausing is idempotent: only act when currently playing.
        if stream.playing {
            if let Err(e) = stream.audio_unit.stop() {
                let description = format!("{}", e);
                let err = BackendSpecificError { description };
                return Err(err.into());
            }
            stream.playing = false;
        }
        Ok(())
    }
}

struct StreamInner {
    playing: bool,
    audio_unit: AudioUnit,
}

fn create_audio_unit() -> Result {
    AudioUnit::new(coreaudio::audio_unit::IOType::RemoteIO)
}

fn configure_for_recording(audio_unit: &mut AudioUnit) -> Result<(), coreaudio::Error> {
    // Enable mic recording
    let enable_input = 1u32;
    audio_unit.set_property(
        kAudioOutputUnitProperty_EnableIO,
        Scope::Input,
        Element::Input,
        Some(&enable_input),
    )?;

    // Disable output
    let disable_output = 0u32;
    audio_unit.set_property(
        kAudioOutputUnitProperty_EnableIO,
        Scope::Output,
        Element::Output,
        Some(&disable_output),
    )?;

    Ok(())
}

/// Query the default output stream format (ASBD) from a fresh RemoteIO audio unit.
fn default_output_asbd() -> Result {
    let audio_unit = create_audio_unit()?;
    let id = kAudioUnitProperty_StreamFormat;
    let asbd: AudioStreamBasicDescription =
        audio_unit.get_property(id, Scope::Output, Element::Output)?;
    Ok(asbd)
}

/// Query the default input stream format (ASBD); requires reconfiguring the
/// unit for recording first.
fn default_input_asbd() -> Result {
    let mut audio_unit = create_audio_unit()?;
    audio_unit.uninitialize()?;
    configure_for_recording(&mut audio_unit)?;
    audio_unit.initialize()?;
    let id = kAudioUnitProperty_StreamFormat;
    let asbd: AudioStreamBasicDescription =
        audio_unit.get_property(id, Scope::Input, Element::Input)?;
    Ok(asbd)
}

fn stream_config_from_asbd(asbd: AudioStreamBasicDescription) -> SupportedStreamConfig {
    // Buffer size range is not known here; 0..0 acts as a placeholder.
    let buffer_size = SupportedBufferSize::Range { min: 0, max: 0 };
    SupportedStreamConfig {
        channels: asbd.mChannelsPerFrame as u16,
        sample_rate: SampleRate(asbd.mSampleRate as u32),
        buffer_size: buffer_size.clone(),
        sample_format: SUPPORTED_SAMPLE_FORMAT,
    }
}
cpal-0.15.2/src/host/coreaudio/macos/enumerate.rs000064400000000000000000000106301046102023000200030ustar 00000000000000extern crate coreaudio;

use self::coreaudio::sys::{
    kAudioHardwareNoError, kAudioHardwarePropertyDefaultInputDevice,
    kAudioHardwarePropertyDefaultOutputDevice, kAudioHardwarePropertyDevices,
    kAudioObjectPropertyElementMaster, kAudioObjectPropertyScopeGlobal, kAudioObjectSystemObject,
    AudioDeviceID, AudioObjectGetPropertyData, AudioObjectGetPropertyDataSize,
    AudioObjectPropertyAddress, OSStatus,
};
use super::Device;
use crate::{BackendSpecificError, DevicesError, SupportedStreamConfigRange};
use std::mem;
use std::ptr::null;
use std::vec::IntoIter as VecIntoIter;

/// Enumerate all audio device IDs known to the system object.
///
/// Safety: calls into the raw CoreAudio C API.
unsafe fn audio_devices() -> Result, OSStatus> {
    let property_address =
    AudioObjectPropertyAddress {
        mSelector: kAudioHardwarePropertyDevices,
        mScope: kAudioObjectPropertyScopeGlobal,
        mElement: kAudioObjectPropertyElementMaster,
    };

    macro_rules! try_status_or_return {
        ($status:expr) => {
            if $status != kAudioHardwareNoError as i32 {
                return Err($status);
            }
        };
    }

    // First query the size of the device list, then fetch the list itself.
    let data_size = 0u32;
    let status = AudioObjectGetPropertyDataSize(
        kAudioObjectSystemObject,
        &property_address as *const _,
        0,
        null(),
        &data_size as *const _ as *mut _,
    );
    try_status_or_return!(status);

    let device_count = data_size / mem::size_of::() as u32;
    let mut audio_devices = vec![];
    audio_devices.reserve_exact(device_count as usize);

    let status = AudioObjectGetPropertyData(
        kAudioObjectSystemObject,
        &property_address as *const _,
        0,
        null(),
        &data_size as *const _ as *mut _,
        audio_devices.as_mut_ptr() as *mut _,
    );
    try_status_or_return!(status);

    // The OS wrote `device_count` elements directly into the vector's buffer.
    audio_devices.set_len(device_count as usize);

    Ok(audio_devices)
}

pub struct Devices(VecIntoIter);

impl Devices {
    pub fn new() -> Result {
        let devices = unsafe {
            match audio_devices() {
                Ok(devices) => devices,
                Err(os_status) => {
                    let description = format!("{}", os_status);
                    let err = BackendSpecificError { description };
                    return Err(err.into());
                }
            }
        };
        Ok(Devices(devices.into_iter()))
    }
}

unsafe impl Send for Devices {}
unsafe impl Sync for Devices {}

impl Iterator for Devices {
    type Item = Device;
    fn next(&mut self) -> Option {
        self.0.next().map(|id| Device {
            audio_device_id: id,
            is_default: false,
        })
    }
}

pub fn default_input_device() -> Option {
    let property_address = AudioObjectPropertyAddress {
        mSelector: kAudioHardwarePropertyDefaultInputDevice,
        mScope: kAudioObjectPropertyScopeGlobal,
        mElement: kAudioObjectPropertyElementMaster,
    };

    let audio_device_id: AudioDeviceID = 0;
    let data_size = mem::size_of::();
    let status = unsafe {
        AudioObjectGetPropertyData(
            kAudioObjectSystemObject,
            &property_address as *const _,
            0,
            null(),
            &data_size as *const _ as *mut _,
            &audio_device_id as *const _ as *mut _,
        )
    };
    if status != kAudioHardwareNoError as i32 {
        return None;
    }

    let device = Device {
        audio_device_id,
        is_default: true,
    };
    Some(device)
}

pub fn default_output_device() -> Option {
    let property_address = AudioObjectPropertyAddress {
        mSelector: kAudioHardwarePropertyDefaultOutputDevice,
        mScope: kAudioObjectPropertyScopeGlobal,
        mElement: kAudioObjectPropertyElementMaster,
    };

    let audio_device_id: AudioDeviceID = 0;
    let data_size = mem::size_of::();
    let status = unsafe {
        AudioObjectGetPropertyData(
            kAudioObjectSystemObject,
            &property_address as *const _,
            0,
            null(),
            &data_size as *const _ as *mut _,
            &audio_device_id as *const _ as *mut _,
        )
    };
    if status != kAudioHardwareNoError as i32 {
        return None;
    }

    let device = Device {
        audio_device_id,
        is_default: true,
    };
    Some(device)
}

pub type SupportedInputConfigs = VecIntoIter;
pub type SupportedOutputConfigs = VecIntoIter;
cpal-0.15.2/src/host/coreaudio/macos/mod.rs000064400000000000000000001030101046102023000165700ustar 00000000000000extern crate core_foundation_sys;
extern crate coreaudio;

use super::{asbd_from_config, check_os_status, frames_to_duration, host_time_to_stream_instant};

use self::core_foundation_sys::string::{CFStringGetCString, CFStringGetCStringPtr, CFStringRef};
use self::coreaudio::audio_unit::render_callback::{self, data};
use self::coreaudio::audio_unit::{AudioUnit, Element, Scope};
use self::coreaudio::sys::{
    kAudioDevicePropertyAvailableNominalSampleRates, kAudioDevicePropertyBufferFrameSize,
    kAudioDevicePropertyBufferFrameSizeRange, kAudioDevicePropertyDeviceIsAlive,
    kAudioDevicePropertyDeviceNameCFString, kAudioDevicePropertyNominalSampleRate,
    kAudioDevicePropertyScopeOutput, kAudioDevicePropertyStreamConfiguration,
    kAudioDevicePropertyStreamFormat, kAudioObjectPropertyElementMaster,
    kAudioObjectPropertyScopeGlobal, kAudioObjectPropertyScopeInput,
    kAudioObjectPropertyScopeOutput, kAudioOutputUnitProperty_CurrentDevice,
    kAudioOutputUnitProperty_EnableIO, kAudioUnitProperty_StreamFormat, kCFStringEncodingUTF8,
    AudioBuffer, AudioBufferList, AudioDeviceID, AudioObjectGetPropertyData,
    AudioObjectGetPropertyDataSize, AudioObjectID, AudioObjectPropertyAddress,
    AudioObjectPropertyScope, AudioObjectSetPropertyData, AudioStreamBasicDescription,
    AudioValueRange, OSStatus,
};
use crate::traits::{DeviceTrait, HostTrait, StreamTrait};
use crate::{
    BackendSpecificError, BufferSize, BuildStreamError, ChannelCount, Data,
    DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo,
    PauseStreamError, PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError,
    SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange,
    SupportedStreamConfigsError,
};
use parking_lot::Mutex;
use std::ffi::CStr;
use std::fmt;
use std::mem;
use std::os::raw::c_char;
use std::ptr::null;
use std::slice;
use std::sync::mpsc::{channel, RecvTimeoutError};
use std::sync::Arc;
use std::time::{Duration, Instant};

pub use self::enumerate::{
    default_input_device, default_output_device, Devices, SupportedInputConfigs,
    SupportedOutputConfigs,
};
use property_listener::AudioObjectPropertyListener;

pub mod enumerate;
mod property_listener;

/// Coreaudio host, the default host on macOS.
#[derive(Debug)]
pub struct Host;

impl Host {
    pub fn new() -> Result {
        Ok(Host)
    }
}

impl HostTrait for Host {
    type Devices = Devices;
    type Device = Device;

    fn is_available() -> bool {
        // Assume coreaudio is always available
        true
    }

    fn devices(&self) -> Result {
        Devices::new()
    }

    fn default_input_device(&self) -> Option {
        default_input_device()
    }

    fn default_output_device(&self) -> Option {
        default_output_device()
    }
}

impl DeviceTrait for Device {
    type SupportedInputConfigs = SupportedInputConfigs;
    type SupportedOutputConfigs = SupportedOutputConfigs;
    type Stream = Stream;

    // All trait methods simply forward to the inherent `Device` implementations below.
    fn name(&self) -> Result {
        Device::name(self)
    }

    fn supported_input_configs(
        &self,
    ) -> Result {
        Device::supported_input_configs(self)
    }

    fn supported_output_configs(
        &self,
    ) -> Result {
        Device::supported_output_configs(self)
    }

    fn default_input_config(&self) -> Result {
        Device::default_input_config(self)
    }

    fn default_output_config(&self) -> Result {
        Device::default_output_config(self)
    }

    fn build_input_stream_raw(
        &self,
        config: &StreamConfig,
        sample_format: SampleFormat,
        data_callback: D,
        error_callback: E,
        timeout: Option,
    ) -> Result
    where
        D: FnMut(&Data, &InputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static,
    {
        Device::build_input_stream_raw(
            self,
            config,
            sample_format,
            data_callback,
            error_callback,
            timeout,
        )
    }

    fn build_output_stream_raw(
        &self,
        config: &StreamConfig,
        sample_format: SampleFormat,
        data_callback: D,
        error_callback: E,
        timeout: Option,
    ) -> Result
    where
        D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static,
    {
        Device::build_output_stream_raw(
            self,
            config,
            sample_format,
            data_callback,
            error_callback,
            timeout,
        )
    }
}

#[derive(Clone, PartialEq, Eq)]
pub struct Device {
    // The raw CoreAudio device identifier.
    pub(crate) audio_device_id: AudioDeviceID,
    // Whether this handle was obtained as the system default device.
    is_default: bool,
}

impl Device {
    fn name(&self) -> Result {
        let property_address = AudioObjectPropertyAddress {
            mSelector: kAudioDevicePropertyDeviceNameCFString,
            mScope: kAudioDevicePropertyScopeOutput,
            mElement: kAudioObjectPropertyElementMaster,
        };

        let device_name: CFStringRef = null();
        let data_size = mem::size_of::();
        let c_str = unsafe {
            let status = AudioObjectGetPropertyData(
                self.audio_device_id,
                &property_address as *const _,
                0,
                null(),
                &data_size as *const _ as *mut _,
                &device_name as *const _ as *mut _,
            );
            check_os_status(status)?;

            // Fast path: try to borrow the CFString's internal UTF-8 buffer directly.
            let c_string: *const c_char = CFStringGetCStringPtr(device_name, kCFStringEncodingUTF8);
            if c_string.is_null() {
                // Slow path: re-fetch the name and copy it out into a local buffer.
                let status = AudioObjectGetPropertyData(
                    self.audio_device_id,
                    &property_address as *const _,
                    0,
                    null(),
                    &data_size as *const _ as *mut _,
                    &device_name as *const _ as *mut _,
                );
                check_os_status(status)?;
                let mut buf: [i8; 255] = [0; 255];
                let result = CFStringGetCString(
                    device_name,
                    buf.as_mut_ptr(),
                    buf.len() as _,
                    kCFStringEncodingUTF8,
                );
                if result == 0 {
                    let description =
                        "core foundation failed to return device name string".to_string();
                    let err = BackendSpecificError { description };
                    return Err(err.into());
                }
                let name: &CStr = CStr::from_ptr(buf.as_ptr());
                return Ok(name.to_str().unwrap().to_owned());
            }
            CStr::from_ptr(c_string as *mut _)
        };
        Ok(c_str.to_string_lossy().into_owned())
    }

    // Logic re-used between `supported_input_configs` and `supported_output_configs`.
    #[allow(clippy::cast_ptr_alignment)]
    fn supported_configs(
        &self,
        scope: AudioObjectPropertyScope,
    ) -> Result {
        let mut property_address = AudioObjectPropertyAddress {
            mSelector: kAudioDevicePropertyStreamConfiguration,
            mScope: scope,
            mElement: kAudioObjectPropertyElementMaster,
        };

        unsafe {
            // Retrieve the devices audio buffer list.
            let data_size = 0u32;
            let status = AudioObjectGetPropertyDataSize(
                self.audio_device_id,
                &property_address as *const _,
                0,
                null(),
                &data_size as *const _ as *mut _,
            );
            check_os_status(status)?;

            let mut audio_buffer_list: Vec = vec![];
            audio_buffer_list.reserve_exact(data_size as usize);
            let status = AudioObjectGetPropertyData(
                self.audio_device_id,
                &property_address as *const _,
                0,
                null(),
                &data_size as *const _ as *mut _,
                audio_buffer_list.as_mut_ptr() as *mut _,
            );
            check_os_status(status)?;

            let audio_buffer_list = audio_buffer_list.as_mut_ptr() as *mut AudioBufferList;

            // If there's no buffers, skip.
            if (*audio_buffer_list).mNumberBuffers == 0 {
                return Ok(vec![].into_iter());
            }

            // Count the number of channels as the sum of all channels in all output buffers.
            let n_buffers = (*audio_buffer_list).mNumberBuffers as usize;
            let first: *const AudioBuffer = (*audio_buffer_list).mBuffers.as_ptr();
            let buffers: &'static [AudioBuffer] = slice::from_raw_parts(first, n_buffers);
            let mut n_channels = 0;
            for buffer in buffers {
                n_channels += buffer.mNumberChannels as usize;
            }

            // TODO: macOS should support U8, I16, I32, F32 and F64. This should allow for using
            // I16 but just use F32 for now as it's the default anyway.
            let sample_format = SampleFormat::F32;

            // Get available sample rate ranges.
            property_address.mSelector = kAudioDevicePropertyAvailableNominalSampleRates;
            let data_size = 0u32;
            let status = AudioObjectGetPropertyDataSize(
                self.audio_device_id,
                &property_address as *const _,
                0,
                null(),
                &data_size as *const _ as *mut _,
            );
            check_os_status(status)?;

            let n_ranges = data_size as usize / mem::size_of::();
            let mut ranges: Vec = vec![];
            ranges.reserve_exact(data_size as usize);
            let status = AudioObjectGetPropertyData(
                self.audio_device_id,
                &property_address as *const _,
                0,
                null(),
                &data_size as *const _ as *mut _,
                ranges.as_mut_ptr() as *mut _,
            );
            check_os_status(status)?;

            let ranges: *mut AudioValueRange = ranges.as_mut_ptr() as *mut _;
            let ranges: &'static [AudioValueRange] = slice::from_raw_parts(ranges, n_ranges);

            let audio_unit = audio_unit_from_device(self, true)?;
            let buffer_size = get_io_buffer_frame_size_range(&audio_unit)?;

            // Collect the supported formats for the device.
            let mut fmts = vec![];
            for range in ranges {
                let fmt = SupportedStreamConfigRange {
                    channels: n_channels as ChannelCount,
                    min_sample_rate: SampleRate(range.mMinimum as _),
                    max_sample_rate: SampleRate(range.mMaximum as _),
                    buffer_size: buffer_size.clone(),
                    sample_format,
                };
                fmts.push(fmt);
            }

            Ok(fmts.into_iter())
        }
    }

    fn supported_input_configs(
        &self,
    ) -> Result {
        self.supported_configs(kAudioObjectPropertyScopeInput)
    }

    fn supported_output_configs(
        &self,
    ) -> Result {
        self.supported_configs(kAudioObjectPropertyScopeOutput)
    }

    /// Shared implementation for `default_input_config` / `default_output_config`.
    fn default_config(
        &self,
        scope: AudioObjectPropertyScope,
    ) -> Result {
        // Translate CoreAudio OSStatus failures into `DefaultStreamConfigError` variants.
        fn default_config_error_from_os_status(
            status: OSStatus,
        ) -> Result<(), DefaultStreamConfigError> {
            let err = match coreaudio::Error::from_os_status(status) {
                Err(err) => err,
                Ok(_) => return Ok(()),
            };
            match err {
                coreaudio::Error::AudioUnit(
                    coreaudio::error::AudioUnitError::FormatNotSupported,
                )
                | coreaudio::Error::AudioCodec(_)
                | coreaudio::Error::AudioFormat(_) => {
                    Err(DefaultStreamConfigError::StreamTypeNotSupported)
                }
                coreaudio::Error::AudioUnit(coreaudio::error::AudioUnitError::NoConnection) => {
                    Err(DefaultStreamConfigError::DeviceNotAvailable)
                }
                err => {
                    let description = format!("{}", err);
                    let err = BackendSpecificError { description };
                    Err(err.into())
                }
            }
        }

        let property_address = AudioObjectPropertyAddress {
            mSelector: kAudioDevicePropertyStreamFormat,
            mScope: scope,
            mElement: kAudioObjectPropertyElementMaster,
        };

        unsafe {
            let asbd: AudioStreamBasicDescription = mem::zeroed();
            let data_size = mem::size_of::() as u32;
            let status = AudioObjectGetPropertyData(
                self.audio_device_id,
                &property_address as *const _,
                0,
                null(),
                &data_size as *const _ as *mut _,
                &asbd as *const _ as *mut _,
            );
            default_config_error_from_os_status(status)?;

            // Decode the ASBD's format id/flags into one of CPAL's sample formats;
            // only linear-PCM F32 and I16 are accepted here.
            let sample_format = {
                let audio_format = coreaudio::audio_unit::AudioFormat::from_format_and_flag(
                    asbd.mFormatID,
                    Some(asbd.mFormatFlags),
                );
                let flags = match audio_format {
                    Some(coreaudio::audio_unit::AudioFormat::LinearPCM(flags)) => flags,
                    _ => return Err(DefaultStreamConfigError::StreamTypeNotSupported),
                };
                let maybe_sample_format =
                    coreaudio::audio_unit::SampleFormat::from_flags_and_bits_per_sample(
                        flags,
                        asbd.mBitsPerChannel,
                    );
                match maybe_sample_format {
                    Some(coreaudio::audio_unit::SampleFormat::F32) => SampleFormat::F32,
                    Some(coreaudio::audio_unit::SampleFormat::I16) => SampleFormat::I16,
                    _ => return Err(DefaultStreamConfigError::StreamTypeNotSupported),
                }
            };

            let audio_unit = audio_unit_from_device(self, true)?;
            let buffer_size = get_io_buffer_frame_size_range(&audio_unit)?;

            let config = SupportedStreamConfig {
                sample_rate: SampleRate(asbd.mSampleRate as _),
                channels: asbd.mChannelsPerFrame as _,
                buffer_size,
                sample_format,
            };
            Ok(config)
        }
    }

    fn default_input_config(&self) -> Result {
        self.default_config(kAudioObjectPropertyScopeInput)
    }

    fn default_output_config(&self) -> Result {
        self.default_config(kAudioObjectPropertyScopeOutput)
    }
}

impl fmt::Debug for Device {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Device")
            .field("audio_device_id", &self.audio_device_id)
            .field("name", &self.name())
            .finish()
    }
}

struct StreamInner {
    playing: bool,
    audio_unit: AudioUnit,
    /// Manage the lifetime of the closure that handles device disconnection.
    _disconnect_listener: Option,
    // Track the device with which the audio unit was spawned.
    //
    // We must do this so that we can avoid changing the device sample rate if there is already
    // a stream associated with the device.
    #[allow(dead_code)]
    device_id: AudioDeviceID,
}

/// Register the on-disconnect callback.
/// This will both stop the stream and call the error callback with DeviceNotAvailable.
/// This function should only be called once per stream.
fn add_disconnect_listener(
    stream: &Stream,
    error_callback: Arc>,
) -> Result<(), BuildStreamError>
where
    E: FnMut(StreamError) + Send + 'static,
{
    let stream_copy = stream.clone();
    let mut stream_inner = stream.inner.lock();
    // Watch the `DeviceIsAlive` property; the closure fires when the device goes away.
    stream_inner._disconnect_listener = Some(AudioObjectPropertyListener::new(
        stream_inner.device_id,
        AudioObjectPropertyAddress {
            mSelector: kAudioDevicePropertyDeviceIsAlive,
            mScope: kAudioObjectPropertyScopeGlobal,
            mElement: kAudioObjectPropertyElementMaster,
        },
        move || {
            let _ = stream_copy.pause();
            (error_callback.lock())(StreamError::DeviceNotAvailable);
        },
    )?);
    Ok(())
}

fn audio_unit_from_device(device: &Device, input: bool) -> Result {
    // Default output devices can use the DefaultOutput IO type; everything
    // else (inputs, explicit devices) goes through HalOutput.
    let output_type = if device.is_default && !input {
        coreaudio::audio_unit::IOType::DefaultOutput
    } else {
        coreaudio::audio_unit::IOType::HalOutput
    };
    let mut audio_unit = AudioUnit::new(output_type)?;

    if input {
        // Enable input processing.
        let enable_input = 1u32;
        audio_unit.set_property(
            kAudioOutputUnitProperty_EnableIO,
            Scope::Input,
            Element::Input,
            Some(&enable_input),
        )?;

        // Disable output processing.
        let disable_output = 0u32;
        audio_unit.set_property(
            kAudioOutputUnitProperty_EnableIO,
            Scope::Output,
            Element::Output,
            Some(&disable_output),
        )?;
    }

    // Bind the audio unit to this specific device.
    audio_unit.set_property(
        kAudioOutputUnitProperty_CurrentDevice,
        Scope::Global,
        Element::Output,
        Some(&device.audio_device_id),
    )?;

    Ok(audio_unit)
}

impl Device {
    #[allow(clippy::cast_ptr_alignment)]
    #[allow(clippy::while_immutable_condition)]
    #[allow(clippy::float_cmp)]
    fn build_input_stream_raw(
        &self,
        config: &StreamConfig,
        sample_format: SampleFormat,
        mut data_callback: D,
        error_callback: E,
        _timeout: Option,
    ) -> Result
    where
        D: FnMut(&Data, &InputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static,
    {
        // The scope and element for working with a device's input stream.
        let scope = Scope::Output;
        let element = Element::Input;

        // Potentially change the device sample rate to match the config.
        set_sample_rate(self.audio_device_id, config.sample_rate)?;

        let mut audio_unit = audio_unit_from_device(self, true)?;

        // Set the stream in interleaved mode.
        let asbd = asbd_from_config(config, sample_format);
        audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;

        // Set the buffersize — a fixed size is only honoured when it falls inside
        // the device's supported frame-size range.
        match config.buffer_size {
            BufferSize::Fixed(v) => {
                let buffer_size_range = get_io_buffer_frame_size_range(&audio_unit)?;
                match buffer_size_range {
                    SupportedBufferSize::Range { min, max } => {
                        if v >= min && v <= max {
                            audio_unit.set_property(
                                kAudioDevicePropertyBufferFrameSize,
                                scope,
                                element,
                                Some(&v),
                            )?
                        } else {
                            return Err(BuildStreamError::StreamConfigNotSupported);
                        }
                    }
                    SupportedBufferSize::Unknown => (),
                }
            }
            BufferSize::Default => (),
        }

        // The error callback is shared between the data callback and the
        // disconnect listener, hence the Arc<Mutex<..>>.
        let error_callback = Arc::new(Mutex::new(error_callback));
        let error_callback_disconnect = error_callback.clone();

        // Register the callback that is being called by coreaudio whenever it needs data to be
        // fed to the audio buffer.
        let bytes_per_channel = sample_format.sample_size();
        let sample_rate = config.sample_rate;
        type Args = render_callback::Args;
        audio_unit.set_input_callback(move |args: Args| unsafe {
            let ptr = (*args.data.data).mBuffers.as_ptr() as *const AudioBuffer;
            let len = (*args.data.data).mNumberBuffers as usize;
            let buffers: &[AudioBuffer] = slice::from_raw_parts(ptr, len);

            // TODO: Perhaps loop over all buffers instead?
            let AudioBuffer {
                mNumberChannels: channels,
                mDataByteSize: data_byte_size,
                mData: data,
            } = buffers[0];

            let data = data as *mut ();
            let len = (data_byte_size as usize / bytes_per_channel) as usize;
            let data = Data::from_parts(data, len, sample_format);

            // TODO: Need a better way to get delay, for now we assume a double-buffer offset.
            let callback = match host_time_to_stream_instant(args.time_stamp.mHostTime) {
                Err(err) => {
                    (error_callback.lock())(err.into());
                    return Err(());
                }
                Ok(cb) => cb,
            };
            let buffer_frames = len / channels as usize;
            let delay = frames_to_duration(buffer_frames, sample_rate);
            let capture = callback
                .sub(delay)
                .expect("`capture` occurs before origin of alsa `StreamInstant`");
            let timestamp = crate::InputStreamTimestamp { callback, capture };

            let info = InputCallbackInfo { timestamp };
            data_callback(&data, &info);
            Ok(())
        })?;

        let stream = Stream::new(StreamInner {
            playing: true,
            _disconnect_listener: None,
            audio_unit,
            device_id: self.audio_device_id,
        });

        // If we didn't request the default device, stop the stream if the
        // device disconnects.
        if !self.is_default {
            add_disconnect_listener(&stream, error_callback_disconnect)?;
        }

        stream.inner.lock().audio_unit.start()?;

        Ok(stream)
    }

    fn build_output_stream_raw(
        &self,
        config: &StreamConfig,
        sample_format: SampleFormat,
        mut data_callback: D,
        error_callback: E,
        _timeout: Option,
    ) -> Result
    where
        D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static,
    {
        let mut audio_unit = audio_unit_from_device(self, false)?;

        // The scope and element for working with a device's output stream.
        let scope = Scope::Input;
        let element = Element::Output;

        // Set the stream in interleaved mode.
        let asbd = asbd_from_config(config, sample_format);
        audio_unit.set_property(kAudioUnitProperty_StreamFormat, scope, element, Some(&asbd))?;

        // Set the buffersize — a fixed size is only honoured when it falls inside
        // the device's supported frame-size range.
        match config.buffer_size {
            BufferSize::Fixed(v) => {
                let buffer_size_range = get_io_buffer_frame_size_range(&audio_unit)?;
                match buffer_size_range {
                    SupportedBufferSize::Range { min, max } => {
                        if v >= min && v <= max {
                            audio_unit.set_property(
                                kAudioDevicePropertyBufferFrameSize,
                                scope,
                                element,
                                Some(&v),
                            )?
                        } else {
                            return Err(BuildStreamError::StreamConfigNotSupported);
                        }
                    }
                    SupportedBufferSize::Unknown => (),
                }
            }
            BufferSize::Default => (),
        }

        // Shared between the render callback and the disconnect listener.
        let error_callback = Arc::new(Mutex::new(error_callback));
        let error_callback_disconnect = error_callback.clone();

        // Register the callback that is being called by coreaudio whenever it needs data to be
        // fed to the audio buffer.
        let bytes_per_channel = sample_format.sample_size();
        let sample_rate = config.sample_rate;
        type Args = render_callback::Args;
        audio_unit.set_render_callback(move |args: Args| unsafe {
            // If `run()` is currently running, then a callback will be available from this list.
            // Otherwise, we just fill the buffer with zeroes and return.
let AudioBuffer { mNumberChannels: channels, mDataByteSize: data_byte_size, mData: data, } = (*args.data.data).mBuffers[0]; let data = data as *mut (); let len = (data_byte_size as usize / bytes_per_channel) as usize; let mut data = Data::from_parts(data, len, sample_format); let callback = match host_time_to_stream_instant(args.time_stamp.mHostTime) { Err(err) => { (error_callback.lock())(err.into()); return Err(()); } Ok(cb) => cb, }; // TODO: Need a better way to get delay, for now we assume a double-buffer offset. let buffer_frames = len / channels as usize; let delay = frames_to_duration(buffer_frames, sample_rate); let playback = callback .add(delay) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); let timestamp = crate::OutputStreamTimestamp { callback, playback }; let info = OutputCallbackInfo { timestamp }; data_callback(&mut data, &info); Ok(()) })?; let stream = Stream::new(StreamInner { playing: true, _disconnect_listener: None, audio_unit, device_id: self.audio_device_id, }); // If we didn't request the default device, stop the stream if the // device disconnects. if !self.is_default { add_disconnect_listener(&stream, error_callback_disconnect)?; } stream.inner.lock().audio_unit.start()?; Ok(stream) } } /// Attempt to set the device sample rate to the provided rate. /// Return an error if the requested sample rate is not supported by the device. fn set_sample_rate( audio_device_id: AudioObjectID, target_sample_rate: SampleRate, ) -> Result<(), BuildStreamError> { // Get the current sample rate. 
let mut property_address = AudioObjectPropertyAddress { mSelector: kAudioDevicePropertyNominalSampleRate, mScope: kAudioObjectPropertyScopeGlobal, mElement: kAudioObjectPropertyElementMaster, }; let sample_rate: f64 = 0.0; let data_size = mem::size_of::() as u32; let status = unsafe { AudioObjectGetPropertyData( audio_device_id, &property_address as *const _, 0, null(), &data_size as *const _ as *mut _, &sample_rate as *const _ as *mut _, ) }; coreaudio::Error::from_os_status(status)?; // If the requested sample rate is different to the device sample rate, update the device. if sample_rate as u32 != target_sample_rate.0 { // Get available sample rate ranges. property_address.mSelector = kAudioDevicePropertyAvailableNominalSampleRates; let data_size = 0u32; let status = unsafe { AudioObjectGetPropertyDataSize( audio_device_id, &property_address as *const _, 0, null(), &data_size as *const _ as *mut _, ) }; coreaudio::Error::from_os_status(status)?; let n_ranges = data_size as usize / mem::size_of::(); let mut ranges: Vec = vec![]; ranges.reserve_exact(data_size as usize); let status = unsafe { AudioObjectGetPropertyData( audio_device_id, &property_address as *const _, 0, null(), &data_size as *const _ as *mut _, ranges.as_mut_ptr() as *mut _, ) }; coreaudio::Error::from_os_status(status)?; let ranges: *mut AudioValueRange = ranges.as_mut_ptr() as *mut _; let ranges: &'static [AudioValueRange] = unsafe { slice::from_raw_parts(ranges, n_ranges) }; // Now that we have the available ranges, pick the one matching the desired rate. 
let sample_rate = target_sample_rate.0; let maybe_index = ranges .iter() .position(|r| r.mMinimum as u32 == sample_rate && r.mMaximum as u32 == sample_rate); let range_index = match maybe_index { None => return Err(BuildStreamError::StreamConfigNotSupported), Some(i) => i, }; let (send, recv) = channel::>(); let sample_rate_address = AudioObjectPropertyAddress { mSelector: kAudioDevicePropertyNominalSampleRate, mScope: kAudioObjectPropertyScopeGlobal, mElement: kAudioObjectPropertyElementMaster, }; // Send sample rate updates back on a channel. let sample_rate_handler = move || { let mut rate: f64 = 0.0; let data_size = mem::size_of::(); let result = unsafe { AudioObjectGetPropertyData( audio_device_id, &sample_rate_address as *const _, 0, null(), &data_size as *const _ as *mut _, &mut rate as *const _ as *mut _, ) }; send.send(coreaudio::Error::from_os_status(result).map(|_| rate)) .ok(); }; let listener = AudioObjectPropertyListener::new( audio_device_id, sample_rate_address, sample_rate_handler, )?; // Finally, set the sample rate. property_address.mSelector = kAudioDevicePropertyNominalSampleRate; let status = unsafe { AudioObjectSetPropertyData( audio_device_id, &property_address as *const _, 0, null(), data_size, &ranges[range_index] as *const _ as *const _, ) }; coreaudio::Error::from_os_status(status)?; // Wait for the reported_rate to change. // // This should not take longer than a few ms, but we timeout after 1 sec just in case. // We loop over potentially several events from the channel to ensure // that we catch the expected change in sample rate. 
let mut timeout = Duration::from_secs(1); let start = Instant::now(); loop { match recv.recv_timeout(timeout) { Err(err) => { let description = match err { RecvTimeoutError::Disconnected => { "sample rate listener channel disconnected unexpectedly" } RecvTimeoutError::Timeout => { "timeout waiting for sample rate update for device" } } .to_string(); return Err(BackendSpecificError { description }.into()); } Ok(Ok(reported_sample_rate)) => { if reported_sample_rate == target_sample_rate.0 as f64 { break; } } Ok(Err(_)) => { // TODO: should we consider collecting this error? } }; timeout = timeout .checked_sub(start.elapsed()) .unwrap_or(Duration::ZERO); } listener.remove()?; } Ok(()) } #[derive(Clone)] pub struct Stream { inner: Arc>, } impl Stream { fn new(inner: StreamInner) -> Self { Self { inner: Arc::new(Mutex::new(inner)), } } } impl StreamTrait for Stream { fn play(&self) -> Result<(), PlayStreamError> { let mut stream = self.inner.lock(); if !stream.playing { if let Err(e) = stream.audio_unit.start() { let description = format!("{}", e); let err = BackendSpecificError { description }; return Err(err.into()); } stream.playing = true; } Ok(()) } fn pause(&self) -> Result<(), PauseStreamError> { let mut stream = self.inner.lock(); if stream.playing { if let Err(e) = stream.audio_unit.stop() { let description = format!("{}", e); let err = BackendSpecificError { description }; return Err(err.into()); } stream.playing = false; } Ok(()) } } fn get_io_buffer_frame_size_range( audio_unit: &AudioUnit, ) -> Result { let buffer_size_range: AudioValueRange = audio_unit.get_property( kAudioDevicePropertyBufferFrameSizeRange, Scope::Global, Element::Output, )?; Ok(SupportedBufferSize::Range { min: buffer_size_range.mMinimum as u32, max: buffer_size_range.mMaximum as u32, }) } cpal-0.15.2/src/host/coreaudio/macos/property_listener.rs000064400000000000000000000053311046102023000216110ustar 00000000000000//! Helper code for registering audio object property listeners. 
use super::coreaudio::sys::{
    AudioObjectAddPropertyListener, AudioObjectID, AudioObjectPropertyAddress,
    AudioObjectRemovePropertyListener, OSStatus,
};
use crate::BuildStreamError;

/// A double-indirection to be able to pass a closure (a fat pointer)
/// via a single c_void.
struct PropertyListenerCallbackWrapper(Box<dyn FnMut() -> ()>);

/// Maintain an audio object property listener.
/// The listener will be removed when this type is dropped.
pub struct AudioObjectPropertyListener {
    // Boxed so its address is stable: the raw pointer handed to CoreAudio as the
    // listener's client data must stay valid for the lifetime of the registration.
    callback: Box<PropertyListenerCallbackWrapper>,
    property_address: AudioObjectPropertyAddress,
    audio_object_id: AudioObjectID,
    // Set once the listener has been unregistered, so `Drop` does not remove it twice.
    removed: bool,
}

impl AudioObjectPropertyListener {
    /// Attach the provided callback as a audio object property listener.
    ///
    /// Registers `callback` with CoreAudio for the given `property_address` on
    /// `audio_object_id`. Returns an error if `AudioObjectAddPropertyListener`
    /// reports a non-zero `OSStatus`.
    pub fn new<F: FnMut() -> () + 'static>(
        audio_object_id: AudioObjectID,
        property_address: AudioObjectPropertyAddress,
        callback: F,
    ) -> Result<Self, BuildStreamError> {
        let callback = Box::new(PropertyListenerCallbackWrapper(Box::new(callback)));
        unsafe {
            coreaudio::Error::from_os_status(AudioObjectAddPropertyListener(
                audio_object_id,
                &property_address as *const _,
                Some(property_listener_handler_shim),
                // Pass the wrapper's address as the opaque client-data pointer;
                // the shim casts it back to `*mut PropertyListenerCallbackWrapper`.
                &*callback as *const _ as *mut _,
            ))?;
        };
        Ok(Self {
            callback,
            audio_object_id,
            property_address,
            removed: false,
        })
    }

    /// Explicitly remove the property listener.
    /// Use this method if you need to explicitly handle failure to remove
    /// the property listener.
    pub fn remove(mut self) -> Result<(), BuildStreamError> {
        self.remove_inner()
    }

    /// Unregister the listener with CoreAudio, using the same shim and
    /// client-data pointer that were passed to `AudioObjectAddPropertyListener`.
    fn remove_inner(&mut self) -> Result<(), BuildStreamError> {
        unsafe {
            coreaudio::Error::from_os_status(AudioObjectRemovePropertyListener(
                self.audio_object_id,
                &self.property_address as *const _,
                Some(property_listener_handler_shim),
                &*self.callback as *const _ as *mut _,
            ))?;
        }
        self.removed = true;
        Ok(())
    }
}

impl Drop for AudioObjectPropertyListener {
    fn drop(&mut self) {
        // Best-effort removal on drop; failures are ignored because `drop`
        // cannot propagate errors. Call `remove()` to handle them explicitly.
        if !self.removed {
            let _ = self.remove_inner();
        }
    }
}

/// Callback used to call user-provided closure as a property listener.
unsafe extern "C" fn property_listener_handler_shim(
    _: AudioObjectID,
    _: u32,
    _: *const AudioObjectPropertyAddress,
    callback: *mut ::std::os::raw::c_void,
) -> OSStatus {
    // SAFETY: `callback` is the client-data pointer registered alongside this
    // shim, which is always the address of a live `PropertyListenerCallbackWrapper`
    // owned (boxed) by an `AudioObjectPropertyListener`.
    let wrapper = callback as *mut PropertyListenerCallbackWrapper;
    (*wrapper).0();
    0
}
cpal-0.15.2/src/host/coreaudio/mod.rs000064400000000000000000000105611046102023000154760ustar 00000000000000extern crate coreaudio;
extern crate parking_lot;

use self::coreaudio::sys::{
    kAudioFormatFlagIsFloat, kAudioFormatFlagIsPacked, kAudioFormatLinearPCM,
    AudioStreamBasicDescription, OSStatus,
};
use crate::DefaultStreamConfigError;
use crate::{BuildStreamError, SupportedStreamConfigsError};
use crate::{BackendSpecificError, SampleFormat, StreamConfig};

#[cfg(target_os = "ios")]
mod ios;
#[cfg(target_os = "macos")]
mod macos;

#[cfg(target_os = "ios")]
pub use self::ios::{
    enumerate::{Devices, SupportedInputConfigs, SupportedOutputConfigs},
    Device, Host, Stream,
};
#[cfg(target_os = "macos")]
pub use self::macos::{
    enumerate::{Devices, SupportedInputConfigs, SupportedOutputConfigs},
    Device, Host, Stream,
};

/// Common helper methods used by both macOS and iOS
///
/// Converts a raw CoreAudio `OSStatus` into `Ok(())` on success, or a
/// `BackendSpecificError` carrying the error's string description otherwise.
fn check_os_status(os_status: OSStatus) -> Result<(), BackendSpecificError> {
    match coreaudio::Error::from_os_status(os_status) {
        Ok(()) => Ok(()),
        Err(err) => {
            let description = err.to_string();
            Err(BackendSpecificError { description })
        }
    }
}

// Create a coreaudio AudioStreamBasicDescription from a CPAL Format.
fn asbd_from_config( config: &StreamConfig, sample_format: SampleFormat, ) -> AudioStreamBasicDescription { let n_channels = config.channels as usize; let sample_rate = config.sample_rate.0; let bytes_per_channel = sample_format.sample_size(); let bits_per_channel = bytes_per_channel * 8; let bytes_per_frame = n_channels * bytes_per_channel; let frames_per_packet = 1; let bytes_per_packet = frames_per_packet * bytes_per_frame; let format_flags = match sample_format { SampleFormat::F32 => (kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked) as u32, _ => kAudioFormatFlagIsPacked as u32, }; AudioStreamBasicDescription { mBitsPerChannel: bits_per_channel as _, mBytesPerFrame: bytes_per_frame as _, mChannelsPerFrame: n_channels as _, mBytesPerPacket: bytes_per_packet as _, mFramesPerPacket: frames_per_packet as _, mFormatFlags: format_flags, mFormatID: kAudioFormatLinearPCM, mSampleRate: sample_rate as _, ..Default::default() } } fn host_time_to_stream_instant( m_host_time: u64, ) -> Result { let mut info: mach2::mach_time::mach_timebase_info = Default::default(); let res = unsafe { mach2::mach_time::mach_timebase_info(&mut info) }; check_os_status(res)?; let nanos = m_host_time * info.numer as u64 / info.denom as u64; let secs = nanos / 1_000_000_000; let subsec_nanos = nanos - secs * 1_000_000_000; Ok(crate::StreamInstant::new(secs as i64, subsec_nanos as u32)) } // Convert the given duration in frames at the given sample rate to a `std::time::Duration`. 
fn frames_to_duration(frames: usize, rate: crate::SampleRate) -> std::time::Duration { let secsf = frames as f64 / rate.0 as f64; let secs = secsf as u64; let nanos = ((secsf - secs as f64) * 1_000_000_000.0) as u32; std::time::Duration::new(secs, nanos) } // TODO need stronger error identification impl From for BuildStreamError { fn from(err: coreaudio::Error) -> BuildStreamError { match err { coreaudio::Error::RenderCallbackBufferFormatDoesNotMatchAudioUnitStreamFormat | coreaudio::Error::NoKnownSubtype | coreaudio::Error::AudioUnit(coreaudio::error::AudioUnitError::FormatNotSupported) | coreaudio::Error::AudioCodec(_) | coreaudio::Error::AudioFormat(_) => BuildStreamError::StreamConfigNotSupported, _ => BuildStreamError::DeviceNotAvailable, } } } impl From for SupportedStreamConfigsError { fn from(err: coreaudio::Error) -> SupportedStreamConfigsError { let description = format!("{}", err); let err = BackendSpecificError { description }; // Check for possible DeviceNotAvailable variant SupportedStreamConfigsError::BackendSpecific { err } } } impl From for DefaultStreamConfigError { fn from(err: coreaudio::Error) -> DefaultStreamConfigError { let description = format!("{}", err); let err = BackendSpecificError { description }; // Check for possible DeviceNotAvailable variant DefaultStreamConfigError::BackendSpecific { err } } } cpal-0.15.2/src/host/emscripten/mod.rs000064400000000000000000000327751046102023000157100ustar 00000000000000use js_sys::Float32Array; use std::time::Duration; use wasm_bindgen::prelude::*; use wasm_bindgen::JsCast; use wasm_bindgen_futures::{spawn_local, JsFuture}; use web_sys::AudioContext; use crate::traits::{DeviceTrait, HostTrait, StreamTrait}; use crate::{ BufferSize, BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig, 
SupportedStreamConfigRange, SupportedStreamConfigsError, }; // The emscripten backend currently works by instantiating an `AudioContext` object per `Stream`. // Creating a stream creates a new `AudioContext`. Destroying a stream destroys it. Creation of a // `Host` instance initializes the `stdweb` context. /// The default emscripten host type. #[derive(Debug)] pub struct Host; /// Content is false if the iterator is empty. pub struct Devices(bool); #[derive(Clone, Debug, PartialEq, Eq)] pub struct Device; #[wasm_bindgen] #[derive(Clone)] pub struct Stream { // A reference to an `AudioContext` object. audio_ctxt: AudioContext, } // Index within the `streams` array of the events loop. #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct StreamId(usize); pub type SupportedInputConfigs = ::std::vec::IntoIter; pub type SupportedOutputConfigs = ::std::vec::IntoIter; const MIN_CHANNELS: u16 = 1; const MAX_CHANNELS: u16 = 32; const MIN_SAMPLE_RATE: SampleRate = SampleRate(8_000); const MAX_SAMPLE_RATE: SampleRate = SampleRate(96_000); const DEFAULT_SAMPLE_RATE: SampleRate = SampleRate(44_100); const MIN_BUFFER_SIZE: u32 = 1; const MAX_BUFFER_SIZE: u32 = u32::MAX; const DEFAULT_BUFFER_SIZE: usize = 2048; const SUPPORTED_SAMPLE_FORMAT: SampleFormat = SampleFormat::F32; impl Host { pub fn new() -> Result { Ok(Host) } } impl Devices { fn new() -> Result { Ok(Self::default()) } } impl Device { #[inline] fn name(&self) -> Result { Ok("Default Device".to_owned()) } #[inline] fn supported_input_configs( &self, ) -> Result { unimplemented!(); } #[inline] fn supported_output_configs( &self, ) -> Result { let buffer_size = SupportedBufferSize::Range { min: MIN_BUFFER_SIZE, max: MAX_BUFFER_SIZE, }; let configs: Vec<_> = (MIN_CHANNELS..=MAX_CHANNELS) .map(|channels| SupportedStreamConfigRange { channels, min_sample_rate: MIN_SAMPLE_RATE, max_sample_rate: MAX_SAMPLE_RATE, buffer_size: buffer_size.clone(), sample_format: SUPPORTED_SAMPLE_FORMAT, }) .collect(); Ok(configs.into_iter()) 
} fn default_input_config(&self) -> Result { unimplemented!(); } fn default_output_config(&self) -> Result { const EXPECT: &str = "expected at least one valid webaudio stream config"; let config = self .supported_output_configs() .expect(EXPECT) .max_by(|a, b| a.cmp_default_heuristics(b)) .unwrap() .with_sample_rate(DEFAULT_SAMPLE_RATE); Ok(config) } } impl HostTrait for Host { type Devices = Devices; type Device = Device; fn is_available() -> bool { // Assume this host is always available on emscripten. true } fn devices(&self) -> Result { Devices::new() } fn default_input_device(&self) -> Option { default_input_device() } fn default_output_device(&self) -> Option { default_output_device() } } impl DeviceTrait for Device { type SupportedInputConfigs = SupportedInputConfigs; type SupportedOutputConfigs = SupportedOutputConfigs; type Stream = Stream; fn name(&self) -> Result { Device::name(self) } fn supported_input_configs( &self, ) -> Result { Device::supported_input_configs(self) } fn supported_output_configs( &self, ) -> Result { Device::supported_output_configs(self) } fn default_input_config(&self) -> Result { Device::default_input_config(self) } fn default_output_config(&self) -> Result { Device::default_output_config(self) } fn build_input_stream_raw( &self, _config: &StreamConfig, _sample_format: SampleFormat, _data_callback: D, _error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { unimplemented!() } fn build_output_stream_raw( &self, config: &StreamConfig, sample_format: SampleFormat, data_callback: D, _error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { if !valid_config(config, sample_format) { return Err(BuildStreamError::StreamConfigNotSupported); } let buffer_size_frames = match config.buffer_size { BufferSize::Fixed(v) => { if v == 0 { return 
Err(BuildStreamError::StreamConfigNotSupported); } else { v as usize } } BufferSize::Default => DEFAULT_BUFFER_SIZE, }; // Create the stream. let audio_ctxt = AudioContext::new().expect("webaudio is not present on this system"); let stream = Stream { audio_ctxt }; // Use `set_timeout` to invoke a Rust callback repeatedly. // // The job of this callback is to fill the content of the audio buffers. // // See also: The call to `set_timeout` at the end of the `audio_callback_fn` which creates // the loop. set_timeout( 10, stream.clone(), data_callback, config, sample_format, buffer_size_frames as u32, ); Ok(stream) } } impl StreamTrait for Stream { fn play(&self) -> Result<(), PlayStreamError> { let future = JsFuture::from( self.audio_ctxt .resume() .expect("Could not resume the stream"), ); spawn_local(async { match future.await { Ok(value) => assert!(value.is_undefined()), Err(value) => panic!("AudioContext.resume() promise was rejected: {:?}", value), } }); Ok(()) } fn pause(&self) -> Result<(), PauseStreamError> { let future = JsFuture::from( self.audio_ctxt .suspend() .expect("Could not suspend the stream"), ); spawn_local(async { match future.await { Ok(value) => assert!(value.is_undefined()), Err(value) => panic!("AudioContext.suspend() promise was rejected: {:?}", value), } }); Ok(()) } } fn audio_callback_fn( mut data_callback: D, ) -> impl FnOnce(Stream, StreamConfig, SampleFormat, u32) where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, { |stream, config, sample_format, buffer_size_frames| { let sample_rate = config.sample_rate.0; let buffer_size_samples = buffer_size_frames * config.channels as u32; let audio_ctxt = &stream.audio_ctxt; // TODO: We should be re-using a buffer. 
let mut temporary_buffer = vec![0f32; buffer_size_samples as usize]; { let len = temporary_buffer.len(); let data = temporary_buffer.as_mut_ptr() as *mut (); let mut data = unsafe { Data::from_parts(data, len, sample_format) }; let now_secs: f64 = audio_ctxt.current_time(); let callback = crate::StreamInstant::from_secs_f64(now_secs); // TODO: Use proper latency instead. Currently, unsupported on most browsers though, so // we estimate based on buffer size instead. Probably should use this, but it's only // supported by firefox (2020-04-28). // let latency_secs: f64 = audio_ctxt.outputLatency.try_into().unwrap(); let buffer_duration = frames_to_duration(len, sample_rate as usize); let playback = callback .add(buffer_duration) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); let timestamp = crate::OutputStreamTimestamp { callback, playback }; let info = OutputCallbackInfo { timestamp }; data_callback(&mut data, &info); } let typed_array: Float32Array = temporary_buffer.as_slice().into(); debug_assert_eq!(temporary_buffer.len() % config.channels as usize, 0); let src_buffer = Float32Array::new(typed_array.buffer().as_ref()); let context = audio_ctxt; let buffer = context .create_buffer( config.channels as u32, buffer_size_frames as u32, sample_rate as f32, ) .expect("Buffer could not be created"); for channel in 0..config.channels { let mut buffer_content = buffer .get_channel_data(channel as u32) .expect("Should be impossible"); for (i, buffer_content_item) in buffer_content.iter_mut().enumerate() { *buffer_content_item = src_buffer.get_index(i as u32 * config.channels as u32 + channel as u32); } } let node = context .create_buffer_source() .expect("The buffer source node could not be created"); node.set_buffer(Some(&buffer)); context .destination() .connect_with_audio_node(&node) .expect("Could not connect the audio node to the destination"); node.start().expect("Could not start the audio node"); // TODO: handle latency better ; 
right now we just use setInterval with the amount of sound // data that is in each buffer ; this is obviously bad, and also the schedule is too tight // and there may be underflows set_timeout( 1000 * buffer_size_frames as i32 / sample_rate as i32, stream.clone().clone(), data_callback, &config, sample_format, buffer_size_frames as u32, ); } } fn set_timeout( time: i32, stream: Stream, data_callback: D, config: &StreamConfig, sample_format: SampleFormat, buffer_size_frames: u32, ) where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, { let window = web_sys::window().expect("Not in a window somehow?"); window .set_timeout_with_callback_and_timeout_and_arguments_4( &Closure::once_into_js(audio_callback_fn(data_callback)) .dyn_ref::() .expect("The function was somehow not a function"), time, &stream.into(), &((*config).clone()).into(), &Closure::once_into_js(move || sample_format), &buffer_size_frames.into(), ) .expect("The timeout could not be set"); } impl Default for Devices { fn default() -> Devices { // We produce an empty iterator if the WebAudio API isn't available. Devices(is_webaudio_available()) } } impl Iterator for Devices { type Item = Device; #[inline] fn next(&mut self) -> Option { if self.0 { self.0 = false; Some(Device) } else { None } } } #[inline] fn default_input_device() -> Option { unimplemented!(); } #[inline] fn default_output_device() -> Option { if is_webaudio_available() { Some(Device) } else { None } } // Detects whether the `AudioContext` global variable is available. fn is_webaudio_available() -> bool { AudioContext::new().is_ok() } // Whether or not the given stream configuration is valid for building a stream. 
fn valid_config(conf: &StreamConfig, sample_format: SampleFormat) -> bool { conf.channels <= MAX_CHANNELS && conf.channels >= MIN_CHANNELS && conf.sample_rate <= MAX_SAMPLE_RATE && conf.sample_rate >= MIN_SAMPLE_RATE && sample_format == SUPPORTED_SAMPLE_FORMAT } // Convert the given duration in frames at the given sample rate to a `std::time::Duration`. fn frames_to_duration(frames: usize, rate: usize) -> std::time::Duration { let secsf = frames as f64 / rate as f64; let secs = secsf as u64; let nanos = ((secsf - secs as f64) * 1_000_000_000.0) as u32; std::time::Duration::new(secs, nanos) } cpal-0.15.2/src/host/jack/device.rs000064400000000000000000000225751046102023000151240ustar 00000000000000use crate::traits::DeviceTrait; use crate::{ BackendSpecificError, BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, InputCallbackInfo, OutputCallbackInfo, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError, }; use std::hash::{Hash, Hasher}; use std::time::Duration; use super::stream::Stream; use super::JACK_SAMPLE_FORMAT; pub type SupportedInputConfigs = std::vec::IntoIter; pub type SupportedOutputConfigs = std::vec::IntoIter; const DEFAULT_NUM_CHANNELS: u16 = 2; const DEFAULT_SUPPORTED_CHANNELS: [u16; 10] = [1, 2, 4, 6, 8, 16, 24, 32, 48, 64]; /// If a device is for input or output. /// Until we have duplex stream support JACK clients and CPAL devices for JACK will be either input or output. 
#[derive(Clone, Debug)] pub enum DeviceType { InputDevice, OutputDevice, } #[derive(Clone, Debug)] pub struct Device { name: String, sample_rate: SampleRate, buffer_size: SupportedBufferSize, device_type: DeviceType, start_server_automatically: bool, connect_ports_automatically: bool, } impl Device { fn new_device( name: String, connect_ports_automatically: bool, start_server_automatically: bool, device_type: DeviceType, ) -> Result { // ClientOptions are bit flags that you can set with the constants provided let client_options = super::get_client_options(start_server_automatically); // Create a dummy client to find out the sample rate of the server to be able to provide it as a possible config. // This client will be dropped, and a new one will be created when making the stream. // This is a hack due to the fact that the Client must be moved to create the AsyncClient. match super::get_client(&name, client_options) { Ok(client) => Ok(Device { // The name given to the client by JACK, could potentially be different from the name supplied e.g.if there is a name collision name: client.name().to_string(), sample_rate: SampleRate(client.sample_rate() as u32), buffer_size: SupportedBufferSize::Range { min: client.buffer_size(), max: client.buffer_size(), }, device_type, start_server_automatically, connect_ports_automatically, }), Err(e) => Err(e), } } pub fn default_output_device( name: &str, connect_ports_automatically: bool, start_server_automatically: bool, ) -> Result { let output_client_name = format!("{}_out", name); Device::new_device( output_client_name, connect_ports_automatically, start_server_automatically, DeviceType::OutputDevice, ) } pub fn default_input_device( name: &str, connect_ports_automatically: bool, start_server_automatically: bool, ) -> Result { let input_client_name = format!("{}_in", name); Device::new_device( input_client_name, connect_ports_automatically, start_server_automatically, DeviceType::InputDevice, ) } pub fn default_config(&self) -> 
Result { let channels = DEFAULT_NUM_CHANNELS; let sample_rate = self.sample_rate; let buffer_size = self.buffer_size.clone(); // The sample format for JACK audio ports is always "32-bit float mono audio" in the current implementation. // Custom formats are allowed within JACK, but this is of niche interest. // The format can be found programmatically by calling jack::PortSpec::port_type() on a created port. let sample_format = JACK_SAMPLE_FORMAT; Ok(SupportedStreamConfig { channels, sample_rate, buffer_size, sample_format, }) } pub fn supported_configs(&self) -> Vec { let f = match self.default_config() { Err(_) => return vec![], Ok(f) => f, }; let mut supported_configs = vec![]; for &channels in DEFAULT_SUPPORTED_CHANNELS.iter() { supported_configs.push(SupportedStreamConfigRange { channels, min_sample_rate: f.sample_rate, max_sample_rate: f.sample_rate, buffer_size: f.buffer_size.clone(), sample_format: f.sample_format, }); } supported_configs } pub fn is_input(&self) -> bool { matches!(self.device_type, DeviceType::InputDevice) } pub fn is_output(&self) -> bool { matches!(self.device_type, DeviceType::OutputDevice) } } impl DeviceTrait for Device { type SupportedInputConfigs = SupportedInputConfigs; type SupportedOutputConfigs = SupportedOutputConfigs; type Stream = Stream; fn name(&self) -> Result { Ok(self.name.clone()) } fn supported_input_configs( &self, ) -> Result { Ok(self.supported_configs().into_iter()) } fn supported_output_configs( &self, ) -> Result { Ok(self.supported_configs().into_iter()) } /// Returns the default input config /// The sample format for JACK audio ports is always "32-bit float mono audio" unless using a custom type. /// The sample rate is set by the JACK server. fn default_input_config(&self) -> Result { self.default_config() } /// Returns the default output config /// The sample format for JACK audio ports is always "32-bit float mono audio" unless using a custom type. /// The sample rate is set by the JACK server. 
fn default_output_config(&self) -> Result { self.default_config() } fn build_input_stream_raw( &self, conf: &StreamConfig, sample_format: SampleFormat, data_callback: D, error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { if let DeviceType::OutputDevice = &self.device_type { // Trying to create an input stream from an output device return Err(BuildStreamError::StreamConfigNotSupported); } if conf.sample_rate != self.sample_rate || sample_format != JACK_SAMPLE_FORMAT { return Err(BuildStreamError::StreamConfigNotSupported); } // The settings should be fine, create a Client let client_options = super::get_client_options(self.start_server_automatically); let client; match super::get_client(&self.name, client_options) { Ok(c) => client = c, Err(e) => { return Err(BuildStreamError::BackendSpecific { err: BackendSpecificError { description: e }, }) } }; let mut stream = Stream::new_input(client, conf.channels, data_callback, error_callback); if self.connect_ports_automatically { stream.connect_to_system_inputs(); } Ok(stream) } fn build_output_stream_raw( &self, conf: &StreamConfig, sample_format: SampleFormat, data_callback: D, error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { if let DeviceType::InputDevice = &self.device_type { // Trying to create an output stream from an input device return Err(BuildStreamError::StreamConfigNotSupported); } if conf.sample_rate != self.sample_rate || sample_format != JACK_SAMPLE_FORMAT { return Err(BuildStreamError::StreamConfigNotSupported); } // The settings should be fine, create a Client let client_options = super::get_client_options(self.start_server_automatically); let client; match super::get_client(&self.name, client_options) { Ok(c) => client = c, Err(e) => { return Err(BuildStreamError::BackendSpecific { err: 
BackendSpecificError { description: e }, }) } }; let mut stream = Stream::new_output(client, conf.channels, data_callback, error_callback); if self.connect_ports_automatically { stream.connect_to_system_outputs(); } Ok(stream) } } impl PartialEq for Device { fn eq(&self, other: &Self) -> bool { // Device::name() can never fail in this implementation self.name().unwrap() == other.name().unwrap() } } impl Eq for Device {} impl Hash for Device { fn hash(&self, state: &mut H) { self.name().unwrap().hash(state); } } cpal-0.15.2/src/host/jack/mod.rs000064400000000000000000000150651046102023000144400ustar 00000000000000extern crate jack; use crate::traits::HostTrait; use crate::{DevicesError, SampleFormat, SupportedStreamConfigRange}; mod device; pub use self::device::Device; pub use self::stream::Stream; mod stream; const JACK_SAMPLE_FORMAT: SampleFormat = SampleFormat::F32; pub type SupportedInputConfigs = std::vec::IntoIter; pub type SupportedOutputConfigs = std::vec::IntoIter; pub type Devices = std::vec::IntoIter; /// The JACK Host type #[derive(Debug)] pub struct Host { /// The name that the client will have in JACK. /// Until we have duplex streams two clients will be created adding "out" or "in" to the name /// since names have to be unique. name: String, /// If ports are to be connected to the system (soundcard) ports automatically (default is true). connect_ports_automatically: bool, /// If the JACK server should be started automatically if it isn't already when creating a Client (default is false). start_server_automatically: bool, /// A list of the devices that have been created from this Host. 
devices_created: Vec, } impl Host { pub fn new() -> Result { let mut host = Host { name: "cpal_client".to_owned(), connect_ports_automatically: true, start_server_automatically: false, devices_created: vec![], }; // Devices don't exist for JACK, they have to be created host.initialize_default_devices(); Ok(host) } /// Set whether the ports should automatically be connected to system /// (default is true) pub fn set_connect_automatically(&mut self, do_connect: bool) { self.connect_ports_automatically = do_connect; } /// Set whether a JACK server should be automatically started if it isn't already. /// (default is false) pub fn set_start_server_automatically(&mut self, do_start_server: bool) { self.start_server_automatically = do_start_server; } pub fn input_device_with_name(&mut self, name: &str) -> Option { self.name = name.to_owned(); self.default_input_device() } pub fn output_device_with_name(&mut self, name: &str) -> Option { self.name = name.to_owned(); self.default_output_device() } fn initialize_default_devices(&mut self) { let in_device_res = Device::default_input_device( &self.name, self.connect_ports_automatically, self.start_server_automatically, ); match in_device_res { Ok(device) => self.devices_created.push(device), Err(err) => { println!("{}", err); } } let out_device_res = Device::default_output_device( &self.name, self.connect_ports_automatically, self.start_server_automatically, ); match out_device_res { Ok(device) => self.devices_created.push(device), Err(err) => { println!("{}", err); } } } } impl HostTrait for Host { type Devices = Devices; type Device = Device; /// JACK is available if /// - the jack feature flag is set /// - libjack is installed (wouldn't compile without it) /// - the JACK server can be started /// /// If the code compiles the necessary jack libraries are installed. /// There is no way to know if the user has set up a correct JACK configuration e.g. with qjackctl. 
/// Users can choose to automatically start the server if it isn't already started when creating a client /// so checking if the server is running could give a false negative in some use cases. /// For these reasons this function should always return true. fn is_available() -> bool { true } fn devices(&self) -> Result { Ok(self.devices_created.clone().into_iter()) } fn default_input_device(&self) -> Option { for device in &self.devices_created { if device.is_input() { return Some(device.clone()); } } None } fn default_output_device(&self) -> Option { for device in &self.devices_created { if device.is_output() { return Some(device.clone()); } } None } } fn get_client_options(start_server_automatically: bool) -> jack::ClientOptions { let mut client_options = jack::ClientOptions::empty(); client_options.set( jack::ClientOptions::NO_START_SERVER, !start_server_automatically, ); client_options } fn get_client(name: &str, client_options: jack::ClientOptions) -> Result { let c_res = jack::Client::new(name, client_options); match c_res { Ok((client, status)) => { // The ClientStatus can tell us many things if status.intersects(jack::ClientStatus::SERVER_ERROR) { return Err(String::from( "There was an error communicating with the JACK server!", )); } else if status.intersects(jack::ClientStatus::SERVER_FAILED) { return Err(String::from("Could not connect to the JACK server!")); } else if status.intersects(jack::ClientStatus::VERSION_ERROR) { return Err(String::from( "Error connecting to JACK server: Client's protocol version does not match!", )); } else if status.intersects(jack::ClientStatus::INIT_FAILURE) { return Err(String::from( "Error connecting to JACK server: Unable to initialize client!", )); } else if status.intersects(jack::ClientStatus::SHM_FAILURE) { return Err(String::from( "Error connecting to JACK server: Unable to access shared memory!", )); } else if status.intersects(jack::ClientStatus::NO_SUCH_CLIENT) { return Err(String::from( "Error connecting to JACK 
server: Requested client does not exist!", )); } else if status.intersects(jack::ClientStatus::INVALID_OPTION) { return Err(String::from("Error connecting to JACK server: The operation contained an invalid or unsupported option!")); } return Ok(client); } Err(e) => { return Err(format!("Failed to open client because of error: {:?}", e)); } } } cpal-0.15.2/src/host/jack/stream.rs000064400000000000000000000432411046102023000151510ustar 00000000000000use crate::traits::StreamTrait; use crate::ChannelCount; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, Mutex}; use crate::{ BackendSpecificError, Data, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleRate, StreamError, }; use super::JACK_SAMPLE_FORMAT; type ErrorCallbackPtr = Arc>; pub struct Stream { // TODO: It might be faster to send a message when playing/pausing than to check this every iteration playing: Arc, async_client: jack::AsyncClient, // Port names are stored in order to connect them to other ports in jack automatically input_port_names: Vec, output_port_names: Vec, } impl Stream { // TODO: Return error messages pub fn new_input( client: jack::Client, channels: ChannelCount, data_callback: D, mut error_callback: E, ) -> Stream where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let mut ports = vec![]; let mut port_names: Vec = vec![]; // Create ports for i in 0..channels { let port_try = client.register_port(&format!("in_{}", i), jack::AudioIn::default()); match port_try { Ok(port) => { // Get the port name in order to later connect it automatically if let Ok(port_name) = port.name() { port_names.push(port_name); } // Store the port into a Vec to move to the ProcessHandler ports.push(port); } Err(e) => { // If port creation failed, send the error back via the error_callback error_callback( BackendSpecificError { description: e.to_string(), } .into(), ); } } } let playing = Arc::new(AtomicBool::new(true)); 
let error_callback_ptr = Arc::new(Mutex::new(error_callback)) as ErrorCallbackPtr; let input_process_handler = LocalProcessHandler::new( vec![], ports, SampleRate(client.sample_rate() as u32), client.buffer_size() as usize, Some(Box::new(data_callback)), None, playing.clone(), Arc::clone(&error_callback_ptr), ); let notification_handler = JackNotificationHandler::new(error_callback_ptr); let async_client = client .activate_async(notification_handler, input_process_handler) .unwrap(); Stream { playing, async_client, input_port_names: port_names, output_port_names: vec![], } } pub fn new_output( client: jack::Client, channels: ChannelCount, data_callback: D, mut error_callback: E, ) -> Stream where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let mut ports = vec![]; let mut port_names: Vec = vec![]; // Create ports for i in 0..channels { let port_try = client.register_port(&format!("out_{}", i), jack::AudioOut::default()); match port_try { Ok(port) => { // Get the port name in order to later connect it automatically if let Ok(port_name) = port.name() { port_names.push(port_name); } // Store the port into a Vec to move to the ProcessHandler ports.push(port); } Err(e) => { // If port creation failed, send the error back via the error_callback error_callback( BackendSpecificError { description: e.to_string(), } .into(), ); } } } let playing = Arc::new(AtomicBool::new(true)); let error_callback_ptr = Arc::new(Mutex::new(error_callback)) as ErrorCallbackPtr; let output_process_handler = LocalProcessHandler::new( ports, vec![], SampleRate(client.sample_rate() as u32), client.buffer_size() as usize, None, Some(Box::new(data_callback)), playing.clone(), Arc::clone(&error_callback_ptr), ); let notification_handler = JackNotificationHandler::new(error_callback_ptr); let async_client = client .activate_async(notification_handler, output_process_handler) .unwrap(); Stream { playing, async_client, input_port_names: vec![], 
output_port_names: port_names, } } /// Connect to the standard system outputs in jack, system:playback_1 and system:playback_2 /// This has to be done after the client is activated, doing it just after creating the ports doesn't work. pub fn connect_to_system_outputs(&mut self) { // Get the system ports let system_ports = self.async_client.as_client().ports( Some("system:playback_.*"), None, jack::PortFlags::empty(), ); // Connect outputs from this client to the system playback inputs for i in 0..self.output_port_names.len() { if i >= system_ports.len() { break; } match self .async_client .as_client() .connect_ports_by_name(&self.output_port_names[i], &system_ports[i]) { Ok(_) => (), Err(e) => println!("Unable to connect to port with error {}", e), } } } /// Connect to the standard system outputs in jack, system:capture_1 and system:capture_2 /// This has to be done after the client is activated, doing it just after creating the ports doesn't work. pub fn connect_to_system_inputs(&mut self) { // Get the system ports let system_ports = self.async_client.as_client().ports( Some("system:capture_.*"), None, jack::PortFlags::empty(), ); // Connect outputs from this client to the system playback inputs for i in 0..self.input_port_names.len() { if i >= system_ports.len() { break; } match self .async_client .as_client() .connect_ports_by_name(&system_ports[i], &self.input_port_names[i]) { Ok(_) => (), Err(e) => println!("Unable to connect to port with error {}", e), } } } } impl StreamTrait for Stream { fn play(&self) -> Result<(), PlayStreamError> { self.playing.store(true, Ordering::SeqCst); Ok(()) } fn pause(&self) -> Result<(), PauseStreamError> { self.playing.store(false, Ordering::SeqCst); Ok(()) } } struct LocalProcessHandler { /// No new ports are allowed to be created after the creation of the LocalProcessHandler as that would invalidate the buffer sizes out_ports: Vec>, in_ports: Vec>, sample_rate: SampleRate, buffer_size: usize, input_data_callback: Option>, 
output_data_callback: Option>, // JACK audio samples are 32-bit float (unless you do some custom dark magic) temp_input_buffer: Vec, temp_output_buffer: Vec, playing: Arc, creation_timestamp: std::time::Instant, /// This should not be called on `process`, only on `buffer_size` because it can block. error_callback_ptr: ErrorCallbackPtr, } impl LocalProcessHandler { fn new( out_ports: Vec>, in_ports: Vec>, sample_rate: SampleRate, buffer_size: usize, input_data_callback: Option>, output_data_callback: Option< Box, >, playing: Arc, error_callback_ptr: ErrorCallbackPtr, ) -> Self { // These may be reallocated in the `buffer_size` callback. let temp_input_buffer = vec![0.0; in_ports.len() * buffer_size]; let temp_output_buffer = vec![0.0; out_ports.len() * buffer_size]; LocalProcessHandler { out_ports, in_ports, sample_rate, buffer_size, input_data_callback, output_data_callback, temp_input_buffer, temp_output_buffer, playing, creation_timestamp: std::time::Instant::now(), error_callback_ptr, } } } fn temp_buffer_to_data(temp_input_buffer: &mut Vec, total_buffer_size: usize) -> Data { let slice = &temp_input_buffer[0..total_buffer_size]; let data = slice.as_ptr() as *mut (); let len = total_buffer_size; let data = unsafe { Data::from_parts(data, len, JACK_SAMPLE_FORMAT) }; data } impl jack::ProcessHandler for LocalProcessHandler { fn process(&mut self, _: &jack::Client, process_scope: &jack::ProcessScope) -> jack::Control { if !self.playing.load(Ordering::SeqCst) { return jack::Control::Continue; } // This should be equal to self.buffer_size, but the implementation will // work even if it is less. Will panic in `temp_buffer_to_data` if greater. 
let current_frame_count = process_scope.n_frames() as usize; // Get timestamp data let cycle_times = process_scope.cycle_times(); let current_start_usecs = match cycle_times { Ok(times) => times.current_usecs, Err(_) => { // jack was unable to get the current time information // Fall back to using Instants let now = std::time::Instant::now(); let duration = now.duration_since(self.creation_timestamp); duration.as_micros() as u64 } }; let start_cycle_instant = micros_to_stream_instant(current_start_usecs); let start_callback_instant = start_cycle_instant .add(frames_to_duration( process_scope.frames_since_cycle_start() as usize, self.sample_rate, )) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); if let Some(input_callback) = &mut self.input_data_callback { // Let's get the data from the input ports and run the callback let num_in_channels = self.in_ports.len(); // Read the data from the input ports into the temporary buffer // Go through every channel and store its data in the temporary input buffer for ch_ix in 0..num_in_channels { let input_channel = &self.in_ports[ch_ix].as_slice(process_scope); for i in 0..current_frame_count { self.temp_input_buffer[ch_ix + i * num_in_channels] = input_channel[i]; } } // Create a slice of exactly current_frame_count frames let data = temp_buffer_to_data( &mut self.temp_input_buffer, current_frame_count * num_in_channels, ); // Create timestamp let frames_since_cycle_start = process_scope.frames_since_cycle_start() as usize; let duration_since_cycle_start = frames_to_duration(frames_since_cycle_start, self.sample_rate); let callback = start_callback_instant .add(duration_since_cycle_start) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); let capture = start_callback_instant; let timestamp = crate::InputStreamTimestamp { callback, capture }; let info = crate::InputCallbackInfo { timestamp }; input_callback(&data, &info); } if let Some(output_callback) = &mut 
self.output_data_callback { let num_out_channels = self.out_ports.len(); // Create a slice of exactly current_frame_count frames let mut data = temp_buffer_to_data( &mut self.temp_output_buffer, current_frame_count * num_out_channels, ); // Create timestamp let frames_since_cycle_start = process_scope.frames_since_cycle_start() as usize; let duration_since_cycle_start = frames_to_duration(frames_since_cycle_start, self.sample_rate); let callback = start_callback_instant .add(duration_since_cycle_start) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); let buffer_duration = frames_to_duration(current_frame_count, self.sample_rate); let playback = start_cycle_instant .add(buffer_duration) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); let timestamp = crate::OutputStreamTimestamp { callback, playback }; let info = crate::OutputCallbackInfo { timestamp }; output_callback(&mut data, &info); // Deinterlace for ch_ix in 0..num_out_channels { let output_channel = &mut self.out_ports[ch_ix].as_mut_slice(process_scope); for i in 0..current_frame_count { output_channel[i] = self.temp_output_buffer[ch_ix + i * num_out_channels]; } } } // Continue as normal jack::Control::Continue } fn buffer_size(&mut self, _: &jack::Client, size: jack::Frames) -> jack::Control { // The `buffer_size` callback is actually called on the process thread, but // it does not need to be suitable for real-time use. Thus we can simply allocate // new buffers here. It is also fine to call the error callback. 
// Details: https://github.com/RustAudio/rust-jack/issues/137 let new_size = size as usize; if new_size != self.buffer_size { self.buffer_size = new_size; self.temp_input_buffer = vec![0.0; self.in_ports.len() * new_size]; self.temp_output_buffer = vec![0.0; self.out_ports.len() * new_size]; let description = format!("buffer size changed to: {}", new_size); if let Ok(mut mutex_guard) = self.error_callback_ptr.lock() { let err = &mut *mutex_guard; err(BackendSpecificError { description }.into()); } } jack::Control::Continue } } fn micros_to_stream_instant(micros: u64) -> crate::StreamInstant { let nanos = micros * 1000; let secs = micros / 1_000_000; let subsec_nanos = nanos - secs * 1_000_000_000; crate::StreamInstant::new(secs as i64, subsec_nanos as u32) } // Convert the given duration in frames at the given sample rate to a `std::time::Duration`. fn frames_to_duration(frames: usize, rate: crate::SampleRate) -> std::time::Duration { let secsf = frames as f64 / rate.0 as f64; let secs = secsf as u64; let nanos = ((secsf - secs as f64) * 1_000_000_000.0) as u32; std::time::Duration::new(secs, nanos) } /// Receives notifications from the JACK server. It is unclear if this may be run concurrent with itself under JACK2 specs /// so it needs to be Sync. 
struct JackNotificationHandler { error_callback_ptr: ErrorCallbackPtr, init_sample_rate_flag: Arc, } impl JackNotificationHandler { pub fn new(error_callback_ptr: ErrorCallbackPtr) -> Self { JackNotificationHandler { error_callback_ptr, init_sample_rate_flag: Arc::new(AtomicBool::new(false)), } } fn send_error(&mut self, description: String) { // This thread isn't the audio thread, it's fine to block if let Ok(mut mutex_guard) = self.error_callback_ptr.lock() { let err = &mut *mutex_guard; err(BackendSpecificError { description }.into()); } } } impl jack::NotificationHandler for JackNotificationHandler { fn shutdown(&mut self, _status: jack::ClientStatus, reason: &str) { self.send_error(format!("JACK was shut down for reason: {}", reason)); } fn sample_rate(&mut self, _: &jack::Client, srate: jack::Frames) -> jack::Control { match self.init_sample_rate_flag.load(Ordering::SeqCst) { false => { // One of these notifications is sent every time a client is started. self.init_sample_rate_flag.store(true, Ordering::SeqCst); jack::Control::Continue } true => { self.send_error(format!("sample rate changed to: {}", srate)); // Since CPAL currently has no way of signaling a sample rate change in order to make // all necessary changes that would bring we choose to quit. 
jack::Control::Quit } } } fn xrun(&mut self, _: &jack::Client) -> jack::Control { self.send_error(String::from("xrun (buffer over or under run)")); jack::Control::Continue } } cpal-0.15.2/src/host/mod.rs000064400000000000000000000013351046102023000135230ustar 00000000000000#[cfg(any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" ))] pub(crate) mod alsa; #[cfg(all(windows, feature = "asio"))] pub(crate) mod asio; #[cfg(any(target_os = "macos", target_os = "ios"))] pub(crate) mod coreaudio; #[cfg(target_os = "emscripten")] pub(crate) mod emscripten; #[cfg(all( any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" ), feature = "jack" ))] pub(crate) mod jack; pub(crate) mod null; #[cfg(target_os = "android")] pub(crate) mod oboe; #[cfg(windows)] pub(crate) mod wasapi; #[cfg(all(target_arch = "wasm32", feature = "wasm-bindgen"))] pub(crate) mod webaudio; cpal-0.15.2/src/host/null/mod.rs000064400000000000000000000072031046102023000144750ustar 00000000000000use std::time::Duration; use crate::traits::{DeviceTrait, HostTrait, StreamTrait}; use crate::{ BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat, StreamConfig, StreamError, SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError, }; #[derive(Default)] pub struct Devices; #[derive(Clone, Debug, PartialEq, Eq)] pub struct Device; pub struct Host; #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Stream; pub struct SupportedInputConfigs; pub struct SupportedOutputConfigs; impl Host { #[allow(dead_code)] pub fn new() -> Result { Ok(Host) } } impl Devices { pub fn new() -> Result { Ok(Devices) } } impl DeviceTrait for Device { type SupportedInputConfigs = SupportedInputConfigs; type SupportedOutputConfigs = SupportedOutputConfigs; type Stream = Stream; #[inline] fn name(&self) -> Result { 
Ok("null".to_owned()) } #[inline] fn supported_input_configs( &self, ) -> Result { unimplemented!() } #[inline] fn supported_output_configs( &self, ) -> Result { unimplemented!() } #[inline] fn default_input_config(&self) -> Result { unimplemented!() } #[inline] fn default_output_config(&self) -> Result { unimplemented!() } fn build_input_stream_raw( &self, _config: &StreamConfig, _sample_format: SampleFormat, _data_callback: D, _error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { unimplemented!() } /// Create an output stream. fn build_output_stream_raw( &self, _config: &StreamConfig, _sample_format: SampleFormat, _data_callback: D, _error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { unimplemented!() } } impl HostTrait for Host { type Devices = Devices; type Device = Device; fn is_available() -> bool { false } fn devices(&self) -> Result { Devices::new() } fn default_input_device(&self) -> Option { None } fn default_output_device(&self) -> Option { None } } impl StreamTrait for Stream { fn play(&self) -> Result<(), PlayStreamError> { unimplemented!() } fn pause(&self) -> Result<(), PauseStreamError> { unimplemented!() } } impl Iterator for Devices { type Item = Device; #[inline] fn next(&mut self) -> Option { None } } impl Iterator for SupportedInputConfigs { type Item = SupportedStreamConfigRange; #[inline] fn next(&mut self) -> Option { None } } impl Iterator for SupportedOutputConfigs { type Item = SupportedStreamConfigRange; #[inline] fn next(&mut self) -> Option { None } } cpal-0.15.2/src/host/oboe/android_media.rs000064400000000000000000000032571046102023000164540ustar 00000000000000use std::sync::Arc; extern crate jni; use self::jni::Executor; use self::jni::{errors::Result as JResult, JNIEnv, JavaVM}; // constants from android.media.AudioFormat pub const 
ENCODING_PCM_16BIT: i32 = 2; pub const ENCODING_PCM_FLOAT: i32 = 4; pub const CHANNEL_OUT_MONO: i32 = 4; pub const CHANNEL_OUT_STEREO: i32 = 12; fn with_attached(closure: F) -> JResult where F: FnOnce(&JNIEnv) -> JResult, { let android_context = ndk_context::android_context(); let vm = Arc::new(unsafe { JavaVM::from_raw(android_context.vm().cast())? }); Executor::new(vm).with_attached(|env| closure(env)) } fn get_min_buffer_size( class: &'static str, sample_rate: i32, channel_mask: i32, format: i32, ) -> i32 { // Unwrapping everything because these operations are not expected to fail // or throw exceptions. Android returns negative values for invalid parameters, // which is what we expect. with_attached(|env| { let class = env.find_class(class).unwrap(); env.call_static_method( class, "getMinBufferSize", "(III)I", &[sample_rate.into(), channel_mask.into(), format.into()], ) .unwrap() .i() }) .unwrap() } pub fn get_audio_track_min_buffer_size(sample_rate: i32, channel_mask: i32, format: i32) -> i32 { get_min_buffer_size( "android/media/AudioTrack", sample_rate, channel_mask, format, ) } pub fn get_audio_record_min_buffer_size(sample_rate: i32, channel_mask: i32, format: i32) -> i32 { get_min_buffer_size( "android/media/AudioRecord", sample_rate, channel_mask, format, ) } cpal-0.15.2/src/host/oboe/convert.rs000064400000000000000000000044671046102023000153610ustar 00000000000000use std::convert::TryInto; use std::time::Duration; extern crate oboe; use crate::{ BackendSpecificError, BuildStreamError, PauseStreamError, PlayStreamError, StreamError, StreamInstant, }; pub fn to_stream_instant(duration: Duration) -> StreamInstant { StreamInstant::new( duration.as_secs().try_into().unwrap(), duration.subsec_nanos(), ) } pub fn stream_instant(stream: &mut T) -> StreamInstant { const CLOCK_MONOTONIC: i32 = 1; let ts = stream .get_timestamp(CLOCK_MONOTONIC) .unwrap_or(oboe::FrameTimestamp { position: 0, timestamp: 0, }); to_stream_instant(Duration::from_nanos(ts.timestamp as 
u64)) } impl From for StreamError { fn from(error: oboe::Error) -> Self { use self::oboe::Error::*; match error { Disconnected | Unavailable | Closed => Self::DeviceNotAvailable, e => (BackendSpecificError { description: e.to_string(), }) .into(), } } } impl From for PlayStreamError { fn from(error: oboe::Error) -> Self { use self::oboe::Error::*; match error { Disconnected | Unavailable | Closed => Self::DeviceNotAvailable, e => (BackendSpecificError { description: e.to_string(), }) .into(), } } } impl From for PauseStreamError { fn from(error: oboe::Error) -> Self { use self::oboe::Error::*; match error { Disconnected | Unavailable | Closed => Self::DeviceNotAvailable, e => (BackendSpecificError { description: e.to_string(), }) .into(), } } } impl From for BuildStreamError { fn from(error: oboe::Error) -> Self { use self::oboe::Error::*; match error { Disconnected | Unavailable | Closed => Self::DeviceNotAvailable, NoFreeHandles => Self::StreamIdOverflow, InvalidFormat | InvalidRate => Self::StreamConfigNotSupported, IllegalArgument => Self::InvalidArgument, e => (BackendSpecificError { description: e.to_string(), }) .into(), } } } cpal-0.15.2/src/host/oboe/input_callback.rs000064400000000000000000000051111046102023000166370ustar 00000000000000use std::marker::PhantomData; use std::time::Instant; extern crate oboe; use super::convert::{stream_instant, to_stream_instant}; use crate::{Data, InputCallbackInfo, InputStreamTimestamp, SizedSample, StreamError}; pub struct CpalInputCallback { data_cb: Box, error_cb: Box, created: Instant, phantom_channel: PhantomData, phantom_input: PhantomData, } impl CpalInputCallback { pub fn new(data_cb: D, error_cb: E) -> Self where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { Self { data_cb: Box::new(data_cb), error_cb: Box::new(error_cb), created: Instant::now(), phantom_channel: PhantomData, phantom_input: PhantomData, } } fn make_callback_info( &self, audio_stream: &mut dyn 
oboe::AudioInputStreamSafe, ) -> InputCallbackInfo { InputCallbackInfo { timestamp: InputStreamTimestamp { callback: to_stream_instant(self.created.elapsed()), capture: stream_instant(audio_stream), }, } } } impl oboe::AudioInputCallback for CpalInputCallback where (T, C): oboe::IsFrameType, { type FrameType = (T, C); fn on_error_before_close( &mut self, _audio_stream: &mut dyn oboe::AudioInputStreamSafe, error: oboe::Error, ) { (self.error_cb)(StreamError::from(error)) } fn on_error_after_close( &mut self, _audio_stream: &mut dyn oboe::AudioInputStreamSafe, error: oboe::Error, ) { (self.error_cb)(StreamError::from(error)) } fn on_audio_ready( &mut self, audio_stream: &mut dyn oboe::AudioInputStreamSafe, audio_data: &[<::FrameType as oboe::IsFrameType>::Type], ) -> oboe::DataCallbackResult { let cb_info = self.make_callback_info(audio_stream); let channel_count = if C::CHANNEL_COUNT == oboe::ChannelCount::Mono { 1 } else { 2 }; (self.data_cb)( &unsafe { Data::from_parts( audio_data.as_ptr() as *mut _, audio_data.len() * channel_count, T::FORMAT, ) }, &cb_info, ); oboe::DataCallbackResult::Continue } } cpal-0.15.2/src/host/oboe/mod.rs000064400000000000000000000410351046102023000144500ustar 00000000000000use std::cell::RefCell; use std::cmp; use std::convert::TryInto; use std::time::Duration; use std::vec::IntoIter as VecIntoIter; extern crate oboe; use crate::traits::{DeviceTrait, HostTrait, StreamTrait}; use crate::{ BackendSpecificError, BufferSize, BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat, SampleRate, SizedSample, StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError, }; mod android_media; mod convert; mod input_callback; mod output_callback; use self::android_media::{get_audio_record_min_buffer_size, get_audio_track_min_buffer_size}; use 
self::input_callback::CpalInputCallback; use self::oboe::{AudioInputStream, AudioOutputStream}; use self::output_callback::CpalOutputCallback; // Android Java API supports up to 8 channels, but oboe API // only exposes mono and stereo. const CHANNEL_MASKS: [i32; 2] = [ android_media::CHANNEL_OUT_MONO, android_media::CHANNEL_OUT_STEREO, ]; const SAMPLE_RATES: [i32; 13] = [ 5512, 8000, 11025, 16000, 22050, 32000, 44100, 48000, 64000, 88200, 96000, 176_400, 192_000, ]; pub struct Host; pub struct Device(Option); pub enum Stream { Input(Box>), Output(Box>), } pub type SupportedInputConfigs = VecIntoIter; pub type SupportedOutputConfigs = VecIntoIter; pub type Devices = VecIntoIter; impl Host { pub fn new() -> Result { Ok(Host) } } impl HostTrait for Host { type Devices = Devices; type Device = Device; fn is_available() -> bool { true } fn devices(&self) -> Result { if let Ok(devices) = oboe::AudioDeviceInfo::request(oboe::AudioDeviceDirection::InputOutput) { Ok(devices .into_iter() .map(|d| Device(Some(d))) .collect::>() .into_iter()) } else { Ok(vec![Device(None)].into_iter()) } } fn default_input_device(&self) -> Option { Some(Device(None)) } fn default_output_device(&self) -> Option { Some(Device(None)) } } fn buffer_size_range_for_params( is_output: bool, sample_rate: i32, channel_mask: i32, android_format: i32, ) -> SupportedBufferSize { let min_buffer_size = if is_output { get_audio_track_min_buffer_size(sample_rate, channel_mask, android_format) } else { get_audio_record_min_buffer_size(sample_rate, channel_mask, android_format) }; if min_buffer_size > 0 { SupportedBufferSize::Range { min: min_buffer_size as u32, max: i32::MAX as u32, } } else { SupportedBufferSize::Unknown } } fn default_supported_configs(is_output: bool) -> VecIntoIter { // Have to "brute force" the parameter combinations with getMinBufferSize const FORMATS: [SampleFormat; 2] = [SampleFormat::I16, SampleFormat::F32]; let mut output = Vec::with_capacity(SAMPLE_RATES.len() * CHANNEL_MASKS.len() 
* FORMATS.len()); for sample_format in &FORMATS { let android_format = if *sample_format == SampleFormat::I16 { android_media::ENCODING_PCM_16BIT } else { android_media::ENCODING_PCM_FLOAT }; for (mask_idx, channel_mask) in CHANNEL_MASKS.iter().enumerate() { let channel_count = mask_idx + 1; for sample_rate in &SAMPLE_RATES { if let SupportedBufferSize::Range { min, max } = buffer_size_range_for_params( is_output, *sample_rate, *channel_mask, android_format, ) { output.push(SupportedStreamConfigRange { channels: channel_count as u16, min_sample_rate: SampleRate(*sample_rate as u32), max_sample_rate: SampleRate(*sample_rate as u32), buffer_size: SupportedBufferSize::Range { min, max }, sample_format: *sample_format, }); } } } } output.into_iter() } fn device_supported_configs( device: &oboe::AudioDeviceInfo, is_output: bool, ) -> VecIntoIter { let sample_rates = if !device.sample_rates.is_empty() { device.sample_rates.as_slice() } else { &SAMPLE_RATES }; const ALL_CHANNELS: [i32; 2] = [1, 2]; let channel_counts = if !device.channel_counts.is_empty() { device.channel_counts.as_slice() } else { &ALL_CHANNELS }; const ALL_FORMATS: [oboe::AudioFormat; 2] = [oboe::AudioFormat::I16, oboe::AudioFormat::F32]; let formats = if !device.formats.is_empty() { device.formats.as_slice() } else { &ALL_FORMATS }; let mut output = Vec::with_capacity(sample_rates.len() * channel_counts.len() * formats.len()); for sample_rate in sample_rates { for channel_count in channel_counts { assert!(*channel_count > 0); if *channel_count > 2 { // could be supported by the device, but oboe does not support more than 2 channels continue; } let channel_mask = CHANNEL_MASKS[*channel_count as usize - 1]; for format in formats { let (android_format, sample_format) = match format { oboe::AudioFormat::I16 => { (android_media::ENCODING_PCM_16BIT, SampleFormat::I16) } oboe::AudioFormat::F32 => { (android_media::ENCODING_PCM_FLOAT, SampleFormat::F32) } _ => panic!("Unexpected format"), }; let buffer_size = 
buffer_size_range_for_params( is_output, *sample_rate, channel_mask, android_format, ); output.push(SupportedStreamConfigRange { channels: cmp::min(*channel_count as u16, 2u16), min_sample_rate: SampleRate(*sample_rate as u32), max_sample_rate: SampleRate(*sample_rate as u32), buffer_size, sample_format, }); } } } output.into_iter() } fn configure_for_device( builder: oboe::AudioStreamBuilder, device: &Device, config: &StreamConfig, ) -> oboe::AudioStreamBuilder { let mut builder = if let Some(info) = &device.0 { builder.set_device_id(info.id) } else { builder }; builder = builder.set_sample_rate(config.sample_rate.0.try_into().unwrap()); match &config.buffer_size { BufferSize::Default => builder, BufferSize::Fixed(size) => builder.set_buffer_capacity_in_frames(*size as i32), } } fn build_input_stream( device: &Device, config: &StreamConfig, data_callback: D, error_callback: E, builder: oboe::AudioStreamBuilder, ) -> Result where T: SizedSample + oboe::IsFormat + Send + 'static, C: oboe::IsChannelCount + Send + 'static, (T, C): oboe::IsFrameType, D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let builder = configure_for_device(builder, device, config); let stream = builder .set_callback(CpalInputCallback::::new( data_callback, error_callback, )) .open_stream()?; Ok(Stream::Input(Box::new(RefCell::new(stream)))) } fn build_output_stream( device: &Device, config: &StreamConfig, data_callback: D, error_callback: E, builder: oboe::AudioStreamBuilder, ) -> Result where T: SizedSample + oboe::IsFormat + Send + 'static, C: oboe::IsChannelCount + Send + 'static, (T, C): oboe::IsFrameType, D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let builder = configure_for_device(builder, device, config); let stream = builder .set_callback(CpalOutputCallback::::new( data_callback, error_callback, )) .open_stream()?; Ok(Stream::Output(Box::new(RefCell::new(stream)))) } impl 
DeviceTrait for Device { type SupportedInputConfigs = SupportedInputConfigs; type SupportedOutputConfigs = SupportedOutputConfigs; type Stream = Stream; fn name(&self) -> Result { match &self.0 { None => Ok("default".to_owned()), Some(info) => Ok(info.product_name.clone()), } } fn supported_input_configs( &self, ) -> Result { if let Some(info) = &self.0 { Ok(device_supported_configs(info, false)) } else { Ok(default_supported_configs(false)) } } fn supported_output_configs( &self, ) -> Result { if let Some(info) = &self.0 { Ok(device_supported_configs(info, true)) } else { Ok(default_supported_configs(true)) } } fn default_input_config(&self) -> Result { let mut configs: Vec<_> = self.supported_input_configs().unwrap().collect(); configs.sort_by(|a, b| b.cmp_default_heuristics(a)); let config = configs .into_iter() .next() .ok_or(DefaultStreamConfigError::StreamTypeNotSupported)? .with_max_sample_rate(); Ok(config) } fn default_output_config(&self) -> Result { let mut configs: Vec<_> = self.supported_output_configs().unwrap().collect(); configs.sort_by(|a, b| b.cmp_default_heuristics(a)); let config = configs .into_iter() .next() .ok_or(DefaultStreamConfigError::StreamTypeNotSupported)? 
.with_max_sample_rate(); Ok(config) } fn build_input_stream_raw( &self, config: &StreamConfig, sample_format: SampleFormat, data_callback: D, error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { match sample_format { SampleFormat::I16 => { let builder = oboe::AudioStreamBuilder::default() .set_input() .set_format::(); if config.channels == 1 { build_input_stream( self, config, data_callback, error_callback, builder.set_mono(), ) } else if config.channels == 2 { build_input_stream( self, config, data_callback, error_callback, builder.set_stereo(), ) } else { Err(BackendSpecificError { description: "More than 2 channels are not supported by Oboe.".to_owned(), } .into()) } } SampleFormat::F32 => { let builder = oboe::AudioStreamBuilder::default() .set_input() .set_format::(); if config.channels == 1 { build_input_stream( self, config, data_callback, error_callback, builder.set_mono(), ) } else if config.channels == 2 { build_input_stream( self, config, data_callback, error_callback, builder.set_stereo(), ) } else { Err(BackendSpecificError { description: "More than 2 channels are not supported by Oboe.".to_owned(), } .into()) } } sample_format => Err(BackendSpecificError { description: format!("{} format is not supported on Android.", sample_format), } .into()), } } fn build_output_stream_raw( &self, config: &StreamConfig, sample_format: SampleFormat, data_callback: D, error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { match sample_format { SampleFormat::I16 => { let builder = oboe::AudioStreamBuilder::default() .set_output() .set_format::(); if config.channels == 1 { build_output_stream( self, config, data_callback, error_callback, builder.set_mono(), ) } else if config.channels == 2 { build_output_stream( self, config, data_callback, error_callback, 
builder.set_stereo(), ) } else { Err(BackendSpecificError { description: "More than 2 channels are not supported by Oboe.".to_owned(), } .into()) } } SampleFormat::F32 => { let builder = oboe::AudioStreamBuilder::default() .set_output() .set_format::(); if config.channels == 1 { build_output_stream( self, config, data_callback, error_callback, builder.set_mono(), ) } else if config.channels == 2 { build_output_stream( self, config, data_callback, error_callback, builder.set_stereo(), ) } else { Err(BackendSpecificError { description: "More than 2 channels are not supported by Oboe.".to_owned(), } .into()) } } sample_format => Err(BackendSpecificError { description: format!("{} format is not supported on Android.", sample_format), } .into()), } } } impl StreamTrait for Stream { fn play(&self) -> Result<(), PlayStreamError> { match self { Self::Input(stream) => stream .borrow_mut() .request_start() .map_err(PlayStreamError::from), Self::Output(stream) => stream .borrow_mut() .request_start() .map_err(PlayStreamError::from), } } fn pause(&self) -> Result<(), PauseStreamError> { match self { Self::Input(_) => Err(BackendSpecificError { description: "Pause called on the input stream.".to_owned(), } .into()), Self::Output(stream) => stream .borrow_mut() .request_pause() .map_err(PauseStreamError::from), } } } cpal-0.15.2/src/host/oboe/output_callback.rs000064400000000000000000000051561046102023000170510ustar 00000000000000use std::marker::PhantomData; use std::time::Instant; extern crate oboe; use super::convert::{stream_instant, to_stream_instant}; use crate::{Data, OutputCallbackInfo, OutputStreamTimestamp, SizedSample, StreamError}; pub struct CpalOutputCallback { data_cb: Box, error_cb: Box, created: Instant, phantom_channel: PhantomData, phantom_input: PhantomData, } impl CpalOutputCallback { pub fn new(data_cb: D, error_cb: E) -> Self where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { Self { data_cb: 
Box::new(data_cb), error_cb: Box::new(error_cb), created: Instant::now(), phantom_channel: PhantomData, phantom_input: PhantomData, } } fn make_callback_info( &self, audio_stream: &mut dyn oboe::AudioOutputStreamSafe, ) -> OutputCallbackInfo { OutputCallbackInfo { timestamp: OutputStreamTimestamp { callback: to_stream_instant(self.created.elapsed()), playback: stream_instant(audio_stream), }, } } } impl oboe::AudioOutputCallback for CpalOutputCallback where (T, C): oboe::IsFrameType, { type FrameType = (T, C); fn on_error_before_close( &mut self, _audio_stream: &mut dyn oboe::AudioOutputStreamSafe, error: oboe::Error, ) { (self.error_cb)(StreamError::from(error)) } fn on_error_after_close( &mut self, _audio_stream: &mut dyn oboe::AudioOutputStreamSafe, error: oboe::Error, ) { (self.error_cb)(StreamError::from(error)) } fn on_audio_ready( &mut self, audio_stream: &mut dyn oboe::AudioOutputStreamSafe, audio_data: &mut [<::FrameType as oboe::IsFrameType>::Type], ) -> oboe::DataCallbackResult { let cb_info = self.make_callback_info(audio_stream); let channel_count = if C::CHANNEL_COUNT == oboe::ChannelCount::Mono { 1 } else { 2 }; (self.data_cb)( &mut unsafe { Data::from_parts( audio_data.as_mut_ptr() as *mut _, audio_data.len() * channel_count, T::FORMAT, ) }, &cb_info, ); oboe::DataCallbackResult::Continue } } cpal-0.15.2/src/host/wasapi/com.rs000064400000000000000000000037511046102023000150120ustar 00000000000000//! Handles COM initialization and cleanup. use super::IoError; use std::marker::PhantomData; use windows::Win32::Foundation::RPC_E_CHANGED_MODE; use windows::Win32::System::Com::{CoInitializeEx, CoUninitialize, COINIT_APARTMENTTHREADED}; thread_local!(static COM_INITIALIZED: ComInitialized = { unsafe { // Try to initialize COM with STA by default to avoid compatibility issues with the ASIO // backend (where CoInitialize() is called by the ASIO SDK) or winit (where drag and drop // requires STA). 
// This call can fail with RPC_E_CHANGED_MODE if another library initialized COM with MTA. // That's OK though since COM ensures thread-safety/compatibility through marshalling when // necessary. let result = CoInitializeEx(None, COINIT_APARTMENTTHREADED); match result.clone().map_err(|e| e.code()) { Ok(_) | Err(RPC_E_CHANGED_MODE) => { ComInitialized { result, _ptr: PhantomData, } }, Err(e) => { // COM initialization failed in another way, something is really wrong. panic!("Failed to initialize COM: {}", IoError::from_raw_os_error(e.0)); } } } }); /// RAII object that guards the fact that COM is initialized. /// // We store a raw pointer because it's the only way at the moment to remove `Send`/`Sync` from the // object. struct ComInitialized { result: windows::core::Result<()>, _ptr: PhantomData<*mut ()>, } impl Drop for ComInitialized { #[inline] fn drop(&mut self) { // Need to avoid calling CoUninitialize() if CoInitializeEx failed since it may have // returned RPC_E_MODE_CHANGED - which is OK, see above. if self.result.is_ok() { unsafe { CoUninitialize() }; } } } /// Ensures that COM is initialized in this thread. 
#[inline] pub fn com_initialized() { COM_INITIALIZED.with(|_| {}); } cpal-0.15.2/src/host/wasapi/device.rs000064400000000000000000001150211046102023000154650ustar 00000000000000use crate::FrameCount; use crate::{ BackendSpecificError, BufferSize, Data, DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, SampleFormat, SampleRate, StreamConfig, SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError, COMMON_SAMPLE_RATES, }; use once_cell::sync::Lazy; use std::ffi::OsString; use std::fmt; use std::mem; use std::ops::{Deref, DerefMut}; use std::os::windows::ffi::OsStringExt; use std::ptr; use std::slice; use std::sync::{Arc, Mutex, MutexGuard}; use std::time::Duration; use super::com; use super::{windows_err_to_cpal_err, windows_err_to_cpal_err_message}; use windows::core::ComInterface; use windows::core::GUID; use windows::Win32::Devices::Properties; use windows::Win32::Foundation; use windows::Win32::Media::Audio::IAudioRenderClient; use windows::Win32::Media::{Audio, KernelStreaming, Multimedia}; use windows::Win32::System::Com; use windows::Win32::System::Com::{StructuredStorage, STGM_READ, VT_LPWSTR}; use windows::Win32::System::Threading; use super::stream::{AudioClientFlow, Stream, StreamInner}; use crate::{traits::DeviceTrait, BuildStreamError, StreamError}; pub type SupportedInputConfigs = std::vec::IntoIter; pub type SupportedOutputConfigs = std::vec::IntoIter; /// Wrapper because of that stupid decision to remove `Send` and `Sync` from raw pointers. #[derive(Clone)] struct IAudioClientWrapper(Audio::IAudioClient); unsafe impl Send for IAudioClientWrapper {} unsafe impl Sync for IAudioClientWrapper {} /// An opaque type that identifies an end point. #[derive(Clone)] pub struct Device { device: Audio::IMMDevice, /// We cache an uninitialized `IAudioClient` so that we can call functions from it without /// having to create/destroy audio clients all the time. 
future_audio_client: Arc>>, // TODO: add NonZero around the ptr } impl DeviceTrait for Device { type SupportedInputConfigs = SupportedInputConfigs; type SupportedOutputConfigs = SupportedOutputConfigs; type Stream = Stream; fn name(&self) -> Result { Device::name(self) } fn supported_input_configs( &self, ) -> Result { Device::supported_input_configs(self) } fn supported_output_configs( &self, ) -> Result { Device::supported_output_configs(self) } fn default_input_config(&self) -> Result { Device::default_input_config(self) } fn default_output_config(&self) -> Result { Device::default_output_config(self) } fn build_input_stream_raw( &self, config: &StreamConfig, sample_format: SampleFormat, data_callback: D, error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let stream_inner = self.build_input_stream_raw_inner(config, sample_format)?; Ok(Stream::new_input( stream_inner, data_callback, error_callback, )) } fn build_output_stream_raw( &self, config: &StreamConfig, sample_format: SampleFormat, data_callback: D, error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let stream_inner = self.build_output_stream_raw_inner(config, sample_format)?; Ok(Stream::new_output( stream_inner, data_callback, error_callback, )) } } struct Endpoint { endpoint: Audio::IMMEndpoint, } enum WaveFormat { Ex(Audio::WAVEFORMATEX), Extensible(Audio::WAVEFORMATEXTENSIBLE), } // Use RAII to make sure CoTaskMemFree is called when we are responsible for freeing. struct WaveFormatExPtr(*mut Audio::WAVEFORMATEX); impl Drop for WaveFormatExPtr { fn drop(&mut self) { unsafe { Com::CoTaskMemFree(Some(self.0 as *mut _)); } } } impl WaveFormat { // Given a pointer to some format, returns a valid copy of the format. 
pub fn copy_from_waveformatex_ptr(ptr: *const Audio::WAVEFORMATEX) -> Option { unsafe { match (*ptr).wFormatTag as u32 { Audio::WAVE_FORMAT_PCM | Multimedia::WAVE_FORMAT_IEEE_FLOAT => { Some(WaveFormat::Ex(*ptr)) } KernelStreaming::WAVE_FORMAT_EXTENSIBLE => { let extensible_ptr = ptr as *const Audio::WAVEFORMATEXTENSIBLE; Some(WaveFormat::Extensible(*extensible_ptr)) } _ => None, } } } // Get the pointer to the WAVEFORMATEX struct. pub fn as_ptr(&self) -> *const Audio::WAVEFORMATEX { self.deref() as *const _ } } impl Deref for WaveFormat { type Target = Audio::WAVEFORMATEX; fn deref(&self) -> &Self::Target { match *self { WaveFormat::Ex(ref f) => f, WaveFormat::Extensible(ref f) => &f.Format, } } } impl DerefMut for WaveFormat { fn deref_mut(&mut self) -> &mut Self::Target { match *self { WaveFormat::Ex(ref mut f) => f, WaveFormat::Extensible(ref mut f) => &mut f.Format, } } } unsafe fn immendpoint_from_immdevice(device: Audio::IMMDevice) -> Audio::IMMEndpoint { device .cast::() .expect("could not query IMMDevice interface for IMMEndpoint") } unsafe fn data_flow_from_immendpoint(endpoint: &Audio::IMMEndpoint) -> Audio::EDataFlow { endpoint .GetDataFlow() .expect("could not get endpoint data_flow") } // Given the audio client and format, returns whether or not the format is supported. pub unsafe fn is_format_supported( client: &Audio::IAudioClient, waveformatex_ptr: *const Audio::WAVEFORMATEX, ) -> Result { // Check if the given format is supported. let is_supported = |waveformatex_ptr, closest_waveformatex_ptr| { let result = client.IsFormatSupported( Audio::AUDCLNT_SHAREMODE_SHARED, waveformatex_ptr, Some(closest_waveformatex_ptr), ); // `IsFormatSupported` can return `S_FALSE` (which means that a compatible format // has been found, but not an exact match) so we also treat this as unsupported. 
match result { Audio::AUDCLNT_E_DEVICE_INVALIDATED => { Err(SupportedStreamConfigsError::DeviceNotAvailable) } r if r.is_err() => Ok(false), Foundation::S_FALSE => Ok(false), _ => Ok(true), } }; // First we want to retrieve a pointer to the `WAVEFORMATEX`. // Although `GetMixFormat` writes the format to a given `WAVEFORMATEX` pointer, // the pointer itself may actually point to a `WAVEFORMATEXTENSIBLE` structure. // We check the wFormatTag to determine this and get a pointer to the correct type. match (*waveformatex_ptr).wFormatTag as u32 { Audio::WAVE_FORMAT_PCM | Multimedia::WAVE_FORMAT_IEEE_FLOAT => { let mut closest_waveformatex = *waveformatex_ptr; let mut closest_waveformatex_ptr = &mut closest_waveformatex as *mut _; is_supported(waveformatex_ptr, &mut closest_waveformatex_ptr as *mut _) } KernelStreaming::WAVE_FORMAT_EXTENSIBLE => { let waveformatextensible_ptr = waveformatex_ptr as *const Audio::WAVEFORMATEXTENSIBLE; let mut closest_waveformatextensible = *waveformatextensible_ptr; let closest_waveformatextensible_ptr = &mut closest_waveformatextensible as *mut _; let mut closest_waveformatex_ptr = closest_waveformatextensible_ptr as *mut Audio::WAVEFORMATEX; is_supported(waveformatex_ptr, &mut closest_waveformatex_ptr as *mut _) } _ => Ok(false), } } // Get a cpal Format from a WAVEFORMATEX. 
unsafe fn format_from_waveformatex_ptr( waveformatex_ptr: *const Audio::WAVEFORMATEX, audio_client: &Audio::IAudioClient, ) -> Option { fn cmp_guid(a: &GUID, b: &GUID) -> bool { (a.data1, a.data2, a.data3, a.data4) == (b.data1, b.data2, b.data3, b.data4) } let sample_format = match ( (*waveformatex_ptr).wBitsPerSample, (*waveformatex_ptr).wFormatTag as u32, ) { (16, Audio::WAVE_FORMAT_PCM) => SampleFormat::I16, (32, Multimedia::WAVE_FORMAT_IEEE_FLOAT) => SampleFormat::F32, (n_bits, KernelStreaming::WAVE_FORMAT_EXTENSIBLE) => { let waveformatextensible_ptr = waveformatex_ptr as *const Audio::WAVEFORMATEXTENSIBLE; let sub = (*waveformatextensible_ptr).SubFormat; if n_bits == 16 && cmp_guid(&sub, &KernelStreaming::KSDATAFORMAT_SUBTYPE_PCM) { SampleFormat::I16 } else if n_bits == 32 && cmp_guid(&sub, &Multimedia::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT) { SampleFormat::F32 } else { return None; } } // Unknown data format returned by GetMixFormat. _ => return None, }; let sample_rate = SampleRate((*waveformatex_ptr).nSamplesPerSec); // GetBufferSizeLimits is only used for Hardware-Offloaded Audio // Processing, which was added in Windows 8, which places hardware // limits on the size of the audio buffer. If the sound system // *isn't* using offloaded audio, we're using a software audio // processing stack and have pretty much free rein to set buffer // size. // // In software audio stacks GetBufferSizeLimits returns // AUDCLNT_E_OFFLOAD_MODE_ONLY. 
// // https://docs.microsoft.com/en-us/windows-hardware/drivers/audio/hardware-offloaded-audio-processing let (mut min_buffer_duration, mut max_buffer_duration) = (0, 0); let buffer_size_is_limited = audio_client .cast::() .and_then(|audio_client| { audio_client.GetBufferSizeLimits( waveformatex_ptr, true, &mut min_buffer_duration, &mut max_buffer_duration, ) }) .is_ok(); let buffer_size = if buffer_size_is_limited { SupportedBufferSize::Range { min: buffer_duration_to_frames(min_buffer_duration, sample_rate.0), max: buffer_duration_to_frames(max_buffer_duration, sample_rate.0), } } else { SupportedBufferSize::Range { min: 0, max: u32::max_value(), } }; let format = SupportedStreamConfig { channels: (*waveformatex_ptr).nChannels as _, sample_rate, buffer_size, sample_format, }; Some(format) } unsafe impl Send for Device {} unsafe impl Sync for Device {} impl Device { pub fn name(&self) -> Result { unsafe { // Open the device's property store. let property_store = self .device .OpenPropertyStore(STGM_READ) .expect("could not open property store"); // Get the endpoint's friendly-name property. let mut property_value = property_store .GetValue(&Properties::DEVPKEY_Device_FriendlyName as *const _ as *const _) .map_err(|err| { let description = format!("failed to retrieve name from property store: {}", err); let err = BackendSpecificError { description }; DeviceNameError::from(err) })?; let prop_variant = &property_value.Anonymous.Anonymous; // Read the friendly-name from the union data field, expecting a *const u16. if prop_variant.vt != VT_LPWSTR { let description = format!( "property store produced invalid data: {:?}", prop_variant.vt ); let err = BackendSpecificError { description }; return Err(err.into()); } let ptr_utf16 = *(&prop_variant.Anonymous as *const _ as *const *const u16); // Find the length of the friendly name. let mut len = 0; while *ptr_utf16.offset(len) != 0 { len += 1; } // Create the utf16 slice and convert it into a string. 
let name_slice = slice::from_raw_parts(ptr_utf16, len as usize); let name_os_string: OsString = OsStringExt::from_wide(name_slice); let name_string = match name_os_string.into_string() { Ok(string) => string, Err(os_string) => os_string.to_string_lossy().into(), }; // Clean up the property. StructuredStorage::PropVariantClear(&mut property_value).ok(); Ok(name_string) } } #[inline] fn from_immdevice(device: Audio::IMMDevice) -> Self { Device { device, future_audio_client: Arc::new(Mutex::new(None)), } } /// Ensures that `future_audio_client` contains a `Some` and returns a locked mutex to it. fn ensure_future_audio_client( &self, ) -> Result>, windows::core::Error> { let mut lock = self.future_audio_client.lock().unwrap(); if lock.is_some() { return Ok(lock); } let audio_client: Audio::IAudioClient = unsafe { // can fail if the device has been disconnected since we enumerated it, or if // the device doesn't support playback for some reason self.device.Activate(Com::CLSCTX_ALL, None)? }; *lock = Some(IAudioClientWrapper(audio_client)); Ok(lock) } /// Returns an uninitialized `IAudioClient`. #[inline] pub(crate) fn build_audioclient(&self) -> Result { let mut lock = self.ensure_future_audio_client()?; Ok(lock.take().unwrap().0) } // There is no way to query the list of all formats that are supported by the // audio processor, so instead we just trial some commonly supported formats. // // Common formats are trialed by first getting the default format (returned via // `GetMixFormat`) and then mutating that format with common sample rates and // querying them via `IsFormatSupported`. // // When calling `IsFormatSupported` with the shared-mode audio engine, only the default // number of channels seems to be supported. Any, more or less returns an invalid // parameter error. Thus, we just assume that the default number of channels is the only // number supported. 
fn supported_formats(&self) -> Result { // initializing COM because we call `CoTaskMemFree` to release the format. com::com_initialized(); // Retrieve the `IAudioClient`. let lock = match self.ensure_future_audio_client() { Ok(lock) => lock, Err(ref e) if e.code() == Audio::AUDCLNT_E_DEVICE_INVALIDATED => { return Err(SupportedStreamConfigsError::DeviceNotAvailable) } Err(e) => { let description = format!("{}", e); let err = BackendSpecificError { description }; return Err(err.into()); } }; let client = &lock.as_ref().unwrap().0; unsafe { // Retrieve the pointer to the default WAVEFORMATEX. let default_waveformatex_ptr = client .GetMixFormat() .map(WaveFormatExPtr) .map_err(windows_err_to_cpal_err::)?; // If the default format can't succeed we have no hope of finding other formats. assert!(is_format_supported(client, default_waveformatex_ptr.0)?); // Copy the format to use as a test format (as to avoid mutating the original format). let mut test_format = { match WaveFormat::copy_from_waveformatex_ptr(default_waveformatex_ptr.0) { Some(f) => f, // If the format is neither EX nor EXTENSIBLE we don't know how to work with it. None => return Ok(vec![].into_iter()), } }; // Begin testing common sample rates. // // NOTE: We should really be testing for whole ranges here, but it is infeasible to // test every sample rate up to the overflow limit as the `IsFormatSupported` method is // quite slow. let mut supported_sample_rates: Vec = Vec::new(); for &rate in COMMON_SAMPLE_RATES { let rate = rate.0; test_format.nSamplesPerSec = rate; test_format.nAvgBytesPerSec = rate * u32::from((*default_waveformatex_ptr.0).nBlockAlign); if is_format_supported(client, test_format.as_ptr())? { supported_sample_rates.push(rate); } } // If the common rates don't include the default one, add the default. 
let default_sr = (*default_waveformatex_ptr.0).nSamplesPerSec as _; if !supported_sample_rates.iter().any(|&r| r == default_sr) { supported_sample_rates.push(default_sr); } // Reset the sample rate on the test format now that we're done. test_format.nSamplesPerSec = (*default_waveformatex_ptr.0).nSamplesPerSec; test_format.nAvgBytesPerSec = (*default_waveformatex_ptr.0).nAvgBytesPerSec; // TODO: Test the different sample formats? // Create the supported formats. let format = match format_from_waveformatex_ptr(default_waveformatex_ptr.0, client) { Some(fmt) => fmt, None => { let description = "could not create a `cpal::SupportedStreamConfig` from a `WAVEFORMATEX`" .to_string(); let err = BackendSpecificError { description }; return Err(err.into()); } }; let mut supported_formats = Vec::with_capacity(supported_sample_rates.len()); for rate in supported_sample_rates { supported_formats.push(SupportedStreamConfigRange { channels: format.channels, min_sample_rate: SampleRate(rate as _), max_sample_rate: SampleRate(rate as _), buffer_size: format.buffer_size.clone(), sample_format: format.sample_format, }) } Ok(supported_formats.into_iter()) } } pub fn supported_input_configs( &self, ) -> Result { if self.data_flow() == Audio::eCapture { self.supported_formats() // If it's an output device, assume no input formats. } else { Ok(vec![].into_iter()) } } pub fn supported_output_configs( &self, ) -> Result { if self.data_flow() == Audio::eRender { self.supported_formats() // If it's an input device, assume no output formats. } else { Ok(vec![].into_iter()) } } // We always create voices in shared mode, therefore all samples go through an audio // processor to mix them together. // // One format is guaranteed to be supported, the one returned by `GetMixFormat`. 
fn default_format(&self) -> Result { // initializing COM because we call `CoTaskMemFree` com::com_initialized(); let lock = match self.ensure_future_audio_client() { Ok(lock) => lock, Err(ref e) if e.code() == Audio::AUDCLNT_E_DEVICE_INVALIDATED => { return Err(DefaultStreamConfigError::DeviceNotAvailable) } Err(e) => { let description = format!("{}", e); let err = BackendSpecificError { description }; return Err(err.into()); } }; let client = &lock.as_ref().unwrap().0; unsafe { let format_ptr = client .GetMixFormat() .map(WaveFormatExPtr) .map_err(windows_err_to_cpal_err::)?; format_from_waveformatex_ptr(format_ptr.0, client) .ok_or(DefaultStreamConfigError::StreamTypeNotSupported) } } pub(crate) fn data_flow(&self) -> Audio::EDataFlow { let endpoint = Endpoint::from(self.device.clone()); endpoint.data_flow() } pub fn default_input_config(&self) -> Result { if self.data_flow() == Audio::eCapture { self.default_format() } else { Err(DefaultStreamConfigError::StreamTypeNotSupported) } } pub fn default_output_config(&self) -> Result { let data_flow = self.data_flow(); if data_flow == Audio::eRender { self.default_format() } else { Err(DefaultStreamConfigError::StreamTypeNotSupported) } } pub(crate) fn build_input_stream_raw_inner( &self, config: &StreamConfig, sample_format: SampleFormat, ) -> Result { unsafe { // Making sure that COM is initialized. // It's not actually sure that this is required, but when in doubt do it. com::com_initialized(); // Obtaining a `IAudioClient`. 
let audio_client = match self.build_audioclient() { Ok(client) => client, Err(ref e) if e.code() == Audio::AUDCLNT_E_DEVICE_INVALIDATED => { return Err(BuildStreamError::DeviceNotAvailable) } Err(e) => { let description = format!("{}", e); let err = BackendSpecificError { description }; return Err(err.into()); } }; let buffer_duration = buffer_size_to_duration(&config.buffer_size, config.sample_rate.0); let mut stream_flags = Audio::AUDCLNT_STREAMFLAGS_EVENTCALLBACK; if self.data_flow() == Audio::eRender { stream_flags |= Audio::AUDCLNT_STREAMFLAGS_LOOPBACK; } // Computing the format and initializing the device. let waveformatex = { let format_attempt = config_to_waveformatextensible(config, sample_format) .ok_or(BuildStreamError::StreamConfigNotSupported)?; let share_mode = Audio::AUDCLNT_SHAREMODE_SHARED; // Ensure the format is supported. match super::device::is_format_supported(&audio_client, &format_attempt.Format) { Ok(false) => return Err(BuildStreamError::StreamConfigNotSupported), Err(_) => return Err(BuildStreamError::DeviceNotAvailable), _ => (), } // Finally, initializing the audio client let hresult = audio_client.Initialize( share_mode, stream_flags, buffer_duration, 0, &format_attempt.Format, None, ); match hresult { Err(ref e) if e.code() == Audio::AUDCLNT_E_DEVICE_INVALIDATED => { return Err(BuildStreamError::DeviceNotAvailable); } Err(e) => { let description = format!("{}", e); let err = BackendSpecificError { description }; return Err(err.into()); } Ok(()) => (), }; format_attempt.Format }; // obtaining the size of the samples buffer in number of frames let max_frames_in_buffer = audio_client .GetBufferSize() .map_err(windows_err_to_cpal_err::)?; // Creating the event that will be signalled whenever we need to submit some samples. 
let event = { let event = Threading::CreateEventA(None, false, false, windows::core::PCSTR(ptr::null())) .map_err(|e| { let description = format!("failed to create event: {}", e); let err = BackendSpecificError { description }; BuildStreamError::from(err) })?; if let Err(e) = audio_client.SetEventHandle(event) { let description = format!("failed to call SetEventHandle: {}", e); let err = BackendSpecificError { description }; return Err(err.into()); } event }; // Building a `IAudioCaptureClient` that will be used to read captured samples. let capture_client = audio_client .GetService::() .map_err(|e| { windows_err_to_cpal_err_message::( e, "failed to build capture client: ", ) })?; // Once we built the `StreamInner`, we add a command that will be picked up by the // `run()` method and added to the `RunContext`. let client_flow = AudioClientFlow::Capture { capture_client }; let audio_clock = get_audio_clock(&audio_client)?; Ok(StreamInner { audio_client, audio_clock, client_flow, event, playing: false, max_frames_in_buffer, bytes_per_frame: waveformatex.nBlockAlign, config: config.clone(), sample_format, }) } } pub(crate) fn build_output_stream_raw_inner( &self, config: &StreamConfig, sample_format: SampleFormat, ) -> Result { unsafe { // Making sure that COM is initialized. // It's not actually sure that this is required, but when in doubt do it. com::com_initialized(); // Obtaining a `IAudioClient`. let audio_client = self .build_audioclient() .map_err(windows_err_to_cpal_err::)?; let buffer_duration = buffer_size_to_duration(&config.buffer_size, config.sample_rate.0); // Computing the format and initializing the device. let waveformatex = { let format_attempt = config_to_waveformatextensible(config, sample_format) .ok_or(BuildStreamError::StreamConfigNotSupported)?; let share_mode = Audio::AUDCLNT_SHAREMODE_SHARED; // Ensure the format is supported. 
match super::device::is_format_supported(&audio_client, &format_attempt.Format) { Ok(false) => return Err(BuildStreamError::StreamConfigNotSupported), Err(_) => return Err(BuildStreamError::DeviceNotAvailable), _ => (), } // Finally, initializing the audio client audio_client .Initialize( share_mode, Audio::AUDCLNT_STREAMFLAGS_EVENTCALLBACK, buffer_duration, 0, &format_attempt.Format, None, ) .map_err(windows_err_to_cpal_err::)?; format_attempt.Format }; // Creating the event that will be signalled whenever we need to submit some samples. let event = { let event = Threading::CreateEventA(None, false, false, windows::core::PCSTR(ptr::null())) .map_err(|e| { let description = format!("failed to create event: {}", e); let err = BackendSpecificError { description }; BuildStreamError::from(err) })?; if let Err(e) = audio_client.SetEventHandle(event) { let description = format!("failed to call SetEventHandle: {}", e); let err = BackendSpecificError { description }; return Err(err.into()); } event }; // obtaining the size of the samples buffer in number of frames let max_frames_in_buffer = audio_client.GetBufferSize().map_err(|e| { windows_err_to_cpal_err_message::( e, "failed to obtain buffer size: ", ) })?; // Building a `IAudioRenderClient` that will be used to fill the samples buffer. let render_client = audio_client .GetService::() .map_err(|e| { windows_err_to_cpal_err_message::( e, "failed to build render client: ", ) })?; // Once we built the `StreamInner`, we add a command that will be picked up by the // `run()` method and added to the `RunContext`. 
let client_flow = AudioClientFlow::Render { render_client }; let audio_clock = get_audio_clock(&audio_client)?; Ok(StreamInner { audio_client, audio_clock, client_flow, event, playing: false, max_frames_in_buffer, bytes_per_frame: waveformatex.nBlockAlign, config: config.clone(), sample_format, }) } } } impl PartialEq for Device { #[inline] fn eq(&self, other: &Device) -> bool { // Use case: In order to check whether the default device has changed // the client code might need to compare the previous default device with the current one. // The pointer comparison (`self.device == other.device`) don't work there, // because the pointers are different even when the default device stays the same. // // In this code section we're trying to use the GetId method for the device comparison, cf. // https://docs.microsoft.com/en-us/windows/desktop/api/mmdeviceapi/nf-mmdeviceapi-immdevice-getid unsafe { struct IdRAII(windows::core::PWSTR); /// RAII for device IDs. impl Drop for IdRAII { fn drop(&mut self) { unsafe { Com::CoTaskMemFree(Some(self.0 .0 as *mut _)) } } } // GetId only fails with E_OUTOFMEMORY and if it does, we're probably dead already. // Plus it won't do to change the device comparison logic unexpectedly. let id1 = self.device.GetId().expect("cpal: GetId failure"); let id1 = IdRAII(id1); let id2 = other.device.GetId().expect("cpal: GetId failure"); let id2 = IdRAII(id2); // 16-bit null-terminated comparison. 
let mut offset = 0; loop { let w1: u16 = *(id1.0).0.offset(offset); let w2: u16 = *(id2.0).0.offset(offset); if w1 == 0 && w2 == 0 { return true; } if w1 != w2 { return false; } offset += 1; } } } } impl Eq for Device {} impl fmt::Debug for Device { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("Device") .field("device", &self.device) .field("name", &self.name()) .finish() } } impl From for Endpoint { fn from(device: Audio::IMMDevice) -> Self { unsafe { let endpoint = immendpoint_from_immdevice(device); Endpoint { endpoint } } } } impl Endpoint { fn data_flow(&self) -> Audio::EDataFlow { unsafe { data_flow_from_immendpoint(&self.endpoint) } } } static ENUMERATOR: Lazy = Lazy::new(|| { // COM initialization is thread local, but we only need to have COM initialized in the // thread we create the objects in com::com_initialized(); // building the devices enumerator object unsafe { let enumerator = Com::CoCreateInstance::<_, Audio::IMMDeviceEnumerator>( &Audio::MMDeviceEnumerator, None, Com::CLSCTX_ALL, ) .unwrap(); Enumerator(enumerator) } }); /// Send/Sync wrapper around `IMMDeviceEnumerator`. struct Enumerator(Audio::IMMDeviceEnumerator); unsafe impl Send for Enumerator {} unsafe impl Sync for Enumerator {} /// WASAPI implementation for `Devices`. 
/// Iterates every *active* endpoint, both capture and render.
pub struct Devices {
    // Underlying COM endpoint collection (snapshot taken at construction).
    collection: Audio::IMMDeviceCollection,
    // Number of devices in `collection`.
    total_count: u32,
    // Index of the next device to yield.
    next_item: u32,
}

impl Devices {
    /// Snapshots all currently-active WASAPI endpoints.
    pub fn new() -> Result {
        unsafe {
            // can fail because of wrong parameters (should never happen) or out of memory
            let collection = ENUMERATOR
                .0
                .EnumAudioEndpoints(Audio::eAll, Audio::DEVICE_STATE_ACTIVE)
                .map_err(BackendSpecificError::from)?;
            let count = collection.GetCount().map_err(BackendSpecificError::from)?;
            Ok(Devices {
                collection,
                total_count: count,
                next_item: 0,
            })
        }
    }
}

// NOTE(review): asserts the COM collection may be moved/shared across threads;
// relies on COM marshalling behavior of IMMDeviceCollection — TODO confirm.
unsafe impl Send for Devices {}
unsafe impl Sync for Devices {}

impl Iterator for Devices {
    type Item = Device;

    fn next(&mut self) -> Option {
        if self.next_item >= self.total_count {
            return None;
        }
        unsafe {
            // `Item` is only called with indices below `total_count`, so it
            // is expected to succeed.
            let device = self.collection.Item(self.next_item).unwrap();
            self.next_item += 1;
            Some(Device::from_immdevice(device))
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option) {
        // The exact number of remaining devices is known up front.
        let num = self.total_count - self.next_item;
        let num = num as usize;
        (num, Some(num))
    }
}

/// Returns the default endpoint for the given direction (console role), or
/// `None` when there is no such device.
fn default_device(data_flow: Audio::EDataFlow) -> Option {
    unsafe {
        let device = ENUMERATOR
            .0
            .GetDefaultAudioEndpoint(data_flow, Audio::eConsole)
            .ok()?;
        // TODO: check specifically for `E_NOTFOUND`, and panic otherwise
        Some(Device::from_immdevice(device))
    }
}

/// Default capture (input) device, if any.
pub fn default_input_device() -> Option {
    default_device(Audio::eCapture)
}

/// Default render (output) device, if any.
pub fn default_output_device() -> Option {
    default_device(Audio::eRender)
}

/// Get the audio clock used to produce `StreamInstant`s.
unsafe fn get_audio_clock(
    audio_client: &Audio::IAudioClient,
) -> Result {
    audio_client
        .GetService::()
        .map_err(|e| {
            windows_err_to_cpal_err_message::(e, "failed to build audio clock: ")
        })
}

// Turns a `Format` into a `WAVEFORMATEXTENSIBLE`.
//
// Returns `None` if the WAVEFORMATEXTENSIBLE does not support the given format.
fn config_to_waveformatextensible(
    config: &StreamConfig,
    sample_format: SampleFormat,
) -> Option {
    // Integer PCM uses the plain `WAVEFORMATEX` tag; float requires the
    // extensible wrapper. Any other sample format is unsupported here.
    let format_tag = match sample_format {
        SampleFormat::I16 => Audio::WAVE_FORMAT_PCM,
        SampleFormat::F32 => KernelStreaming::WAVE_FORMAT_EXTENSIBLE,
        _ => return None,
    } as u16;
    let channels = config.channels;
    let sample_rate = config.sample_rate.0;
    let sample_bytes = sample_format.sample_size() as u16;
    // Derived fields required by the WAVEFORMATEX contract:
    // bytes/sec, bytes per interleaved frame, and bits per sample.
    let avg_bytes_per_sec = u32::from(channels) * sample_rate * u32::from(sample_bytes);
    let block_align = channels * sample_bytes;
    let bits_per_sample = 8 * sample_bytes;
    // `cbSize` counts the extra bytes that follow the WAVEFORMATEX header:
    // zero for plain PCM, the extensible tail size for float.
    let cb_size = match sample_format {
        SampleFormat::I16 => 0,
        SampleFormat::F32 => {
            let extensible_size = mem::size_of::();
            let ex_size = mem::size_of::();
            (extensible_size - ex_size) as u16
        }
        _ => return None,
    };
    let waveformatex = Audio::WAVEFORMATEX {
        wFormatTag: format_tag,
        nChannels: channels,
        nSamplesPerSec: sample_rate,
        nAvgBytesPerSec: avg_bytes_per_sec,
        nBlockAlign: block_align,
        wBitsPerSample: bits_per_sample,
        cbSize: cb_size,
    };
    // CPAL does not care about speaker positions, so pass audio right through.
let channel_mask = KernelStreaming::KSAUDIO_SPEAKER_DIRECTOUT; let sub_format = match sample_format { SampleFormat::I16 => KernelStreaming::KSDATAFORMAT_SUBTYPE_PCM, SampleFormat::F32 => Multimedia::KSDATAFORMAT_SUBTYPE_IEEE_FLOAT, _ => return None, }; let waveformatextensible = Audio::WAVEFORMATEXTENSIBLE { Format: waveformatex, Samples: Audio::WAVEFORMATEXTENSIBLE_0 { wSamplesPerBlock: bits_per_sample, }, dwChannelMask: channel_mask, SubFormat: sub_format, }; Some(waveformatextensible) } fn buffer_size_to_duration(buffer_size: &BufferSize, sample_rate: u32) -> i64 { match buffer_size { BufferSize::Fixed(frames) => *frames as i64 * (1_000_000_000 / 100) / sample_rate as i64, BufferSize::Default => 0, } } fn buffer_duration_to_frames(buffer_duration: i64, sample_rate: u32) -> FrameCount { (buffer_duration * sample_rate as i64 * 100 / 1_000_000_000) as FrameCount } cpal-0.15.2/src/host/wasapi/mod.rs000064400000000000000000000051311046102023000150050ustar 00000000000000pub use self::device::{ default_input_device, default_output_device, Device, Devices, SupportedInputConfigs, SupportedOutputConfigs, }; pub use self::stream::Stream; use crate::traits::HostTrait; use crate::BackendSpecificError; use crate::DevicesError; use std::io::Error as IoError; use windows::Win32::Media::Audio; mod com; mod device; mod stream; /// The WASAPI host, the default windows host type. /// /// Note: If you use a WASAPI output device as an input device it will /// transparently enable loopback mode (see /// https://docs.microsoft.com/en-us/windows/win32/coreaudio/loopback-recording). #[derive(Debug)] pub struct Host; impl Host { pub fn new() -> Result { Ok(Host) } } impl HostTrait for Host { type Devices = Devices; type Device = Device; fn is_available() -> bool { // Assume WASAPI is always available on Windows. 
true } fn devices(&self) -> Result { Devices::new() } fn default_input_device(&self) -> Option { default_input_device() } fn default_output_device(&self) -> Option { default_output_device() } } impl From for BackendSpecificError { fn from(error: windows::core::Error) -> Self { BackendSpecificError { description: format!("{}", IoError::from(error)), } } } trait ErrDeviceNotAvailable: From { fn device_not_available() -> Self; } impl ErrDeviceNotAvailable for crate::BuildStreamError { fn device_not_available() -> Self { Self::DeviceNotAvailable } } impl ErrDeviceNotAvailable for crate::SupportedStreamConfigsError { fn device_not_available() -> Self { Self::DeviceNotAvailable } } impl ErrDeviceNotAvailable for crate::DefaultStreamConfigError { fn device_not_available() -> Self { Self::DeviceNotAvailable } } impl ErrDeviceNotAvailable for crate::StreamError { fn device_not_available() -> Self { Self::DeviceNotAvailable } } fn windows_err_to_cpal_err(e: windows::core::Error) -> E { windows_err_to_cpal_err_message::(e, "") } fn windows_err_to_cpal_err_message( e: windows::core::Error, message: &str, ) -> E { match e.code() { Audio::AUDCLNT_E_DEVICE_INVALIDATED => E::device_not_available(), _ => { let description = format!("{}{}", message, e); let err = BackendSpecificError { description }; err.into() } } } cpal-0.15.2/src/host/wasapi/stream.rs000064400000000000000000000446341046102023000155340ustar 00000000000000use super::windows_err_to_cpal_err; use crate::traits::StreamTrait; use crate::{ BackendSpecificError, Data, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat, StreamError, }; use std::mem; use std::ptr; use std::sync::mpsc::{channel, Receiver, SendError, Sender}; use std::thread::{self, JoinHandle}; use windows::Win32::Foundation; use windows::Win32::Foundation::WAIT_OBJECT_0; use windows::Win32::Media::Audio; use windows::Win32::System::SystemServices; use windows::Win32::System::Threading; use 
windows::Win32::System::WindowsProgramming; pub struct Stream { /// The high-priority audio processing thread calling callbacks. /// Option used for moving out in destructor. /// /// TODO: Actually set the thread priority. thread: Option>, // Commands processed by the `run()` method that is currently running. // `pending_scheduled_event` must be signalled whenever a command is added here, so that it // will get picked up. commands: Sender, // This event is signalled after a new entry is added to `commands`, so that the `run()` // method can be notified. pending_scheduled_event: Foundation::HANDLE, } struct RunContext { // Streams that have been created in this event loop. stream: StreamInner, // Handles corresponding to the `event` field of each element of `voices`. Must always be in // sync with `voices`, except that the first element is always `pending_scheduled_event`. handles: Vec, commands: Receiver, } // Once we start running the eventloop, the RunContext will not be moved. unsafe impl Send for RunContext {} pub enum Command { PlayStream, PauseStream, Terminate, } pub enum AudioClientFlow { Render { render_client: Audio::IAudioRenderClient, }, Capture { capture_client: Audio::IAudioCaptureClient, }, } pub struct StreamInner { pub audio_client: Audio::IAudioClient, pub audio_clock: Audio::IAudioClock, pub client_flow: AudioClientFlow, // Event that is signalled by WASAPI whenever audio data must be written. pub event: Foundation::HANDLE, // True if the stream is currently playing. False if paused. pub playing: bool, // Number of frames of audio data in the underlying buffer allocated by WASAPI. pub max_frames_in_buffer: u32, // Number of bytes that each frame occupies. pub bytes_per_frame: u16, // The configuration with which the stream was created. pub config: crate::StreamConfig, // The sample format with which the stream was created. 
pub sample_format: SampleFormat, } impl Stream { pub(crate) fn new_input( stream_inner: StreamInner, mut data_callback: D, mut error_callback: E, ) -> Stream where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let pending_scheduled_event = unsafe { Threading::CreateEventA(None, false, false, windows::core::PCSTR(ptr::null())) } .expect("cpal: could not create input stream event"); let (tx, rx) = channel(); let run_context = RunContext { handles: vec![pending_scheduled_event, stream_inner.event], stream: stream_inner, commands: rx, }; let thread = thread::Builder::new() .name("cpal_wasapi_in".to_owned()) .spawn(move || run_input(run_context, &mut data_callback, &mut error_callback)) .unwrap(); Stream { thread: Some(thread), commands: tx, pending_scheduled_event, } } pub(crate) fn new_output( stream_inner: StreamInner, mut data_callback: D, mut error_callback: E, ) -> Stream where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { let pending_scheduled_event = unsafe { Threading::CreateEventA(None, false, false, windows::core::PCSTR(ptr::null())) } .expect("cpal: could not create output stream event"); let (tx, rx) = channel(); let run_context = RunContext { handles: vec![pending_scheduled_event, stream_inner.event], stream: stream_inner, commands: rx, }; let thread = thread::Builder::new() .name("cpal_wasapi_out".to_owned()) .spawn(move || run_output(run_context, &mut data_callback, &mut error_callback)) .unwrap(); Stream { thread: Some(thread), commands: tx, pending_scheduled_event, } } #[inline] fn push_command(&self, command: Command) -> Result<(), SendError> { self.commands.send(command)?; unsafe { let result = Threading::SetEvent(self.pending_scheduled_event); assert_ne!(result, false); } Ok(()) } } impl Drop for Stream { #[inline] fn drop(&mut self) { if let Ok(_) = self.push_command(Command::Terminate) { self.thread.take().unwrap().join().unwrap(); unsafe { 
Foundation::CloseHandle(self.pending_scheduled_event); } } } } impl StreamTrait for Stream { fn play(&self) -> Result<(), PlayStreamError> { self.push_command(Command::PlayStream) .map_err(|_| crate::error::PlayStreamError::DeviceNotAvailable)?; Ok(()) } fn pause(&self) -> Result<(), PauseStreamError> { self.push_command(Command::PauseStream) .map_err(|_| crate::error::PauseStreamError::DeviceNotAvailable)?; Ok(()) } } impl Drop for StreamInner { #[inline] fn drop(&mut self) { unsafe { Foundation::CloseHandle(self.event); } } } // Process any pending commands that are queued within the `RunContext`. // Returns `true` if the loop should continue running, `false` if it should terminate. fn process_commands(run_context: &mut RunContext) -> Result { // Process the pending commands. for command in run_context.commands.try_iter() { match command { Command::PlayStream => unsafe { if !run_context.stream.playing { run_context .stream .audio_client .Start() .map_err(windows_err_to_cpal_err::)?; run_context.stream.playing = true; } }, Command::PauseStream => unsafe { if run_context.stream.playing { run_context .stream .audio_client .Stop() .map_err(windows_err_to_cpal_err::)?; run_context.stream.playing = false; } }, Command::Terminate => { return Ok(false); } } } Ok(true) } // Wait for any of the given handles to be signalled. // // Returns the index of the `handle` that was signalled, or an `Err` if // `WaitForMultipleObjectsEx` fails. // // This is called when the `run` thread is ready to wait for the next event. The // next event might be some command submitted by the user (the first handle) or // might indicate that one of the streams is ready to deliver or receive audio. 
fn wait_for_handle_signal(handles: &[Foundation::HANDLE]) -> Result {
    // `WaitForMultipleObjectsEx` cannot wait on more handles than this.
    debug_assert!(handles.len() <= SystemServices::MAXIMUM_WAIT_OBJECTS as usize);
    let result = unsafe {
        Threading::WaitForMultipleObjectsEx(
            handles,
            false, // Don't wait for all, just wait for the first
            WindowsProgramming::INFINITE, // TODO: allow setting a timeout
            false, // irrelevant parameter here
        )
    };
    if result == Foundation::WAIT_FAILED {
        let err = unsafe { Foundation::GetLastError() };
        // NOTE(review): the message below begins with a stray backtick
        // ("`WaitForMultipleObjectsEx") — cosmetic; left untouched here.
        let description = format!("`WaitForMultipleObjectsEx failed: {}", err.0);
        let err = BackendSpecificError { description };
        return Err(err);
    }
    // Notifying the corresponding task handler.
    // The wait result encodes which handle fired as WAIT_OBJECT_0 + index.
    let handle_idx = (result.0 - WAIT_OBJECT_0.0) as usize;
    Ok(handle_idx)
}

// Get the number of available frames that are available for writing/reading.
fn get_available_frames(stream: &StreamInner) -> Result {
    unsafe {
        // `GetCurrentPadding` reports frames still queued in the shared
        // buffer; the free space is the remainder of the buffer.
        let padding = stream
            .audio_client
            .GetCurrentPadding()
            .map_err(windows_err_to_cpal_err::)?;
        Ok(stream.max_frames_in_buffer - padding)
    }
}

// Event loop of the capture thread: process queued commands, wait for a
// signal, then drain all pending input packets via `process_input`.
fn run_input(
    mut run_ctxt: RunContext,
    data_callback: &mut dyn FnMut(&Data, &InputCallbackInfo),
    error_callback: &mut dyn FnMut(StreamError),
) {
    loop {
        match process_commands_and_await_signal(&mut run_ctxt, error_callback) {
            Some(ControlFlow::Break) => break,
            Some(ControlFlow::Continue) => continue,
            None => (),
        }
        let capture_client = match run_ctxt.stream.client_flow {
            AudioClientFlow::Capture { ref capture_client } => capture_client.clone(),
            // An input stream is always constructed with a capture client.
            _ => unreachable!(),
        };
        match process_input(
            &run_ctxt.stream,
            capture_client,
            data_callback,
            error_callback,
        ) {
            ControlFlow::Break => break,
            ControlFlow::Continue => continue,
        }
    }
}

// Event loop of the render thread; mirrors `run_input` for output streams.
// (Continues on the following chunk line.)
fn run_output(
    mut run_ctxt: RunContext,
    data_callback: &mut dyn FnMut(&mut Data, &OutputCallbackInfo),
    error_callback: &mut dyn FnMut(StreamError),
) {
    loop {
        match process_commands_and_await_signal(&mut run_ctxt, error_callback) {
            Some(ControlFlow::Break) => break,
            Some(ControlFlow::Continue) => continue,
            None => (),
        }
        let render_client = match
run_ctxt.stream.client_flow { AudioClientFlow::Render { ref render_client } => render_client.clone(), _ => unreachable!(), }; match process_output( &run_ctxt.stream, render_client, data_callback, error_callback, ) { ControlFlow::Break => break, ControlFlow::Continue => continue, } } } enum ControlFlow { Break, Continue, } fn process_commands_and_await_signal( run_context: &mut RunContext, error_callback: &mut dyn FnMut(StreamError), ) -> Option { // Process queued commands. match process_commands(run_context) { Ok(true) => (), Ok(false) => return Some(ControlFlow::Break), Err(err) => { error_callback(err); return Some(ControlFlow::Break); } }; // Wait for any of the handles to be signalled. let handle_idx = match wait_for_handle_signal(&run_context.handles) { Ok(idx) => idx, Err(err) => { error_callback(err.into()); return Some(ControlFlow::Break); } }; // If `handle_idx` is 0, then it's `pending_scheduled_event` that was signalled in // order for us to pick up the pending commands. Otherwise, a stream needs data. if handle_idx == 0 { return Some(ControlFlow::Continue); } None } // The loop for processing pending input data. fn process_input( stream: &StreamInner, capture_client: Audio::IAudioCaptureClient, data_callback: &mut dyn FnMut(&Data, &InputCallbackInfo), error_callback: &mut dyn FnMut(StreamError), ) -> ControlFlow { unsafe { // Get the available data in the shared buffer. let mut buffer: *mut u8 = ptr::null_mut(); let mut flags = mem::MaybeUninit::uninit(); loop { let mut frames_available = match capture_client.GetNextPacketSize() { Ok(0) => return ControlFlow::Continue, Ok(f) => f, Err(err) => { error_callback(windows_err_to_cpal_err(err)); return ControlFlow::Break; } }; let mut qpc_position: u64 = 0; let result = capture_client.GetBuffer( &mut buffer, &mut frames_available, flags.as_mut_ptr(), None, Some(&mut qpc_position), ); match result { // TODO: Can this happen? 
Err(e) if e.code() == Audio::AUDCLNT_S_BUFFER_EMPTY => continue, Err(e) => { error_callback(windows_err_to_cpal_err(e)); return ControlFlow::Break; } Ok(_) => (), } debug_assert!(!buffer.is_null()); let data = buffer as *mut (); let len = frames_available as usize * stream.bytes_per_frame as usize / stream.sample_format.sample_size(); let data = Data::from_parts(data, len, stream.sample_format); // The `qpc_position` is in 100 nanosecond units. Convert it to nanoseconds. let timestamp = match input_timestamp(stream, qpc_position) { Ok(ts) => ts, Err(err) => { error_callback(err); return ControlFlow::Break; } }; let info = InputCallbackInfo { timestamp }; data_callback(&data, &info); // Release the buffer. let result = capture_client .ReleaseBuffer(frames_available) .map_err(windows_err_to_cpal_err); if let Err(err) = result { error_callback(err); return ControlFlow::Break; } } } } // The loop for writing output data. fn process_output( stream: &StreamInner, render_client: Audio::IAudioRenderClient, data_callback: &mut dyn FnMut(&mut Data, &OutputCallbackInfo), error_callback: &mut dyn FnMut(StreamError), ) -> ControlFlow { // The number of frames available for writing. let frames_available = match get_available_frames(stream) { Ok(0) => return ControlFlow::Continue, // TODO: Can this happen? 
Ok(n) => n, Err(err) => { error_callback(err); return ControlFlow::Break; } }; unsafe { let buffer = match render_client.GetBuffer(frames_available) { Ok(b) => b, Err(e) => { error_callback(windows_err_to_cpal_err(e)); return ControlFlow::Break; } }; debug_assert!(!buffer.is_null()); let data = buffer as *mut (); let len = frames_available as usize * stream.bytes_per_frame as usize / stream.sample_format.sample_size(); let mut data = Data::from_parts(data, len, stream.sample_format); let sample_rate = stream.config.sample_rate; let timestamp = match output_timestamp(stream, frames_available, sample_rate) { Ok(ts) => ts, Err(err) => { error_callback(err); return ControlFlow::Break; } }; let info = OutputCallbackInfo { timestamp }; data_callback(&mut data, &info); if let Err(err) = render_client.ReleaseBuffer(frames_available, 0) { error_callback(windows_err_to_cpal_err(err)); return ControlFlow::Break; } } ControlFlow::Continue } /// Convert the given duration in frames at the given sample rate to a `std::time::Duration`. fn frames_to_duration(frames: u32, rate: crate::SampleRate) -> std::time::Duration { let secsf = frames as f64 / rate.0 as f64; let secs = secsf as u64; let nanos = ((secsf - secs as f64) * 1_000_000_000.0) as u32; std::time::Duration::new(secs, nanos) } /// Use the stream's `IAudioClock` to produce the current stream instant. /// /// Uses the QPC position produced via the `GetPosition` method. fn stream_instant(stream: &StreamInner) -> Result { let mut position: u64 = 0; let mut qpc_position: u64 = 0; unsafe { stream .audio_clock .GetPosition(&mut position, Some(&mut qpc_position)) .map_err(windows_err_to_cpal_err::)?; }; // The `qpc_position` is in 100 nanosecond units. Convert it to nanoseconds. let qpc_nanos = qpc_position as i128 * 100; let instant = crate::StreamInstant::from_nanos_i128(qpc_nanos) .expect("performance counter out of range of `StreamInstant` representation"); Ok(instant) } /// Produce the input stream timestamp. 
/// /// `buffer_qpc_position` is the `qpc_position` returned via the `GetBuffer` call on the capture /// client. It represents the instant at which the first sample of the retrieved buffer was /// captured. fn input_timestamp( stream: &StreamInner, buffer_qpc_position: u64, ) -> Result { // The `qpc_position` is in 100 nanosecond units. Convert it to nanoseconds. let qpc_nanos = buffer_qpc_position as i128 * 100; let capture = crate::StreamInstant::from_nanos_i128(qpc_nanos) .expect("performance counter out of range of `StreamInstant` representation"); let callback = stream_instant(stream)?; Ok(crate::InputStreamTimestamp { capture, callback }) } /// Produce the output stream timestamp. /// /// `frames_available` is the number of frames available for writing as reported by subtracting the /// result of `GetCurrentPadding` from the maximum buffer size. /// /// `sample_rate` is the rate at which audio frames are processed by the device. /// /// TODO: The returned `playback` is an estimate that assumes audio is delivered immediately after /// `frames_available` are consumed. The reality is that there is likely a tiny amount of latency /// after this, but not sure how to determine this. 
fn output_timestamp( stream: &StreamInner, frames_available: u32, sample_rate: crate::SampleRate, ) -> Result { let callback = stream_instant(stream)?; let buffer_duration = frames_to_duration(frames_available, sample_rate); let playback = callback .add(buffer_duration) .expect("`playback` occurs beyond representation supported by `StreamInstant`"); Ok(crate::OutputStreamTimestamp { callback, playback }) } cpal-0.15.2/src/host/webaudio/mod.rs000064400000000000000000000413101046102023000153170ustar 00000000000000extern crate js_sys; extern crate wasm_bindgen; extern crate web_sys; use self::js_sys::eval; use self::wasm_bindgen::prelude::*; use self::wasm_bindgen::JsCast; use self::web_sys::{AudioContext, AudioContextOptions}; use crate::traits::{DeviceTrait, HostTrait, StreamTrait}; use crate::{ BackendSpecificError, BufferSize, BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, OutputCallbackInfo, PauseStreamError, PlayStreamError, SampleFormat, SampleRate, StreamConfig, StreamError, SupportedBufferSize, SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError, }; use std::ops::DerefMut; use std::sync::{Arc, Mutex, RwLock}; use std::time::Duration; /// Content is false if the iterator is empty. 
pub struct Devices(bool); #[derive(Clone, Debug, PartialEq, Eq)] pub struct Device; pub struct Host; pub struct Stream { ctx: Arc, on_ended_closures: Vec>>>>, config: StreamConfig, buffer_size_frames: usize, } pub type SupportedInputConfigs = ::std::vec::IntoIter; pub type SupportedOutputConfigs = ::std::vec::IntoIter; const MIN_CHANNELS: u16 = 1; const MAX_CHANNELS: u16 = 32; const MIN_SAMPLE_RATE: SampleRate = SampleRate(8_000); const MAX_SAMPLE_RATE: SampleRate = SampleRate(96_000); const DEFAULT_SAMPLE_RATE: SampleRate = SampleRate(44_100); const MIN_BUFFER_SIZE: u32 = 1; const MAX_BUFFER_SIZE: u32 = u32::MAX; const DEFAULT_BUFFER_SIZE: usize = 2048; const SUPPORTED_SAMPLE_FORMAT: SampleFormat = SampleFormat::F32; impl Host { pub fn new() -> Result { Ok(Host) } } impl HostTrait for Host { type Devices = Devices; type Device = Device; fn is_available() -> bool { // Assume this host is always available on webaudio. true } fn devices(&self) -> Result { Devices::new() } fn default_input_device(&self) -> Option { default_input_device() } fn default_output_device(&self) -> Option { default_output_device() } } impl Devices { fn new() -> Result { Ok(Self::default()) } } impl Device { #[inline] fn name(&self) -> Result { Ok("Default Device".to_owned()) } #[inline] fn supported_input_configs( &self, ) -> Result { // TODO Ok(Vec::new().into_iter()) } #[inline] fn supported_output_configs( &self, ) -> Result { let buffer_size = SupportedBufferSize::Range { min: MIN_BUFFER_SIZE, max: MAX_BUFFER_SIZE, }; let configs: Vec<_> = (MIN_CHANNELS..=MAX_CHANNELS) .map(|channels| SupportedStreamConfigRange { channels, min_sample_rate: MIN_SAMPLE_RATE, max_sample_rate: MAX_SAMPLE_RATE, buffer_size: buffer_size.clone(), sample_format: SUPPORTED_SAMPLE_FORMAT, }) .collect(); Ok(configs.into_iter()) } #[inline] fn default_input_config(&self) -> Result { // TODO Err(DefaultStreamConfigError::StreamTypeNotSupported) } #[inline] fn default_output_config(&self) -> Result { const EXPECT: &str 
= "expected at least one valid webaudio stream config"; let config = self .supported_output_configs() .expect(EXPECT) .max_by(|a, b| a.cmp_default_heuristics(b)) .unwrap() .with_sample_rate(DEFAULT_SAMPLE_RATE); Ok(config) } } impl DeviceTrait for Device { type SupportedInputConfigs = SupportedInputConfigs; type SupportedOutputConfigs = SupportedOutputConfigs; type Stream = Stream; #[inline] fn name(&self) -> Result { Device::name(self) } #[inline] fn supported_input_configs( &self, ) -> Result { Device::supported_input_configs(self) } #[inline] fn supported_output_configs( &self, ) -> Result { Device::supported_output_configs(self) } #[inline] fn default_input_config(&self) -> Result { Device::default_input_config(self) } #[inline] fn default_output_config(&self) -> Result { Device::default_output_config(self) } fn build_input_stream_raw( &self, _config: &StreamConfig, _sample_format: SampleFormat, _data_callback: D, _error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&Data, &InputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { // TODO Err(BuildStreamError::StreamConfigNotSupported) } /// Create an output stream. 
fn build_output_stream_raw( &self, config: &StreamConfig, sample_format: SampleFormat, data_callback: D, _error_callback: E, _timeout: Option, ) -> Result where D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static, E: FnMut(StreamError) + Send + 'static, { if !valid_config(config, sample_format) { return Err(BuildStreamError::StreamConfigNotSupported); } let n_channels = config.channels as usize; let buffer_size_frames = match config.buffer_size { BufferSize::Fixed(v) => { if v == 0 { return Err(BuildStreamError::StreamConfigNotSupported); } else { v as usize } } BufferSize::Default => DEFAULT_BUFFER_SIZE, }; let buffer_size_samples = buffer_size_frames * n_channels; let buffer_time_step_secs = buffer_time_step_secs(buffer_size_frames, config.sample_rate); let data_callback = Arc::new(Mutex::new(Box::new(data_callback))); // Create the WebAudio stream. let mut stream_opts = AudioContextOptions::new(); stream_opts.sample_rate(config.sample_rate.0 as f32); let ctx = AudioContext::new_with_context_options(&stream_opts).map_err( |err| -> BuildStreamError { let description = format!("{:?}", err); let err = BackendSpecificError { description }; err.into() }, )?; let destination = ctx.destination(); // If possible, set the destination's channel_count to the given config.channel. // If not, fallback on the default destination channel_count to keep previous behavior // and do not return an error. if config.channels as u32 <= destination.max_channel_count() { destination.set_channel_count(config.channels as u32); } let ctx = Arc::new(ctx); // A container for managing the lifecycle of the audio callbacks. let mut on_ended_closures: Vec>>>> = Vec::new(); // A cursor keeping track of the current time at which new frames should be scheduled. let time = Arc::new(RwLock::new(0f64)); // Create a set of closures / callbacks which will continuously fetch and schedule sample // playback. Starting with two workers, e.g. 
a front and back buffer so that audio frames // can be fetched in the background. for _i in 0..2 { let data_callback_handle = data_callback.clone(); let ctx_handle = ctx.clone(); let time_handle = time.clone(); // A set of temporary buffers to be used for intermediate sample transformation steps. let mut temporary_buffer = vec![0f32; buffer_size_samples]; let mut temporary_channel_buffer = vec![0f32; buffer_size_frames]; // Create a webaudio buffer which will be reused to avoid allocations. let ctx_buffer = ctx .create_buffer( config.channels as u32, buffer_size_frames as u32, config.sample_rate.0 as f32, ) .map_err(|err| -> BuildStreamError { let description = format!("{:?}", err); let err = BackendSpecificError { description }; err.into() })?; // A self reference to this closure for passing to future audio event calls. let on_ended_closure: Arc>>> = Arc::new(RwLock::new(None)); let on_ended_closure_handle = on_ended_closure.clone(); on_ended_closure .write() .unwrap() .replace(Closure::wrap(Box::new(move || { let now = ctx_handle.current_time(); let time_at_start_of_buffer = { let time_at_start_of_buffer = time_handle .read() .expect("Unable to get a read lock on the time cursor"); // Synchronise first buffer as necessary (eg. keep the time value // referenced to the context clock). if *time_at_start_of_buffer > 0.001 { *time_at_start_of_buffer } else { // 25ms of time to fetch the first sample data, increase to avoid // initial underruns. now + 0.025 } }; // Populate the sample data into an interleaved temporary buffer. 
{ let len = temporary_buffer.len(); let data = temporary_buffer.as_mut_ptr() as *mut (); let mut data = unsafe { Data::from_parts(data, len, sample_format) }; let mut data_callback = data_callback_handle.lock().unwrap(); let callback = crate::StreamInstant::from_secs_f64(now); let playback = crate::StreamInstant::from_secs_f64(time_at_start_of_buffer); let timestamp = crate::OutputStreamTimestamp { callback, playback }; let info = OutputCallbackInfo { timestamp }; (data_callback.deref_mut())(&mut data, &info); } // Deinterleave the sample data and copy into the audio context buffer. // We do not reference the audio context buffer directly e.g. getChannelData. // As wasm-bindgen only gives us a copy, not a direct reference. for channel in 0..n_channels { for i in 0..buffer_size_frames { temporary_channel_buffer[i] = temporary_buffer[n_channels * i + channel]; } ctx_buffer .copy_to_channel(&mut temporary_channel_buffer, channel as i32) .expect("Unable to write sample data into the audio context buffer"); } // Create an AudioBufferSourceNode, schedule it to playback the reused buffer // in the future. let source = ctx_handle .create_buffer_source() .expect("Unable to create a webaudio buffer source"); source.set_buffer(Some(&ctx_buffer)); source .connect_with_audio_node(&ctx_handle.destination()) .expect( "Unable to connect the web audio buffer source to the context destination", ); source.set_onended(Some( on_ended_closure_handle .read() .unwrap() .as_ref() .unwrap() .as_ref() .unchecked_ref(), )); source .start_with_when(time_at_start_of_buffer) .expect("Unable to start the webaudio buffer source"); // Keep track of when the next buffer worth of samples should be played. 
*time_handle.write().unwrap() = time_at_start_of_buffer + buffer_time_step_secs; }) as Box)); on_ended_closures.push(on_ended_closure); } Ok(Stream { ctx, on_ended_closures, config: config.clone(), buffer_size_frames, }) } } impl Stream { /// Return the [`AudioContext`](https://developer.mozilla.org/docs/Web/API/AudioContext) used /// by this stream. pub fn audio_context(&self) -> &AudioContext { &*self.ctx } } impl StreamTrait for Stream { fn play(&self) -> Result<(), PlayStreamError> { let window = web_sys::window().unwrap(); match self.ctx.resume() { Ok(_) => { // Begin webaudio playback, initially scheduling the closures to fire on a timeout // event. let mut offset_ms = 10; let time_step_secs = buffer_time_step_secs(self.buffer_size_frames, self.config.sample_rate); let time_step_ms = (time_step_secs * 1_000.0) as i32; for on_ended_closure in self.on_ended_closures.iter() { window .set_timeout_with_callback_and_timeout_and_arguments_0( on_ended_closure .read() .unwrap() .as_ref() .unwrap() .as_ref() .unchecked_ref(), offset_ms, ) .unwrap(); offset_ms += time_step_ms; } Ok(()) } Err(err) => { let description = format!("{:?}", err); let err = BackendSpecificError { description }; Err(err.into()) } } } fn pause(&self) -> Result<(), PauseStreamError> { match self.ctx.suspend() { Ok(_) => Ok(()), Err(err) => { let description = format!("{:?}", err); let err = BackendSpecificError { description }; Err(err.into()) } } } } impl Drop for Stream { fn drop(&mut self) { let _ = self.ctx.close(); } } impl Default for Devices { fn default() -> Devices { // We produce an empty iterator if the WebAudio API isn't available. 
Devices(is_webaudio_available()) } } impl Iterator for Devices { type Item = Device; #[inline] fn next(&mut self) -> Option { if self.0 { self.0 = false; Some(Device) } else { None } } } #[inline] fn default_input_device() -> Option { // TODO None } #[inline] fn default_output_device() -> Option { if is_webaudio_available() { Some(Device) } else { None } } // Detects whether the `AudioContext` global variable is available. fn is_webaudio_available() -> bool { if let Ok(audio_context_is_defined) = eval("typeof AudioContext !== 'undefined'") { audio_context_is_defined.as_bool().unwrap() } else { false } } // Whether or not the given stream configuration is valid for building a stream. fn valid_config(conf: &StreamConfig, sample_format: SampleFormat) -> bool { conf.channels <= MAX_CHANNELS && conf.channels >= MIN_CHANNELS && conf.sample_rate <= MAX_SAMPLE_RATE && conf.sample_rate >= MIN_SAMPLE_RATE && sample_format == SUPPORTED_SAMPLE_FORMAT } fn buffer_time_step_secs(buffer_size_frames: usize, sample_rate: SampleRate) -> f64 { buffer_size_frames as f64 / sample_rate.0 as f64 } cpal-0.15.2/src/lib.rs000064400000000000000000000740261046102023000125440ustar 00000000000000//! # How to use cpal //! //! Here are some concepts cpal exposes: //! //! - A [`Host`] provides access to the available audio devices on the system. //! Some platforms have more than one host available, but every platform supported by CPAL has at //! least one [default_host] that is guaranteed to be available. //! - A [`Device`] is an audio device that may have any number of input and //! output streams. //! - A [`Stream`] is an open flow of audio data. Input streams allow you to //! receive audio data, output streams allow you to play audio data. You must choose which //! [Device] will run your stream before you can create one. Often, a default device can be //! retrieved via the [Host]. //! //! The first step is to initialise the [`Host`]: //! //! ``` //! use cpal::traits::HostTrait; //! 
let host = cpal::default_host(); //! ``` //! //! Then choose an available [`Device`]. The easiest way is to use the default input or output //! `Device` via the [`default_input_device()`] or [`default_output_device()`] methods on `host`. //! //! Alternatively, you can enumerate all the available devices with the [`devices()`] method. //! Beware that the `default_*_device()` functions return an `Option` in case no device //! is available for that stream type on the system. //! //! ```no_run //! # use cpal::traits::HostTrait; //! # let host = cpal::default_host(); //! let device = host.default_output_device().expect("no output device available"); //! ``` //! //! Before we can create a stream, we must decide what the configuration of the audio stream is //! going to be. //! You can query all the supported configurations with the //! [`supported_input_configs()`] and [`supported_output_configs()`] methods. //! These produce a list of [`SupportedStreamConfigRange`] structs which can later be turned into //! actual [`SupportedStreamConfig`] structs. //! //! If you don't want to query the list of configs, //! you can also build your own [`StreamConfig`] manually, but doing so could lead to an error when //! building the stream if the config is not supported by the device. //! //! > **Note**: the `supported_input/output_configs()` methods //! > could return an error for example if the device has been disconnected. //! //! ```no_run //! use cpal::traits::{DeviceTrait, HostTrait}; //! # let host = cpal::default_host(); //! # let device = host.default_output_device().unwrap(); //! let mut supported_configs_range = device.supported_output_configs() //! .expect("error while querying configs"); //! let supported_config = supported_configs_range.next() //! .expect("no supported config?!") //! .with_max_sample_rate(); //! ``` //! //! Now that we have everything for the stream, we are ready to create it from our selected device: //! //! ```no_run //! use cpal::Data; //! 
use cpal::traits::{DeviceTrait, HostTrait, StreamTrait}; //! # let host = cpal::default_host(); //! # let device = host.default_output_device().unwrap(); //! # let config = device.default_output_config().unwrap().into(); //! let stream = device.build_output_stream( //! &config, //! move |data: &mut [f32], _: &cpal::OutputCallbackInfo| { //! // react to stream events and read or write stream data here. //! }, //! move |err| { //! // react to errors here. //! }, //! None // None=blocking, Some(Duration)=timeout //! ); //! ``` //! //! While the stream is running, the selected audio device will periodically call the data callback //! that was passed to the function. The callback is passed an instance of either [`&Data` or //! `&mut Data`](Data) depending on whether the stream is an input stream or output stream respectively. //! //! > **Note**: Creating and running a stream will *not* block the thread. On modern platforms, the //! > given callback is called by a dedicated, high-priority thread responsible for delivering //! > audio data to the system's audio device in a timely manner. On older platforms that only //! > provide a blocking API (e.g. ALSA), CPAL will create a thread in order to consistently //! > provide non-blocking behaviour (currently this is a thread per stream, but this may change to //! > use a single thread for all streams). *If this is an issue for your platform or design, //! > please share your issue and use-case with the CPAL team on the GitHub issue tracker for //! > consideration.* //! //! In this example, we simply fill the given output buffer with silence. //! //! ```no_run //! use cpal::{Data, Sample, SampleFormat, FromSample}; //! use cpal::traits::{DeviceTrait, HostTrait, StreamTrait}; //! # let host = cpal::default_host(); //! # let device = host.default_output_device().unwrap(); //! # let supported_config = device.default_output_config().unwrap(); //! 
let err_fn = |err| eprintln!("an error occurred on the output audio stream: {}", err); //! let sample_format = supported_config.sample_format(); //! let config = supported_config.into(); //! let stream = match sample_format { //! SampleFormat::F32 => device.build_output_stream(&config, write_silence::, err_fn, None), //! SampleFormat::I16 => device.build_output_stream(&config, write_silence::, err_fn, None), //! SampleFormat::U16 => device.build_output_stream(&config, write_silence::, err_fn, None), //! sample_format => panic!("Unsupported sample format '{sample_format}'") //! }.unwrap(); //! //! fn write_silence(data: &mut [T], _: &cpal::OutputCallbackInfo) { //! for sample in data.iter_mut() { //! *sample = Sample::EQUILIBRIUM; //! } //! } //! ``` //! //! Not all platforms automatically run the stream upon creation. To ensure the stream has started, //! we can use [`Stream::play`](traits::StreamTrait::play). //! //! ```no_run //! # use cpal::traits::{DeviceTrait, HostTrait, StreamTrait}; //! # let host = cpal::default_host(); //! # let device = host.default_output_device().unwrap(); //! # let supported_config = device.default_output_config().unwrap(); //! # let sample_format = supported_config.sample_format(); //! # let config = supported_config.into(); //! # let data_fn = move |_data: &mut cpal::Data, _: &cpal::OutputCallbackInfo| {}; //! # let err_fn = move |_err| {}; //! # let stream = device.build_output_stream_raw(&config, sample_format, data_fn, err_fn, None).unwrap(); //! stream.play().unwrap(); //! ``` //! //! Some devices support pausing the audio stream. This can be useful for saving energy in moments //! of silence. //! //! ```no_run //! # use cpal::traits::{DeviceTrait, HostTrait, StreamTrait}; //! # let host = cpal::default_host(); //! # let device = host.default_output_device().unwrap(); //! # let supported_config = device.default_output_config().unwrap(); //! # let sample_format = supported_config.sample_format(); //! 
# let config = supported_config.into(); //! # let data_fn = move |_data: &mut cpal::Data, _: &cpal::OutputCallbackInfo| {}; //! # let err_fn = move |_err| {}; //! # let stream = device.build_output_stream_raw(&config, sample_format, data_fn, err_fn, None).unwrap(); //! stream.pause().unwrap(); //! ``` //! //! [`default_input_device()`]: traits::HostTrait::default_input_device //! [`default_output_device()`]: traits::HostTrait::default_output_device //! [`devices()`]: traits::HostTrait::devices //! [`supported_input_configs()`]: traits::DeviceTrait::supported_input_configs //! [`supported_output_configs()`]: traits::DeviceTrait::supported_output_configs #![recursion_limit = "2048"] // Extern crate declarations with `#[macro_use]` must unfortunately be at crate root. #[cfg(target_os = "emscripten")] #[macro_use] extern crate wasm_bindgen; #[cfg(target_os = "emscripten")] extern crate js_sys; #[cfg(target_os = "emscripten")] extern crate web_sys; pub use error::*; pub use platform::{ available_hosts, default_host, host_from_id, Device, Devices, Host, HostId, Stream, SupportedInputConfigs, SupportedOutputConfigs, ALL_HOSTS, }; pub use samples_formats::{FromSample, Sample, SampleFormat, SizedSample, I24, I48, U24, U48}; use std::convert::TryInto; use std::ops::{Div, Mul}; use std::time::Duration; #[cfg(target_os = "emscripten")] use wasm_bindgen::prelude::*; mod error; mod host; pub mod platform; mod samples_formats; pub mod traits; /// A host's device iterator yielding only *input* devices. pub type InputDevices = std::iter::Filter::Item) -> bool>; /// A host's device iterator yielding only *output* devices. pub type OutputDevices = std::iter::Filter::Item) -> bool>; /// Number of channels. pub type ChannelCount = u16; /// The number of samples processed per second for a single channel of audio. 
#[cfg_attr(target_os = "emscripten", wasm_bindgen)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] pub struct SampleRate(pub u32); impl Mul for SampleRate where u32: Mul, { type Output = Self; fn mul(self, rhs: T) -> Self { SampleRate(self.0 * rhs) } } impl Div for SampleRate where u32: Div, { type Output = Self; fn div(self, rhs: T) -> Self { SampleRate(self.0 / rhs) } } /// The desired number of frames for the hardware buffer. pub type FrameCount = u32; /// The buffer size used by the device. /// /// [`Default`] is used when no specific buffer size is set and uses the default /// behavior of the given host. Note, the default buffer size may be surprisingly /// large, leading to latency issues. If low latency is desired, [`Fixed(FrameCount)`] /// should be used in accordance with the [`SupportedBufferSize`] range produced by /// the [`SupportedStreamConfig`] API. /// /// [`Default`]: BufferSize::Default /// [`Fixed(FrameCount)`]: BufferSize::Fixed /// [`SupportedStreamConfig`]: SupportedStreamConfig::buffer_size #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum BufferSize { Default, Fixed(FrameCount), } #[cfg(target_os = "emscripten")] impl wasm_bindgen::describe::WasmDescribe for BufferSize { fn describe() {} } #[cfg(target_os = "emscripten")] impl wasm_bindgen::convert::IntoWasmAbi for BufferSize { type Abi = wasm_bindgen::convert::WasmOption; fn into_abi(self) -> Self::Abi { match self { Self::Default => None, Self::Fixed(fc) => Some(fc), } .into_abi() } } /// The set of parameters used to describe how to open a stream. /// /// The sample format is omitted in favour of using a sample type. 
#[cfg_attr(target_os = "emscripten", wasm_bindgen)] #[derive(Clone, Debug, Eq, PartialEq)] pub struct StreamConfig { pub channels: ChannelCount, pub sample_rate: SampleRate, pub buffer_size: BufferSize, } /// Describes the minimum and maximum supported buffer size for the device #[derive(Clone, Debug, Eq, PartialEq)] pub enum SupportedBufferSize { Range { min: FrameCount, max: FrameCount, }, /// In the case that the platform provides no way of getting the default /// buffersize before starting a stream. Unknown, } /// Describes a range of supported stream configurations, retrieved via the /// [`Device::supported_input/output_configs`](traits::DeviceTrait#required-methods) method. #[derive(Debug, Clone, PartialEq, Eq)] pub struct SupportedStreamConfigRange { pub(crate) channels: ChannelCount, /// Minimum value for the samples rate of the supported formats. pub(crate) min_sample_rate: SampleRate, /// Maximum value for the samples rate of the supported formats. pub(crate) max_sample_rate: SampleRate, /// Buffersize ranges supported by the device pub(crate) buffer_size: SupportedBufferSize, /// Type of data expected by the device. pub(crate) sample_format: SampleFormat, } /// Describes a single supported stream configuration, retrieved via either a /// [`SupportedStreamConfigRange`] instance or one of the /// [`Device::default_input/output_config`](traits::DeviceTrait#required-methods) methods. #[derive(Debug, Clone, PartialEq, Eq)] pub struct SupportedStreamConfig { channels: ChannelCount, sample_rate: SampleRate, buffer_size: SupportedBufferSize, sample_format: SampleFormat, } /// A buffer of dynamically typed audio data, passed to raw stream callbacks. /// /// Raw input stream callbacks receive `&Data`, while raw output stream callbacks expect `&mut /// Data`. 
#[cfg_attr(target_os = "emscripten", wasm_bindgen)] #[derive(Debug)] pub struct Data { data: *mut (), len: usize, sample_format: SampleFormat, } /// A monotonic time instance associated with a stream, retrieved from either: /// /// 1. A timestamp provided to the stream's underlying audio data callback or /// 2. The same time source used to generate timestamps for a stream's underlying audio data /// callback. /// /// `StreamInstant` represents a duration since some unspecified origin occurring either before /// or equal to the moment the stream from which it was created begins. /// /// ## Host `StreamInstant` Sources /// /// | Host | Source | /// | ---- | ------ | /// | alsa | `snd_pcm_status_get_htstamp` | /// | coreaudio | `mach_absolute_time` | /// | wasapi | `QueryPerformanceCounter` | /// | asio | `timeGetTime` | /// | emscripten | `AudioContext.getOutputTimestamp` | #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, PartialOrd, Ord)] pub struct StreamInstant { secs: i64, nanos: u32, } /// A timestamp associated with a call to an input stream's data callback. #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)] pub struct InputStreamTimestamp { /// The instant the stream's data callback was invoked. pub callback: StreamInstant, /// The instant that data was captured from the device. /// /// E.g. The instant data was read from an ADC. pub capture: StreamInstant, } /// A timestamp associated with a call to an output stream's data callback. #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)] pub struct OutputStreamTimestamp { /// The instant the stream's data callback was invoked. pub callback: StreamInstant, /// The predicted instant that data written will be delivered to the device for playback. /// /// E.g. The instant data will be played by a DAC. pub playback: StreamInstant, } /// Information relevant to a single call to the user's input stream data callback. 
#[derive(Debug, Clone, PartialEq, Eq)] pub struct InputCallbackInfo { timestamp: InputStreamTimestamp, } /// Information relevant to a single call to the user's output stream data callback. #[cfg_attr(target_os = "emscripten", wasm_bindgen)] #[derive(Debug, Clone, PartialEq, Eq)] pub struct OutputCallbackInfo { timestamp: OutputStreamTimestamp, } impl SupportedStreamConfig { pub fn new( channels: ChannelCount, sample_rate: SampleRate, buffer_size: SupportedBufferSize, sample_format: SampleFormat, ) -> Self { Self { channels, sample_rate, buffer_size, sample_format, } } pub fn channels(&self) -> ChannelCount { self.channels } pub fn sample_rate(&self) -> SampleRate { self.sample_rate } pub fn buffer_size(&self) -> &SupportedBufferSize { &self.buffer_size } pub fn sample_format(&self) -> SampleFormat { self.sample_format } pub fn config(&self) -> StreamConfig { StreamConfig { channels: self.channels, sample_rate: self.sample_rate, buffer_size: BufferSize::Default, } } } impl StreamInstant { /// The amount of time elapsed from another instant to this one. /// /// Returns `None` if `earlier` is later than self. pub fn duration_since(&self, earlier: &Self) -> Option { if self < earlier { None } else { (self.as_nanos() - earlier.as_nanos()) .try_into() .ok() .map(Duration::from_nanos) } } /// Returns the instant in time after the given duration has passed. /// /// Returns `None` if the resulting instant would exceed the bounds of the underlying data /// structure. pub fn add(&self, duration: Duration) -> Option { self.as_nanos() .checked_add(duration.as_nanos() as i128) .and_then(Self::from_nanos_i128) } /// Returns the instant in time one `duration` ago. /// /// Returns `None` if the resulting instant would underflow. As a result, it is important to /// consider that on some platforms the [`StreamInstant`] may begin at `0` from the moment the /// source stream is created. 
pub fn sub(&self, duration: Duration) -> Option { self.as_nanos() .checked_sub(duration.as_nanos() as i128) .and_then(Self::from_nanos_i128) } fn as_nanos(&self) -> i128 { (self.secs as i128 * 1_000_000_000) + self.nanos as i128 } #[allow(dead_code)] fn from_nanos(nanos: i64) -> Self { let secs = nanos / 1_000_000_000; let subsec_nanos = nanos - secs * 1_000_000_000; Self::new(secs as i64, subsec_nanos as u32) } #[allow(dead_code)] fn from_nanos_i128(nanos: i128) -> Option { let secs = nanos / 1_000_000_000; if secs > i64::MAX as i128 || secs < i64::MIN as i128 { None } else { let subsec_nanos = nanos - secs * 1_000_000_000; debug_assert!(subsec_nanos < u32::MAX as i128); Some(Self::new(secs as i64, subsec_nanos as u32)) } } #[allow(dead_code)] fn from_secs_f64(secs: f64) -> crate::StreamInstant { let s = secs.floor() as i64; let ns = ((secs - s as f64) * 1_000_000_000.0) as u32; Self::new(s, ns) } fn new(secs: i64, nanos: u32) -> Self { StreamInstant { secs, nanos } } } impl InputCallbackInfo { /// The timestamp associated with the call to an input stream's data callback. pub fn timestamp(&self) -> InputStreamTimestamp { self.timestamp } } impl OutputCallbackInfo { /// The timestamp associated with the call to an output stream's data callback. pub fn timestamp(&self) -> OutputStreamTimestamp { self.timestamp } } #[allow(clippy::len_without_is_empty)] impl Data { // Internal constructor for host implementations to use. // // The following requirements must be met in order for the safety of `Data`'s public API. // // - The `data` pointer must point to the first sample in the slice containing all samples. // - The `len` must describe the length of the buffer as a number of samples in the expected // format specified via the `sample_format` argument. // - The `sample_format` must correctly represent the underlying sample data delivered/expected // by the stream. 
pub(crate) unsafe fn from_parts( data: *mut (), len: usize, sample_format: SampleFormat, ) -> Self { Data { data, len, sample_format, } } /// The sample format of the internal audio data. pub fn sample_format(&self) -> SampleFormat { self.sample_format } /// The full length of the buffer in samples. /// /// The returned length is the same length as the slice of type `T` that would be returned via /// [`as_slice`](Self::as_slice) given a sample type that matches the inner sample format. pub fn len(&self) -> usize { self.len } /// The raw slice of memory representing the underlying audio data as a slice of bytes. /// /// It is up to the user to interpret the slice of memory based on [`Data::sample_format`]. pub fn bytes(&self) -> &[u8] { let len = self.len * self.sample_format.sample_size(); // The safety of this block relies on correct construction of the `Data` instance. // See the unsafe `from_parts` constructor for these requirements. unsafe { std::slice::from_raw_parts(self.data as *const u8, len) } } /// The raw slice of memory representing the underlying audio data as a slice of bytes. /// /// It is up to the user to interpret the slice of memory based on [`Data::sample_format`]. pub fn bytes_mut(&mut self) -> &mut [u8] { let len = self.len * self.sample_format.sample_size(); // The safety of this block relies on correct construction of the `Data` instance. See // the unsafe `from_parts` constructor for these requirements. unsafe { std::slice::from_raw_parts_mut(self.data as *mut u8, len) } } /// Access the data as a slice of sample type `T`. /// /// Returns `None` if the sample type does not match the expected sample format. pub fn as_slice(&self) -> Option<&[T]> where T: SizedSample, { if T::FORMAT == self.sample_format { // The safety of this block relies on correct construction of the `Data` instance. See // the unsafe `from_parts` constructor for these requirements. 
unsafe { Some(std::slice::from_raw_parts(self.data as *const T, self.len)) } } else { None } } /// Access the data as a slice of sample type `T`. /// /// Returns `None` if the sample type does not match the expected sample format. pub fn as_slice_mut(&mut self) -> Option<&mut [T]> where T: SizedSample, { if T::FORMAT == self.sample_format { // The safety of this block relies on correct construction of the `Data` instance. See // the unsafe `from_parts` constructor for these requirements. unsafe { Some(std::slice::from_raw_parts_mut( self.data as *mut T, self.len, )) } } else { None } } } impl SupportedStreamConfigRange { pub fn new( channels: ChannelCount, min_sample_rate: SampleRate, max_sample_rate: SampleRate, buffer_size: SupportedBufferSize, sample_format: SampleFormat, ) -> Self { Self { channels, min_sample_rate, max_sample_rate, buffer_size, sample_format, } } pub fn channels(&self) -> ChannelCount { self.channels } pub fn min_sample_rate(&self) -> SampleRate { self.min_sample_rate } pub fn max_sample_rate(&self) -> SampleRate { self.max_sample_rate } pub fn buffer_size(&self) -> &SupportedBufferSize { &self.buffer_size } pub fn sample_format(&self) -> SampleFormat { self.sample_format } /// Retrieve a [`SupportedStreamConfig`] with the given sample rate and buffer size. /// /// # Panics /// /// Panics if the given `sample_rate` is outside the range specified within this /// [`SupportedStreamConfigRange`] instance. pub fn with_sample_rate(self, sample_rate: SampleRate) -> SupportedStreamConfig { assert!(self.min_sample_rate <= sample_rate && sample_rate <= self.max_sample_rate); SupportedStreamConfig { channels: self.channels, sample_rate, sample_format: self.sample_format, buffer_size: self.buffer_size, } } /// Turns this [`SupportedStreamConfigRange`] into a [`SupportedStreamConfig`] corresponding to the maximum samples rate. 
#[inline] pub fn with_max_sample_rate(self) -> SupportedStreamConfig { SupportedStreamConfig { channels: self.channels, sample_rate: self.max_sample_rate, sample_format: self.sample_format, buffer_size: self.buffer_size, } } /// A comparison function which compares two [`SupportedStreamConfigRange`]s in terms of their priority of /// use as a default stream format. /// /// Some backends do not provide a default stream format for their audio devices. In these /// cases, CPAL attempts to decide on a reasonable default format for the user. To do this we /// use the "greatest" of all supported stream formats when compared with this method. /// /// SupportedStreamConfigs are prioritised by the following heuristics: /// /// **Channels**: /// /// - Stereo /// - Mono /// - Max available channels /// /// **Sample format**: /// - f32 /// - i16 /// - u16 /// /// **Sample rate**: /// /// - 44100 (cd quality) /// - Max sample rate pub fn cmp_default_heuristics(&self, other: &Self) -> std::cmp::Ordering { use std::cmp::Ordering::Equal; use SampleFormat::{F32, I16, U16}; let cmp_stereo = (self.channels == 2).cmp(&(other.channels == 2)); if cmp_stereo != Equal { return cmp_stereo; } let cmp_mono = (self.channels == 1).cmp(&(other.channels == 1)); if cmp_mono != Equal { return cmp_mono; } let cmp_channels = self.channels.cmp(&other.channels); if cmp_channels != Equal { return cmp_channels; } let cmp_f32 = (self.sample_format == F32).cmp(&(other.sample_format == F32)); if cmp_f32 != Equal { return cmp_f32; } let cmp_i16 = (self.sample_format == I16).cmp(&(other.sample_format == I16)); if cmp_i16 != Equal { return cmp_i16; } let cmp_u16 = (self.sample_format == U16).cmp(&(other.sample_format == U16)); if cmp_u16 != Equal { return cmp_u16; } const HZ_44100: SampleRate = SampleRate(44_100); let r44100_in_self = self.min_sample_rate <= HZ_44100 && HZ_44100 <= self.max_sample_rate; let r44100_in_other = other.min_sample_rate <= HZ_44100 && HZ_44100 <= other.max_sample_rate; let 
cmp_r44100 = r44100_in_self.cmp(&r44100_in_other);
        if cmp_r44100 != Equal {
            return cmp_r44100;
        }

        self.max_sample_rate.cmp(&other.max_sample_rate)
    }
}

#[test]
fn test_cmp_default_heuristics() {
    let mut formats = vec![
        SupportedStreamConfigRange {
            buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
            channels: 2,
            min_sample_rate: SampleRate(1),
            max_sample_rate: SampleRate(96000),
            sample_format: SampleFormat::F32,
        },
        SupportedStreamConfigRange {
            buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
            channels: 1,
            min_sample_rate: SampleRate(1),
            max_sample_rate: SampleRate(96000),
            sample_format: SampleFormat::F32,
        },
        SupportedStreamConfigRange {
            buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
            channels: 2,
            min_sample_rate: SampleRate(1),
            max_sample_rate: SampleRate(96000),
            sample_format: SampleFormat::I16,
        },
        SupportedStreamConfigRange {
            buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
            channels: 2,
            min_sample_rate: SampleRate(1),
            max_sample_rate: SampleRate(96000),
            sample_format: SampleFormat::U16,
        },
        SupportedStreamConfigRange {
            buffer_size: SupportedBufferSize::Range { min: 256, max: 512 },
            channels: 2,
            min_sample_rate: SampleRate(1),
            max_sample_rate: SampleRate(22050),
            sample_format: SampleFormat::F32,
        },
    ];

    formats.sort_by(|a, b| a.cmp_default_heuristics(b));

    // lowest-priority first:
    assert_eq!(formats[0].sample_format(), SampleFormat::F32);
    assert_eq!(formats[0].min_sample_rate(), SampleRate(1));
    assert_eq!(formats[0].max_sample_rate(), SampleRate(96000));
    assert_eq!(formats[0].channels(), 1);
    assert_eq!(formats[1].sample_format(), SampleFormat::U16);
    assert_eq!(formats[1].min_sample_rate(), SampleRate(1));
    assert_eq!(formats[1].max_sample_rate(), SampleRate(96000));
    assert_eq!(formats[1].channels(), 2);
    assert_eq!(formats[2].sample_format(), SampleFormat::I16);
    assert_eq!(formats[2].min_sample_rate(), SampleRate(1));
    assert_eq!(formats[2].max_sample_rate(), SampleRate(96000));
    assert_eq!(formats[2].channels(), 2);
    assert_eq!(formats[3].sample_format(), SampleFormat::F32);
    assert_eq!(formats[3].min_sample_rate(), SampleRate(1));
    assert_eq!(formats[3].max_sample_rate(), SampleRate(22050));
    assert_eq!(formats[3].channels(), 2);
    assert_eq!(formats[4].sample_format(), SampleFormat::F32);
    assert_eq!(formats[4].min_sample_rate(), SampleRate(1));
    assert_eq!(formats[4].max_sample_rate(), SampleRate(96000));
    assert_eq!(formats[4].channels(), 2);
}

// NOTE(review): the `<SupportedStreamConfig>` type argument below was lost in extraction;
// restored from the `from` parameter type — verify against upstream cpal 0.15.2.
impl From<SupportedStreamConfig> for StreamConfig {
    fn from(conf: SupportedStreamConfig) -> Self {
        conf.config()
    }
}

// If a backend does not provide an API for retrieving supported formats, we query it with a bunch
// of commonly used rates. This is always the case for wasapi and is sometimes the case for alsa.
//
// If a rate you desire is missing from this list, feel free to add it!
#[cfg(target_os = "windows")]
const COMMON_SAMPLE_RATES: &[SampleRate] = &[
    SampleRate(5512),
    SampleRate(8000),
    SampleRate(11025),
    SampleRate(16000),
    SampleRate(22050),
    SampleRate(32000),
    SampleRate(44100),
    SampleRate(48000),
    SampleRate(64000),
    SampleRate(88200),
    SampleRate(96000),
    SampleRate(176400),
    SampleRate(192000),
];

#[test]
fn test_stream_instant() {
    let a = StreamInstant::new(2, 0);
    let b = StreamInstant::new(-2, 0);
    let min = StreamInstant::new(i64::MIN, 0);
    let max = StreamInstant::new(i64::MAX, 0);
    assert_eq!(
        a.sub(Duration::from_secs(1)),
        Some(StreamInstant::new(1, 0))
    );
    assert_eq!(
        a.sub(Duration::from_secs(2)),
        Some(StreamInstant::new(0, 0))
    );
    assert_eq!(
        a.sub(Duration::from_secs(3)),
        Some(StreamInstant::new(-1, 0))
    );
    assert_eq!(min.sub(Duration::from_secs(1)), None);
    assert_eq!(
        b.add(Duration::from_secs(1)),
        Some(StreamInstant::new(-1, 0))
    );
    assert_eq!(
        b.add(Duration::from_secs(2)),
        Some(StreamInstant::new(0, 0))
    );
    assert_eq!(
        b.add(Duration::from_secs(3)),
        Some(StreamInstant::new(1, 0))
    );
    assert_eq!(max.add(Duration::from_secs(1)), None);
}
cpal-0.15.2/src/platform/mod.rs000064400000000000000000000625371046102023000144050ustar  00000000000000//!
Platform-specific items. //! //! This module also contains the implementation of the platform's dynamically dispatched [`Host`] //! type and its associated [`Device`], [`Stream`] and other associated types. These //! types are useful in the case that users require switching between audio host APIs at runtime. #[doc(inline)] pub use self::platform_impl::*; /// A macro to assist with implementing a platform's dynamically dispatched [`Host`] type. /// /// These dynamically dispatched types are necessary to allow for users to switch between hosts at /// runtime. /// /// For example the invocation `impl_platform_host(Wasapi wasapi "WASAPI", Asio asio "ASIO")`, /// this macro should expand to: /// // This sample code block is marked as text because it's not a valid test, // it's just illustrative. (see rust issue #96573) /// ```text /// pub enum HostId { /// Wasapi, /// Asio, /// } /// /// pub enum Host { /// Wasapi(crate::host::wasapi::Host), /// Asio(crate::host::asio::Host), /// } /// ``` /// /// And so on for Device, Devices, Host, Stream, SupportedInputConfigs, /// SupportedOutputConfigs and all their necessary trait implementations. /// macro_rules! impl_platform_host { ($($(#[cfg($feat: meta)])? $HostVariant:ident $host_mod:ident $host_name:literal),*) => { /// All hosts supported by CPAL on this platform. pub const ALL_HOSTS: &'static [HostId] = &[ $( $(#[cfg($feat)])? HostId::$HostVariant, )* ]; /// The platform's dynamically dispatched `Host` type. /// /// An instance of this `Host` type may represent one of the `Host`s available /// on the platform. /// /// Use this type if you require switching between available hosts at runtime. /// /// This type may be constructed via the [`host_from_id`] function. [`HostId`]s may /// be acquired via the [`ALL_HOSTS`] const, and the [`available_hosts`] function. pub struct Host(HostInner); /// The `Device` implementation associated with the platform's dynamically dispatched /// [`Host`] type. 
pub struct Device(DeviceInner); /// The `Devices` iterator associated with the platform's dynamically dispatched [`Host`] /// type. pub struct Devices(DevicesInner); /// The `Stream` implementation associated with the platform's dynamically dispatched /// [`Host`] type. // Streams cannot be `Send` or `Sync` if we plan to support Android's AAudio API. This is // because the stream API is not thread-safe, and the API prohibits calling certain // functions within the callback. // // TODO: Confirm this and add more specific detail and references. pub struct Stream(StreamInner, crate::platform::NotSendSyncAcrossAllPlatforms); /// The `SupportedInputConfigs` iterator associated with the platform's dynamically /// dispatched [`Host`] type. pub struct SupportedInputConfigs(SupportedInputConfigsInner); /// The `SupportedOutputConfigs` iterator associated with the platform's dynamically /// dispatched [`Host`] type. pub struct SupportedOutputConfigs(SupportedOutputConfigsInner); /// Unique identifier for available hosts on the platform. #[derive(Copy, Clone, Debug, Eq, Hash, PartialEq)] pub enum HostId { $( $(#[cfg($feat)])? $HostVariant, )* } /// Contains a platform specific [`Device`] implementation. pub enum DeviceInner { $( $(#[cfg($feat)])? $HostVariant(crate::host::$host_mod::Device), )* } /// Contains a platform specific [`Devices`] implementation. pub enum DevicesInner { $( $(#[cfg($feat)])? $HostVariant(crate::host::$host_mod::Devices), )* } /// Contains a platform specific [`Host`] implementation. pub enum HostInner { $( $(#[cfg($feat)])? $HostVariant(crate::host::$host_mod::Host), )* } /// Contains a platform specific [`Stream`] implementation. pub enum StreamInner { $( $(#[cfg($feat)])? $HostVariant(crate::host::$host_mod::Stream), )* } enum SupportedInputConfigsInner { $( $(#[cfg($feat)])? $HostVariant(crate::host::$host_mod::SupportedInputConfigs), )* } enum SupportedOutputConfigsInner { $( $(#[cfg($feat)])? 
$HostVariant(crate::host::$host_mod::SupportedOutputConfigs), )* } impl HostId { pub fn name(&self) -> &'static str { match self { $( $(#[cfg($feat)])? HostId::$HostVariant => $host_name, )* } } } impl Devices { /// Returns a reference to the underlying platform specific implementation of this /// `Devices`. pub fn as_inner(&self) -> &DevicesInner { &self.0 } /// Returns a mutable reference to the underlying platform specific implementation of /// this `Devices`. pub fn as_inner_mut(&mut self) -> &mut DevicesInner { &mut self.0 } /// Returns the underlying platform specific implementation of this `Devices`. pub fn into_inner(self) -> DevicesInner { self.0 } } impl Device { /// Returns a reference to the underlying platform specific implementation of this /// `Device`. pub fn as_inner(&self) -> &DeviceInner { &self.0 } /// Returns a mutable reference to the underlying platform specific implementation of /// this `Device`. pub fn as_inner_mut(&mut self) -> &mut DeviceInner { &mut self.0 } /// Returns the underlying platform specific implementation of this `Device`. pub fn into_inner(self) -> DeviceInner { self.0 } } impl Host { /// The unique identifier associated with this `Host`. pub fn id(&self) -> HostId { match self.0 { $( $(#[cfg($feat)])? HostInner::$HostVariant(_) => HostId::$HostVariant, )* } } /// Returns a reference to the underlying platform specific implementation of this /// `Host`. pub fn as_inner(&self) -> &HostInner { &self.0 } /// Returns a mutable reference to the underlying platform specific implementation of /// this `Host`. pub fn as_inner_mut(&mut self) -> &mut HostInner { &mut self.0 } /// Returns the underlying platform specific implementation of this `Host`. pub fn into_inner(self) -> HostInner { self.0 } } impl Stream { /// Returns a reference to the underlying platform specific implementation of this /// `Stream`. 
pub fn as_inner(&self) -> &StreamInner { &self.0 } /// Returns a mutable reference to the underlying platform specific implementation of /// this `Stream`. pub fn as_inner_mut(&mut self) -> &mut StreamInner { &mut self.0 } /// Returns the underlying platform specific implementation of this `Stream`. pub fn into_inner(self) -> StreamInner { self.0 } } impl Iterator for Devices { type Item = Device; fn next(&mut self) -> Option { match self.0 { $( $(#[cfg($feat)])? DevicesInner::$HostVariant(ref mut d) => { d.next().map(DeviceInner::$HostVariant).map(Device::from) } )* } } fn size_hint(&self) -> (usize, Option) { match self.0 { $( $(#[cfg($feat)])? DevicesInner::$HostVariant(ref d) => d.size_hint(), )* } } } impl Iterator for SupportedInputConfigs { type Item = crate::SupportedStreamConfigRange; fn next(&mut self) -> Option { match self.0 { $( $(#[cfg($feat)])? SupportedInputConfigsInner::$HostVariant(ref mut s) => s.next(), )* } } fn size_hint(&self) -> (usize, Option) { match self.0 { $( $(#[cfg($feat)])? SupportedInputConfigsInner::$HostVariant(ref d) => d.size_hint(), )* } } } impl Iterator for SupportedOutputConfigs { type Item = crate::SupportedStreamConfigRange; fn next(&mut self) -> Option { match self.0 { $( $(#[cfg($feat)])? SupportedOutputConfigsInner::$HostVariant(ref mut s) => s.next(), )* } } fn size_hint(&self) -> (usize, Option) { match self.0 { $( $(#[cfg($feat)])? SupportedOutputConfigsInner::$HostVariant(ref d) => d.size_hint(), )* } } } impl crate::traits::DeviceTrait for Device { type SupportedInputConfigs = SupportedInputConfigs; type SupportedOutputConfigs = SupportedOutputConfigs; type Stream = Stream; fn name(&self) -> Result { match self.0 { $( $(#[cfg($feat)])? DeviceInner::$HostVariant(ref d) => d.name(), )* } } fn supported_input_configs(&self) -> Result { match self.0 { $( $(#[cfg($feat)])? 
DeviceInner::$HostVariant(ref d) => { d.supported_input_configs() .map(SupportedInputConfigsInner::$HostVariant) .map(SupportedInputConfigs) } )* } } fn supported_output_configs(&self) -> Result { match self.0 { $( $(#[cfg($feat)])? DeviceInner::$HostVariant(ref d) => { d.supported_output_configs() .map(SupportedOutputConfigsInner::$HostVariant) .map(SupportedOutputConfigs) } )* } } fn default_input_config(&self) -> Result { match self.0 { $( $(#[cfg($feat)])? DeviceInner::$HostVariant(ref d) => d.default_input_config(), )* } } fn default_output_config(&self) -> Result { match self.0 { $( $(#[cfg($feat)])? DeviceInner::$HostVariant(ref d) => d.default_output_config(), )* } } fn build_input_stream_raw( &self, config: &crate::StreamConfig, sample_format: crate::SampleFormat, data_callback: D, error_callback: E, timeout: Option, ) -> Result where D: FnMut(&crate::Data, &crate::InputCallbackInfo) + Send + 'static, E: FnMut(crate::StreamError) + Send + 'static, { match self.0 { $( $(#[cfg($feat)])? DeviceInner::$HostVariant(ref d) => d .build_input_stream_raw( config, sample_format, data_callback, error_callback, timeout, ) .map(StreamInner::$HostVariant) .map(Stream::from), )* } } fn build_output_stream_raw( &self, config: &crate::StreamConfig, sample_format: crate::SampleFormat, data_callback: D, error_callback: E, timeout: Option, ) -> Result where D: FnMut(&mut crate::Data, &crate::OutputCallbackInfo) + Send + 'static, E: FnMut(crate::StreamError) + Send + 'static, { match self.0 { $( $(#[cfg($feat)])? DeviceInner::$HostVariant(ref d) => d .build_output_stream_raw( config, sample_format, data_callback, error_callback, timeout, ) .map(StreamInner::$HostVariant) .map(Stream::from), )* } } } impl crate::traits::HostTrait for Host { type Devices = Devices; type Device = Device; fn is_available() -> bool { $( $(#[cfg($feat)])? if crate::host::$host_mod::Host::is_available() { return true; } )* false } fn devices(&self) -> Result { match self.0 { $( $(#[cfg($feat)])? 
HostInner::$HostVariant(ref h) => { h.devices().map(DevicesInner::$HostVariant).map(Devices::from) } )* } } fn default_input_device(&self) -> Option { match self.0 { $( $(#[cfg($feat)])? HostInner::$HostVariant(ref h) => { h.default_input_device().map(DeviceInner::$HostVariant).map(Device::from) } )* } } fn default_output_device(&self) -> Option { match self.0 { $( $(#[cfg($feat)])? HostInner::$HostVariant(ref h) => { h.default_output_device().map(DeviceInner::$HostVariant).map(Device::from) } )* } } } impl crate::traits::StreamTrait for Stream { fn play(&self) -> Result<(), crate::PlayStreamError> { match self.0 { $( $(#[cfg($feat)])? StreamInner::$HostVariant(ref s) => { s.play() } )* } } fn pause(&self) -> Result<(), crate::PauseStreamError> { match self.0 { $( $(#[cfg($feat)])? StreamInner::$HostVariant(ref s) => { s.pause() } )* } } } impl From for Device { fn from(d: DeviceInner) -> Self { Device(d) } } impl From for Devices { fn from(d: DevicesInner) -> Self { Devices(d) } } impl From for Host { fn from(h: HostInner) -> Self { Host(h) } } impl From for Stream { fn from(s: StreamInner) -> Self { Stream(s, Default::default()) } } $( $(#[cfg($feat)])? impl From for Device { fn from(h: crate::host::$host_mod::Device) -> Self { DeviceInner::$HostVariant(h).into() } } $(#[cfg($feat)])? impl From for Devices { fn from(h: crate::host::$host_mod::Devices) -> Self { DevicesInner::$HostVariant(h).into() } } $(#[cfg($feat)])? impl From for Host { fn from(h: crate::host::$host_mod::Host) -> Self { HostInner::$HostVariant(h).into() } } $(#[cfg($feat)])? impl From for Stream { fn from(h: crate::host::$host_mod::Stream) -> Self { StreamInner::$HostVariant(h).into() } } )* /// Produces a list of hosts that are currently available on the system. pub fn available_hosts() -> Vec { let mut host_ids = vec![]; $( $(#[cfg($feat)])? 
if ::is_available() { host_ids.push(HostId::$HostVariant); } )* host_ids } /// Given a unique host identifier, initialise and produce the host if it is available. pub fn host_from_id(id: HostId) -> Result { match id { $( $(#[cfg($feat)])? HostId::$HostVariant => { crate::host::$host_mod::Host::new() .map(HostInner::$HostVariant) .map(Host::from) } )* } } }; } // TODO: Add pulseaudio and jack here eventually. #[cfg(any( target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "netbsd" ))] mod platform_impl { pub use crate::host::alsa::{ Device as AlsaDevice, Devices as AlsaDevices, Host as AlsaHost, Stream as AlsaStream, SupportedInputConfigs as AlsaSupportedInputConfigs, SupportedOutputConfigs as AlsaSupportedOutputConfigs, }; #[cfg(feature = "jack")] pub use crate::host::jack::{ Device as JackDevice, Devices as JackDevices, Host as JackHost, Stream as JackStream, SupportedInputConfigs as JackSupportedInputConfigs, SupportedOutputConfigs as JackSupportedOutputConfigs, }; impl_platform_host!(#[cfg(feature = "jack")] Jack jack "JACK", Alsa alsa "ALSA"); /// The default host for the current compilation target platform. pub fn default_host() -> Host { AlsaHost::new() .expect("the default host should always be available") .into() } } #[cfg(any(target_os = "macos", target_os = "ios"))] mod platform_impl { pub use crate::host::coreaudio::{ Device as CoreAudioDevice, Devices as CoreAudioDevices, Host as CoreAudioHost, Stream as CoreAudioStream, SupportedInputConfigs as CoreAudioSupportedInputConfigs, SupportedOutputConfigs as CoreAudioSupportedOutputConfigs, }; impl_platform_host!(CoreAudio coreaudio "CoreAudio"); /// The default host for the current compilation target platform. 
pub fn default_host() -> Host {
        CoreAudioHost::new()
            .expect("the default host should always be available")
            .into()
    }
}

#[cfg(target_os = "emscripten")]
mod platform_impl {
    pub use crate::host::emscripten::{
        Device as EmscriptenDevice, Devices as EmscriptenDevices, Host as EmscriptenHost,
        Stream as EmscriptenStream, SupportedInputConfigs as EmscriptenSupportedInputConfigs,
        SupportedOutputConfigs as EmscriptenSupportedOutputConfigs,
    };

    impl_platform_host!(Emscripten emscripten "Emscripten");

    /// The default host for the current compilation target platform.
    pub fn default_host() -> Host {
        EmscriptenHost::new()
            .expect("the default host should always be available")
            .into()
    }
}

#[cfg(all(target_arch = "wasm32", feature = "wasm-bindgen"))]
mod platform_impl {
    pub use crate::host::webaudio::{
        Device as WebAudioDevice, Devices as WebAudioDevices, Host as WebAudioHost,
        Stream as WebAudioStream, SupportedInputConfigs as WebAudioSupportedInputConfigs,
        SupportedOutputConfigs as WebAudioSupportedOutputConfigs,
    };

    impl_platform_host!(WebAudio webaudio "WebAudio");

    /// The default host for the current compilation target platform.
    pub fn default_host() -> Host {
        WebAudioHost::new()
            .expect("the default host should always be available")
            .into()
    }
}

#[cfg(windows)]
mod platform_impl {
    #[cfg(feature = "asio")]
    pub use crate::host::asio::{
        Device as AsioDevice, Devices as AsioDevices, Host as AsioHost, Stream as AsioStream,
        SupportedInputConfigs as AsioSupportedInputConfigs,
        SupportedOutputConfigs as AsioSupportedOutputConfigs,
    };
    pub use crate::host::wasapi::{
        Device as WasapiDevice, Devices as WasapiDevices, Host as WasapiHost,
        Stream as WasapiStream, SupportedInputConfigs as WasapiSupportedInputConfigs,
        SupportedOutputConfigs as WasapiSupportedOutputConfigs,
    };

    impl_platform_host!(#[cfg(feature = "asio")] Asio asio "ASIO", Wasapi wasapi "WASAPI");

    /// The default host for the current compilation target platform.
    pub fn default_host() -> Host {
        WasapiHost::new()
            .expect("the default host should always be available")
            .into()
    }
}

#[cfg(target_os = "android")]
mod platform_impl {
    pub use crate::host::oboe::{
        Device as OboeDevice, Devices as OboeDevices, Host as OboeHost, Stream as OboeStream,
        SupportedInputConfigs as OboeSupportedInputConfigs,
        SupportedOutputConfigs as OboeSupportedOutputConfigs,
    };

    impl_platform_host!(Oboe oboe "Oboe");

    /// The default host for the current compilation target platform.
    pub fn default_host() -> Host {
        OboeHost::new()
            .expect("the default host should always be available")
            .into()
    }
}

#[cfg(not(any(
    windows,
    target_os = "linux",
    target_os = "dragonfly",
    target_os = "freebsd",
    target_os = "netbsd",
    target_os = "macos",
    target_os = "ios",
    target_os = "emscripten",
    target_os = "android",
    all(target_arch = "wasm32", feature = "wasm-bindgen"),
)))]
mod platform_impl {
    pub use crate::host::null::{
        Device as NullDevice, Devices as NullDevices, Host as NullHost,
        SupportedInputConfigs as NullSupportedInputConfigs,
        SupportedOutputConfigs as NullSupportedOutputConfigs,
    };

    impl_platform_host!(Null null "Null");

    /// The default host for the current compilation target platform.
    pub fn default_host() -> Host {
        NullHost::new()
            .expect("the default host should always be available")
            .into()
    }
}

// The following zero-sized types are for applying Send/Sync restrictions to ensure
// consistent behaviour across different platforms. These verbosely named types are used
// (rather than using the markers directly) in the hope of making the compile errors
// slightly more helpful.
//
// TODO: Remove these in favour of using negative trait bounds if they stabilise.

// A marker used to remove the `Send` and `Sync` traits.
struct NotSendSyncAcrossAllPlatforms(std::marker::PhantomData<*mut ()>); impl Default for NotSendSyncAcrossAllPlatforms { fn default() -> Self { NotSendSyncAcrossAllPlatforms(std::marker::PhantomData) } } cpal-0.15.2/src/samples_formats.rs000064400000000000000000000121121046102023000151610ustar 00000000000000use std::{fmt::Display, mem}; #[cfg(target_os = "emscripten")] use wasm_bindgen::prelude::*; pub use dasp_sample::{FromSample, Sample, I24, I48, U24, U48}; /// Format that each sample has. #[cfg_attr(target_os = "emscripten", wasm_bindgen)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] #[non_exhaustive] pub enum SampleFormat { /// `i8` with a valid range of 'u8::MIN..=u8::MAX' with `0` being the origin I8, /// `i16` with a valid range of 'u16::MIN..=u16::MAX' with `0` being the origin I16, // /// `I24` with a valid range of '-(1 << 23)..(1 << 23)' with `0` being the origin // I24, /// `i32` with a valid range of 'u32::MIN..=u32::MAX' with `0` being the origin I32, // /// `I24` with a valid range of '-(1 << 47)..(1 << 47)' with `0` being the origin // I48, /// `i64` with a valid range of 'u64::MIN..=u64::MAX' with `0` being the origin I64, /// `u8` with a valid range of 'u8::MIN..=u8::MAX' with `1 << 7 == 128` being the origin U8, /// `u16` with a valid range of 'u16::MIN..=u16::MAX' with `1 << 15 == 32768` being the origin U16, // /// `U24` with a valid range of '0..16777216' with `1 << 23 == 8388608` being the origin // U24, /// `u32` with a valid range of 'u32::MIN..=u32::MAX' with `1 << 31` being the origin U32, // /// `U48` with a valid range of '0..(1 << 48)' with `1 << 47` being the origin // U48, /// `u64` with a valid range of 'u64::MIN..=u64::MAX' with `1 << 63` being the origin U64, /// `f32` with a valid range of `-1.0..1.0` with `0.0` being the origin F32, /// `f64` with a valid range of -1.0..1.0 with 0.0 being the origin F64, } impl SampleFormat { /// Returns the size in bytes of a sample of this format. 
#[inline] #[must_use] pub fn sample_size(&self) -> usize { match *self { SampleFormat::I8 | SampleFormat::U8 => mem::size_of::(), SampleFormat::I16 | SampleFormat::U16 => mem::size_of::(), // SampleFormat::I24 | SampleFormat::U24 => 3, SampleFormat::I32 | SampleFormat::U32 => mem::size_of::(), // SampleFormat::I48 | SampleFormat::U48 => 6, SampleFormat::I64 | SampleFormat::U64 => mem::size_of::(), SampleFormat::F32 => mem::size_of::(), SampleFormat::F64 => mem::size_of::(), } } #[inline] #[must_use] pub fn is_int(&self) -> bool { //matches!(*self, SampleFormat::I8 | SampleFormat::I16 | SampleFormat::I24 | SampleFormat::I32 | SampleFormat::I48 | SampleFormat::I64) matches!( *self, SampleFormat::I8 | SampleFormat::I16 | SampleFormat::I32 | SampleFormat::I64 ) } #[inline] #[must_use] pub fn is_uint(&self) -> bool { //matches!(*self, SampleFormat::U8 | SampleFormat::U16 | SampleFormat::U24 | SampleFormat::U32 | SampleFormat::U48 | SampleFormat::U64) matches!( *self, SampleFormat::U8 | SampleFormat::U16 | SampleFormat::U32 | SampleFormat::U64 ) } #[inline] #[must_use] pub fn is_float(&self) -> bool { matches!(*self, SampleFormat::F32 | SampleFormat::F64) } } impl Display for SampleFormat { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match *self { SampleFormat::I8 => "i8", SampleFormat::I16 => "i16", // SampleFormat::I24 => "i24", SampleFormat::I32 => "i32", // SampleFormat::I48 => "i48", SampleFormat::I64 => "i64", SampleFormat::U8 => "u8", SampleFormat::U16 => "u16", // SampleFormat::U24 => "u24", SampleFormat::U32 => "u32", // SampleFormat::U48 => "u48", SampleFormat::U64 => "u64", SampleFormat::F32 => "f32", SampleFormat::F64 => "f64", } .fmt(f) } } pub trait SizedSample: Sample { const FORMAT: SampleFormat; } impl SizedSample for i8 { const FORMAT: SampleFormat = SampleFormat::I8; } impl SizedSample for i16 { const FORMAT: SampleFormat = SampleFormat::I16; } // impl SizedSample for I24 { const FORMAT: SampleFormat = SampleFormat::I24; } impl 
SizedSample for i32 { const FORMAT: SampleFormat = SampleFormat::I32; } // impl SizedSample for I48 { const FORMAT: SampleFormat = SampleFormat::I48; } impl SizedSample for i64 { const FORMAT: SampleFormat = SampleFormat::I64; } impl SizedSample for u8 { const FORMAT: SampleFormat = SampleFormat::U8; } impl SizedSample for u16 { const FORMAT: SampleFormat = SampleFormat::U16; } // impl SizedSample for U24 { const FORMAT: SampleFormat = SampleFormat::U24; } impl SizedSample for u32 { const FORMAT: SampleFormat = SampleFormat::U32; } // impl SizedSample for U48 { const FORMAT: SampleFormat = SampleFormat::U48; } impl SizedSample for u64 { const FORMAT: SampleFormat = SampleFormat::U64; } impl SizedSample for f32 { const FORMAT: SampleFormat = SampleFormat::F32; } impl SizedSample for f64 { const FORMAT: SampleFormat = SampleFormat::F64; } cpal-0.15.2/src/traits.rs000064400000000000000000000213531046102023000132770ustar 00000000000000//! The suite of traits allowing CPAL to abstract over hosts, devices, event loops and stream IDs. use std::time::Duration; use crate::{ BuildStreamError, Data, DefaultStreamConfigError, DeviceNameError, DevicesError, InputCallbackInfo, InputDevices, OutputCallbackInfo, OutputDevices, PauseStreamError, PlayStreamError, SampleFormat, SizedSample, StreamConfig, StreamError, SupportedStreamConfig, SupportedStreamConfigRange, SupportedStreamConfigsError, }; /// A [`Host`] provides access to the available audio devices on the system. /// /// Each platform may have a number of available hosts depending on the system, each with their own /// pros and cons. /// /// For example, WASAPI is the standard audio host API that ships with the Windows operating /// system. However, due to historical limitations with respect to performance and flexibility, /// Steinberg created the ASIO API providing better audio device support for pro audio and /// low-latency applications. 
As a result, it is common for some devices and device capabilities to
/// only be available via ASIO, while others are only available via WASAPI.
///
/// Another great example is the Linux platform. While the ALSA host API is the lowest-level API
/// available to almost all distributions of Linux, its flexibility is limited as it requires that
/// each process have exclusive access to the devices with which they establish streams. PulseAudio
/// is another popular host API that aims to solve this issue by providing user-space mixing,
/// however it has its own limitations w.r.t. low-latency and high-performance audio applications.
/// JACK is yet another host API that is more suitable to pro-audio applications, however it is
/// less readily available by default in many Linux distributions and is known to be tricky to
/// set up.
///
/// [`Host`]: crate::Host
// NOTE(review): the associated-type bounds and generic signatures in these traits were lost
// in extraction (stripped angle brackets); they have been reconstructed from the method
// bodies and call sites visible in this file — verify against upstream cpal 0.15.2.
pub trait HostTrait {
    /// The type used for enumerating available devices by the host.
    type Devices: Iterator<Item = Self::Device>;
    /// The `Device` type yielded by the host.
    type Device: DeviceTrait;

    /// Whether or not the host is available on the system.
    fn is_available() -> bool;

    /// An iterator yielding all [`Device`](DeviceTrait)s currently available to the host on the system.
    ///
    /// Can be empty if the system does not support audio in general.
    fn devices(&self) -> Result<Self::Devices, DevicesError>;

    /// The default input audio device on the system.
    ///
    /// Returns `None` if no input device is available.
    fn default_input_device(&self) -> Option<Self::Device>;

    /// The default output audio device on the system.
    ///
    /// Returns `None` if no output device is available.
    fn default_output_device(&self) -> Option<Self::Device>;

    /// An iterator yielding all `Device`s currently available to the system that support one or more
    /// input stream formats.
    ///
    /// Can be empty if the system does not support audio input.
    fn input_devices(&self) -> Result<InputDevices<Self::Devices>, DevicesError> {
        fn supports_input<D: DeviceTrait>(device: &D) -> bool {
            device
                .supported_input_configs()
                .map(|mut iter| iter.next().is_some())
                .unwrap_or(false)
        }
        Ok(self.devices()?.filter(supports_input::<Self::Device>))
    }

    /// An iterator yielding all `Device`s currently available to the system that support one or more
    /// output stream formats.
    ///
    /// Can be empty if the system does not support audio output.
    fn output_devices(&self) -> Result<OutputDevices<Self::Devices>, DevicesError> {
        fn supports_output<D: DeviceTrait>(device: &D) -> bool {
            device
                .supported_output_configs()
                .map(|mut iter| iter.next().is_some())
                .unwrap_or(false)
        }
        Ok(self.devices()?.filter(supports_output::<Self::Device>))
    }
}

/// A device that is capable of audio input and/or output.
///
/// Please note that `Device`s may become invalid if they get disconnected. Therefore, all the
/// methods that involve a device return a `Result` allowing the user to handle this case.
pub trait DeviceTrait {
    /// The iterator type yielding supported input stream formats.
    type SupportedInputConfigs: Iterator<Item = SupportedStreamConfigRange>;
    /// The iterator type yielding supported output stream formats.
    type SupportedOutputConfigs: Iterator<Item = SupportedStreamConfigRange>;
    /// The stream type created by [`build_input_stream_raw`] and [`build_output_stream_raw`].
    ///
    /// [`build_input_stream_raw`]: Self::build_input_stream_raw
    /// [`build_output_stream_raw`]: Self::build_output_stream_raw
    type Stream: StreamTrait;

    /// The human-readable name of the device.
    fn name(&self) -> Result<String, DeviceNameError>;

    /// An iterator yielding formats that are supported by the backend.
    ///
    /// Can return an error if the device is no longer valid (e.g. it has been disconnected).
    fn supported_input_configs(
        &self,
    ) -> Result<Self::SupportedInputConfigs, SupportedStreamConfigsError>;

    /// An iterator yielding output stream formats that are supported by the device.
    ///
    /// Can return an error if the device is no longer valid (e.g. it has been disconnected).
    fn supported_output_configs(
        &self,
    ) -> Result<Self::SupportedOutputConfigs, SupportedStreamConfigsError>;

    /// The default input stream format for the device.
    fn default_input_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError>;

    /// The default output stream format for the device.
    fn default_output_config(&self) -> Result<SupportedStreamConfig, DefaultStreamConfigError>;

    /// Create an input stream.
    fn build_input_stream<T, D, E>(
        &self,
        config: &StreamConfig,
        mut data_callback: D,
        error_callback: E,
        timeout: Option<Duration>,
    ) -> Result<Self::Stream, BuildStreamError>
    where
        T: SizedSample,
        D: FnMut(&[T], &InputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static,
    {
        self.build_input_stream_raw(
            config,
            T::FORMAT,
            move |data, info| {
                data_callback(
                    data.as_slice()
                        .expect("host supplied incorrect sample type"),
                    info,
                )
            },
            error_callback,
            timeout,
        )
    }

    /// Create an output stream.
    fn build_output_stream<T, D, E>(
        &self,
        config: &StreamConfig,
        mut data_callback: D,
        error_callback: E,
        timeout: Option<Duration>,
    ) -> Result<Self::Stream, BuildStreamError>
    where
        T: SizedSample,
        D: FnMut(&mut [T], &OutputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static,
    {
        self.build_output_stream_raw(
            config,
            T::FORMAT,
            move |data, info| {
                data_callback(
                    data.as_slice_mut()
                        .expect("host supplied incorrect sample type"),
                    info,
                )
            },
            error_callback,
            timeout,
        )
    }

    /// Create a dynamically typed input stream.
    fn build_input_stream_raw<D, E>(
        &self,
        config: &StreamConfig,
        sample_format: SampleFormat,
        data_callback: D,
        error_callback: E,
        timeout: Option<Duration>,
    ) -> Result<Self::Stream, BuildStreamError>
    where
        D: FnMut(&Data, &InputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static;

    /// Create a dynamically typed output stream.
    fn build_output_stream_raw<D, E>(
        &self,
        config: &StreamConfig,
        sample_format: SampleFormat,
        data_callback: D,
        error_callback: E,
        timeout: Option<Duration>,
    ) -> Result<Self::Stream, BuildStreamError>
    where
        D: FnMut(&mut Data, &OutputCallbackInfo) + Send + 'static,
        E: FnMut(StreamError) + Send + 'static;
}

/// A stream created from [`Device`](DeviceTrait), with methods to control playback.
pub trait StreamTrait {
    /// Run the stream.
    ///
    /// Note: Not all platforms automatically run the stream upon creation, so it is important to
    /// call `play` after creation if it is expected that the stream should run immediately.
    fn play(&self) -> Result<(), PlayStreamError>;

    /// Some devices support pausing the audio stream. This can be useful for saving energy in
    /// moments of silence.
    ///
    /// Note: Not all devices support suspending the stream at the hardware level. This method may
    /// fail in these cases.
    fn pause(&self) -> Result<(), PauseStreamError>;
}