
Merge remote-tracking branch 'upstream/dev' into fix-onevent-panic

Roderick van Domburg 2021-04-21 21:07:56 +02:00
commit 78850dd45b
68 changed files with 4385 additions and 5306 deletions


@ -4,61 +4,81 @@ name: test
on:
push:
branches: [master, dev]
paths: ['**.rs', '**.toml', '**.lock', '**.yml']
paths:
[
"**.rs",
"Cargo.toml",
"/Cargo.lock",
"/rustfmt.toml",
"/.github/workflows",
]
pull_request:
branches: [master, dev]
paths: ['**.rs', '**.toml', '**.lock', '**.yml']
paths:
[
"**.rs",
"Cargo.toml",
"/Cargo.lock",
"/rustfmt.toml",
"/.github/workflows",
]
schedule:
# Run CI every week
- cron: "00 01 * * 0"
env:
RUST_BACKTRACE: 1
jobs:
fmt:
name: 'Rust: format check'
name: rustfmt
runs-on: ${{ matrix.os }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
# Only run the formatting check for stable
include:
- os: ubuntu-latest
toolchain: stable
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
# Use default profile to get rustfmt
profile: default
toolchain: ${{ matrix.toolchain }}
override: true
- run: cargo fmt --verbose --all -- --check
test:
needs: fmt
runs-on: ${{ matrix.os }}
continue-on-error: ${{ matrix.experimental }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
toolchain:
- 1.41.1 # MSRV (Minimum supported rust version)
- stable
- beta
experimental: [false]
# Ignore failures in nightly, not ideal, but necessary
include:
- os: ubuntu-latest
toolchain: nightly
experimental: true
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
components: rustfmt
- run: cargo fmt --all -- --check
test-linux:
needs: fmt
name: cargo +${{ matrix.toolchain }} build (${{ matrix.os }})
runs-on: ${{ matrix.os }}
continue-on-error: ${{ matrix.experimental }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest]
toolchain:
- 1.45 # MSRV (Minimum supported rust version)
- stable
- beta
experimental: [false]
# Ignore failures in nightly
include:
- os: ubuntu-latest
toolchain: nightly
experimental: true
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.toolchain }}
override: true
- name: Get Rustc version
id: get-rustc-version
run: echo "::set-output name=version::$(rustc -V)"
shell: bash
- name: Cache Rust dependencies
uses: actions/cache@v2
with:
@ -67,20 +87,65 @@ jobs:
~/.cargo/registry/cache
~/.cargo/git
target
key: ${{ runner.os }}-build-${{ hashFiles('**/Cargo.lock') }}
key: ${{ runner.os }}-${{ steps.get-rustc-version.outputs.version }}-${{ hashFiles('**/Cargo.lock') }}
- name: Install developer package dependencies
run: sudo apt-get update && sudo apt-get install libpulse-dev portaudio19-dev libasound2-dev libsdl2-dev gstreamer1.0-dev libgstreamer-plugins-base1.0-dev
run: sudo apt-get update && sudo apt-get install libpulse-dev portaudio19-dev libasound2-dev libsdl2-dev gstreamer1.0-dev libgstreamer-plugins-base1.0-dev libavahi-compat-libdnssd-dev
- run: cargo build --locked --no-default-features
- run: cargo build --locked --examples
- run: cargo build --locked --no-default-features --features "with-tremor"
- run: cargo build --locked --no-default-features --features "with-vorbis"
- run: cargo build --locked --no-default-features --features "alsa-backend"
- run: cargo build --locked --no-default-features --features "portaudio-backend"
- run: cargo build --locked --no-default-features --features "pulseaudio-backend"
- run: cargo build --locked --no-default-features --features "jackaudio-backend"
- run: cargo build --locked --no-default-features --features "rodio-backend"
- run: cargo build --locked --no-default-features --features "sdl-backend"
- run: cargo build --locked --no-default-features --features "gstreamer-backend"
- run: cargo build --workspace --examples
- run: cargo test --workspace
- run: cargo install cargo-hack
- run: cargo hack --workspace --remove-dev-deps
- run: cargo build -p librespot-core --no-default-features
- run: cargo build -p librespot-core
- run: cargo hack build --each-feature -p librespot-audio
- run: cargo build -p librespot-connect
- run: cargo build -p librespot-connect --no-default-features --features with-dns-sd
- run: cargo hack build --locked --each-feature
test-windows:
needs: fmt
name: cargo build (${{ matrix.os }})
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [windows-latest]
toolchain: [stable]
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
toolchain: ${{ matrix.toolchain }}
profile: minimal
override: true
- name: Get Rustc version
id: get-rustc-version
run: echo "::set-output name=version::$(rustc -V)"
shell: bash
- name: Cache Rust dependencies
uses: actions/cache@v2
with:
path: |
~/.cargo/registry/index
~/.cargo/registry/cache
~/.cargo/git
target
key: ${{ runner.os }}-${{ steps.get-rustc-version.outputs.version }}-${{ hashFiles('**/Cargo.lock') }}
- run: cargo build --workspace --examples
- run: cargo test --workspace
- run: cargo install cargo-hack
- run: cargo hack --workspace --remove-dev-deps
- run: cargo build --no-default-features
- run: cargo build
test-cross-arm:
needs: fmt
@ -96,6 +161,7 @@ jobs:
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install toolchain
uses: actions-rs/toolchain@v1
with:
@ -103,6 +169,12 @@
target: ${{ matrix.target }}
toolchain: ${{ matrix.toolchain }}
override: true
- name: Get Rustc version
id: get-rustc-version
run: echo "::set-output name=version::$(rustc -V)"
shell: bash
- name: Cache Rust dependencies
uses: actions/cache@v2
with:
@ -111,7 +183,7 @@
~/.cargo/registry/cache
~/.cargo/git
target
key: ${{ runner.os }}-build-${{ hashFiles('**/Cargo.lock') }}
key: ${{ runner.os }}-${{ matrix.target }}-${{ steps.get-rustc-version.outputs.version }}-${{ hashFiles('**/Cargo.lock') }}
- name: Install cross
run: cargo install cross || true
- name: Build


@ -13,7 +13,7 @@ curl https://sh.rustup.rs -sSf | sh
Follow any prompts it gives you to install Rust. Once that's done, Rust's standard tools should be set up and ready to use.
*Note: The current minimum required Rust version at the time of writing is 1.41, you can find the current minimum version specified in the `.github/workflow/test.yml` file.*
*Note: The current minimum required Rust version at the time of writing is 1.45, you can find the current minimum version specified in the `.github/workflow/test.yml` file.*
#### Additional Rust tools - `rustfmt`
To ensure a consistent codebase, we utilise [`rustfmt`](https://github.com/rust-lang/rustfmt), which is installed by default with `rustup` these days, else it can be installed manually with:
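(For reference: on a rustup-managed toolchain, `rustup component add rustfmt` installs the component.)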

Cargo.lock (generated, 1992 lines changed)

File diff suppressed because it is too large


@ -23,18 +23,24 @@ doc = false
[dependencies.librespot-audio]
path = "audio"
version = "0.1.6"
[dependencies.librespot-connect]
path = "connect"
version = "0.1.6"
[dependencies.librespot-core]
path = "core"
version = "0.1.6"
features = ["apresolve"]
[dependencies.librespot-metadata]
path = "metadata"
version = "0.1.6"
[dependencies.librespot-playback]
path = "playback"
version = "0.1.6"
[dependencies.librespot-protocol]
path = "protocol"
version = "0.1.6"
@ -42,29 +48,23 @@ version = "0.1.6"
[dependencies]
base64 = "0.13"
env_logger = {version = "0.8", default-features = false, features = ["termcolor","humantime","atty"]}
futures = "0.1"
futures-util = { version = "0.3", default_features = false }
getopts = "0.2"
hyper = "0.11"
log = "0.4"
num-bigint = "0.3"
protobuf = "~2.14.0"
rand = "0.7"
rpassword = "5.0"
tokio-core = "0.1"
tokio-io = "0.1"
tokio-process = "0.2"
tokio-signal = "0.2"
url = "1.7"
sha-1 = "0.8"
hex = "0.4" hex = "0.4"
hyper = "0.14"
log = "0.4"
rpassword = "5.0"
tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros", "signal", "sync", "process"] }
url = "2.1"
sha-1 = "0.9"
[features] [features]
alsa-backend = ["librespot-playback/alsa-backend"] alsa-backend = ["librespot-playback/alsa-backend"]
portaudio-backend = ["librespot-playback/portaudio-backend"] portaudio-backend = ["librespot-playback/portaudio-backend"]
pulseaudio-backend = ["librespot-playback/pulseaudio-backend"] pulseaudio-backend = ["librespot-playback/pulseaudio-backend"]
jackaudio-backend = ["librespot-playback/jackaudio-backend"] jackaudio-backend = ["librespot-playback/jackaudio-backend"]
rodiojack-backend = ["librespot-playback/rodiojack-backend"]
rodio-backend = ["librespot-playback/rodio-backend"] rodio-backend = ["librespot-playback/rodio-backend"]
rodiojack-backend = ["librespot-playback/rodiojack-backend"]
sdl-backend = ["librespot-playback/sdl-backend"] sdl-backend = ["librespot-playback/sdl-backend"]
gstreamer-backend = ["librespot-playback/gstreamer-backend"] gstreamer-backend = ["librespot-playback/gstreamer-backend"]
@ -73,7 +73,7 @@ with-vorbis = ["librespot-audio/with-vorbis"]
with-dns-sd = ["librespot-connect/with-dns-sd"] with-dns-sd = ["librespot-connect/with-dns-sd"]
default = ["librespot-playback/rodio-backend"] default = ["rodio-backend"]
[package.metadata.deb] [package.metadata.deb]
maintainer = "librespot-org" maintainer = "librespot-org"


@ -12,19 +12,19 @@ version = "0.1.6"
[dependencies]
aes-ctr = "0.6"
bit-set = "0.5"
byteorder = "1.4"
byteorder = "1.3"
bytes = "1.0"
bytes = "0.4"
cfg-if = "1"
futures = "0.1"
lewton = "0.10"
ogg = "0.8"
log = "0.4"
num-bigint = "0.3"
futures-util = { version = "0.3", default_features = false }
num-traits = "0.2"
ogg = "0.8"
tempfile = "3.1"
tokio = { version = "1", features = ["sync", "macros"] }
zerocopy = "0.3"
librespot-tremor = { version = "0.2.0", optional = true }
librespot-tremor = { version = "0.2", optional = true }
vorbis = { version ="0.0.14", optional = true }
vorbis = { version ="0.0", optional = true }
[features]
with-tremor = ["librespot-tremor"]

audio/src/convert.rs (new file, 56 lines)

@ -0,0 +1,56 @@
use zerocopy::AsBytes;
#[derive(AsBytes, Copy, Clone, Debug)]
#[allow(non_camel_case_types)]
#[repr(transparent)]
pub struct i24([u8; 3]);
impl i24 {
fn pcm_from_i32(sample: i32) -> Self {
// drop the least significant byte
let [a, b, c, _d] = (sample >> 8).to_le_bytes();
i24([a, b, c])
}
}
// Losslessly represent [-1.0, 1.0] to [$type::MIN, $type::MAX] while maintaining DC linearity.
macro_rules! convert_samples_to {
($type: ident, $samples: expr) => {
convert_samples_to!($type, $samples, 0)
};
($type: ident, $samples: expr, $drop_bits: expr) => {
$samples
.iter()
.map(|sample| {
// Losslessly represent [-1.0, 1.0] to [$type::MIN, $type::MAX]
// while maintaining DC linearity. There is nothing to be gained
// by doing this in f64, as the significand of a f32 is 24 bits,
// just like the maximum bit depth we are converting to.
let int_value = *sample * (std::$type::MAX as f32 + 0.5) - 0.5;
// Casting floats to ints truncates by default, which results
// in larger quantization error than rounding arithmetically.
// Flooring is faster, but again with larger error.
int_value.round() as $type >> $drop_bits
})
.collect()
};
}
pub fn to_s32(samples: &[f32]) -> Vec<i32> {
convert_samples_to!(i32, samples)
}
pub fn to_s24(samples: &[f32]) -> Vec<i32> {
convert_samples_to!(i32, samples, 8)
}
pub fn to_s24_3(samples: &[f32]) -> Vec<i24> {
to_s32(samples)
.iter()
.map(|sample| i24::pcm_from_i32(*sample))
.collect()
}
pub fn to_s16(samples: &[f32]) -> Vec<i16> {
convert_samples_to!(i16, samples)
}
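To make the mapping above concrete, here is a minimal standalone check of the same 16-bit formula (an illustrative sketch, not part of the commit; the helper name is made up):

// Same formula as to_s16 above: scale [-1.0, 1.0] by (i16::MAX + 0.5), shift by -0.5, round.
fn to_s16_one(sample: f32) -> i16 {
    (sample * (i16::MAX as f32 + 0.5) - 0.5).round() as i16
}

fn main() {
    assert_eq!(to_s16_one(1.0), i16::MAX); // 1.0 maps exactly to 32767
    assert_eq!(to_s16_one(-1.0), i16::MIN); // -1.0 maps exactly to -32768
}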

File diff suppressed because it is too large

audio/src/fetch/mod.rs (new file, 511 lines)

@ -0,0 +1,511 @@
mod receive;
use std::cmp::{max, min};
use std::fs;
use std::io::{self, Read, Seek, SeekFrom};
use std::sync::atomic::{self, AtomicUsize};
use std::sync::{Arc, Condvar, Mutex};
use std::time::{Duration, Instant};
use byteorder::{BigEndian, ByteOrder};
use futures_util::{future, StreamExt, TryFutureExt, TryStreamExt};
use librespot_core::channel::{ChannelData, ChannelError, ChannelHeaders};
use librespot_core::session::Session;
use librespot_core::spotify_id::FileId;
use tempfile::NamedTempFile;
use tokio::sync::{mpsc, oneshot};
use self::receive::{audio_file_fetch, request_range};
use crate::range_set::{Range, RangeSet};
const MINIMUM_DOWNLOAD_SIZE: usize = 1024 * 16;
// The minimum size of a block that is requested from the Spotify servers in one request.
// This is the block size that is typically requested while doing a seek() on a file.
// Note: smaller requests can happen if part of the block is downloaded already.
const INITIAL_DOWNLOAD_SIZE: usize = 1024 * 16;
// The amount of data that is requested when initially opening a file.
// Note: if the file is opened to play from the beginning, the amount of data to
// read ahead is requested in addition to this amount. If the file is opened to seek to
// another position, then only this amount is requested on the first request.
const INITIAL_PING_TIME_ESTIMATE_SECONDS: f64 = 0.5;
// The ping time that is used for calculations before a ping time was actually measured.
const MAXIMUM_ASSUMED_PING_TIME_SECONDS: f64 = 1.5;
// If the measured ping time to the Spotify server is larger than this value, it is capped
// to avoid run-away block sizes and pre-fetching.
pub const READ_AHEAD_BEFORE_PLAYBACK_SECONDS: f64 = 1.0;
// Before playback starts, this many seconds of data must be present.
// Note: the calculations are done using the nominal bitrate of the file. The actual amount
// of audio data may be larger or smaller.
pub const READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS: f64 = 2.0;
// Same as READ_AHEAD_BEFORE_PLAYBACK_SECONDS, but the time is taken as a factor of the ping
// time to the Spotify server.
// Both, READ_AHEAD_BEFORE_PLAYBACK_SECONDS and READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS are
// obeyed.
// Note: the calculations are done using the nominal bitrate of the file. The actual amount
// of audio data may be larger or smaller.
pub const READ_AHEAD_DURING_PLAYBACK_SECONDS: f64 = 5.0;
// While playing back, this many seconds of data ahead of the current read position are
// requested.
// Note: the calculations are done using the nominal bitrate of the file. The actual amount
// of audio data may be larger or smaller.
pub const READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS: f64 = 10.0;
// Same as READ_AHEAD_DURING_PLAYBACK_SECONDS, but the time is taken as a factor of the ping
// time to the Spotify server.
// Note: the calculations are done using the nominal bitrate of the file. The actual amount
// of audio data may be larger or smaller.
const PREFETCH_THRESHOLD_FACTOR: f64 = 4.0;
// If the amount of data that is pending (requested but not received) is less than a certain amount,
// data is pre-fetched in addition to the read ahead settings above. The threshold for requesting more
// data is calculated as
// <pending bytes> < PREFETCH_THRESHOLD_FACTOR * <ping time> * <nominal data rate>
const FAST_PREFETCH_THRESHOLD_FACTOR: f64 = 1.5;
// Similar to PREFETCH_THRESHOLD_FACTOR, but it also takes the current download rate into account.
// The formula used is
// <pending bytes> < FAST_PREFETCH_THRESHOLD_FACTOR * <ping time> * <measured download rate>
// This mechanism allows for fast downloading of the remainder of the file. The number should be larger
// than 1 so the download rate ramps up until the bandwidth is saturated. The larger the value, the faster
// the download rate ramps up. However, this comes at the cost that it might hurt ping-time if a seek is
// performed while downloading. Values smaller than 1 cause the download rate to collapse and effectively
// only PREFETCH_THRESHOLD_FACTOR is in effect. Thus, set to zero if bandwidth saturation is not wanted.
const MAX_PREFETCH_REQUESTS: usize = 4;
// Limit the number of requests that are pending simultaneously before pre-fetching data. Pending
// requests share bandwidth. Thus, having too many requests can lead to the one that is needed next
// for playback to be delayed leading to a buffer underrun. This limit has the effect that a new
// pre-fetch request is only sent if less than MAX_PREFETCH_REQUESTS are pending.
pub enum AudioFile {
Cached(fs::File),
Streaming(AudioFileStreaming),
}
#[derive(Debug)]
enum StreamLoaderCommand {
Fetch(Range), // signal the stream loader to fetch a range of the file
RandomAccessMode(), // optimise download strategy for random access
StreamMode(), // optimise download strategy for streaming
Close(), // terminate and don't load any more data
}
#[derive(Clone)]
pub struct StreamLoaderController {
channel_tx: Option<mpsc::UnboundedSender<StreamLoaderCommand>>,
stream_shared: Option<Arc<AudioFileShared>>,
file_size: usize,
}
impl StreamLoaderController {
pub fn len(&self) -> usize {
self.file_size
}
pub fn is_empty(&self) -> bool {
self.file_size == 0
}
pub fn range_available(&self, range: Range) -> bool {
if let Some(ref shared) = self.stream_shared {
let download_status = shared.download_status.lock().unwrap();
range.length
<= download_status
.downloaded
.contained_length_from_value(range.start)
} else {
range.length <= self.len() - range.start
}
}
pub fn range_to_end_available(&self) -> bool {
self.stream_shared.as_ref().map_or(true, |shared| {
let read_position = shared.read_position.load(atomic::Ordering::Relaxed);
self.range_available(Range::new(read_position, self.len() - read_position))
})
}
pub fn ping_time_ms(&self) -> usize {
self.stream_shared.as_ref().map_or(0, |shared| {
shared.ping_time_ms.load(atomic::Ordering::Relaxed)
})
}
fn send_stream_loader_command(&self, command: StreamLoaderCommand) {
if let Some(ref channel) = self.channel_tx {
// ignore the error in case the channel has been closed already.
let _ = channel.send(command);
}
}
pub fn fetch(&self, range: Range) {
// signal the stream loader to fetch a range of the file
self.send_stream_loader_command(StreamLoaderCommand::Fetch(range));
}
pub fn fetch_blocking(&self, mut range: Range) {
// signal the stream loader to fetch a range of the file and block until it is loaded.
// ensure the range is within the file's bounds.
if range.start >= self.len() {
range.length = 0;
} else if range.end() > self.len() {
range.length = self.len() - range.start;
}
self.fetch(range);
if let Some(ref shared) = self.stream_shared {
let mut download_status = shared.download_status.lock().unwrap();
while range.length
> download_status
.downloaded
.contained_length_from_value(range.start)
{
download_status = shared
.cond
.wait_timeout(download_status, Duration::from_millis(1000))
.unwrap()
.0;
if range.length
> (download_status
.downloaded
.union(&download_status.requested)
.contained_length_from_value(range.start))
{
// For some reason, the requested range is neither downloaded nor requested.
// This could be due to a network error. Request it again.
self.fetch(range);
}
}
}
}
pub fn fetch_next(&self, length: usize) {
if let Some(ref shared) = self.stream_shared {
let range = Range {
start: shared.read_position.load(atomic::Ordering::Relaxed),
length,
};
self.fetch(range)
}
}
pub fn fetch_next_blocking(&self, length: usize) {
if let Some(ref shared) = self.stream_shared {
let range = Range {
start: shared.read_position.load(atomic::Ordering::Relaxed),
length,
};
self.fetch_blocking(range);
}
}
pub fn set_random_access_mode(&self) {
// optimise download strategy for random access
self.send_stream_loader_command(StreamLoaderCommand::RandomAccessMode());
}
pub fn set_stream_mode(&self) {
// optimise download strategy for streaming
self.send_stream_loader_command(StreamLoaderCommand::StreamMode());
}
pub fn close(&self) {
// terminate stream loading and don't load any more data for this file.
self.send_stream_loader_command(StreamLoaderCommand::Close());
}
}
pub struct AudioFileStreaming {
read_file: fs::File,
position: u64,
stream_loader_command_tx: mpsc::UnboundedSender<StreamLoaderCommand>,
shared: Arc<AudioFileShared>,
}
struct AudioFileDownloadStatus {
requested: RangeSet,
downloaded: RangeSet,
}
#[derive(Copy, Clone, PartialEq, Eq)]
enum DownloadStrategy {
RandomAccess(),
Streaming(),
}
struct AudioFileShared {
file_id: FileId,
file_size: usize,
stream_data_rate: usize,
cond: Condvar,
download_status: Mutex<AudioFileDownloadStatus>,
download_strategy: Mutex<DownloadStrategy>,
number_of_open_requests: AtomicUsize,
ping_time_ms: AtomicUsize,
read_position: AtomicUsize,
}
impl AudioFile {
pub async fn open(
session: &Session,
file_id: FileId,
bytes_per_second: usize,
play_from_beginning: bool,
) -> Result<AudioFile, ChannelError> {
if let Some(file) = session.cache().and_then(|cache| cache.file(file_id)) {
debug!("File {} already in cache", file_id);
return Ok(AudioFile::Cached(file));
}
debug!("Downloading file {}", file_id);
let (complete_tx, complete_rx) = oneshot::channel();
let mut initial_data_length = if play_from_beginning {
INITIAL_DOWNLOAD_SIZE
+ max(
(READ_AHEAD_DURING_PLAYBACK_SECONDS * bytes_per_second as f64) as usize,
(INITIAL_PING_TIME_ESTIMATE_SECONDS
* READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS
* bytes_per_second as f64) as usize,
)
} else {
INITIAL_DOWNLOAD_SIZE
};
if initial_data_length % 4 != 0 {
initial_data_length += 4 - (initial_data_length % 4);
}
let (headers, data) = request_range(session, file_id, 0, initial_data_length).split();
let streaming = AudioFileStreaming::open(
session.clone(),
data,
initial_data_length,
Instant::now(),
headers,
file_id,
complete_tx,
bytes_per_second,
);
let session_ = session.clone();
session.spawn(complete_rx.map_ok(move |mut file| {
if let Some(cache) = session_.cache() {
debug!("File {} complete, saving to cache", file_id);
cache.save_file(file_id, &mut file);
} else {
debug!("File {} complete", file_id);
}
}));
Ok(AudioFile::Streaming(streaming.await?))
}
pub fn get_stream_loader_controller(&self) -> StreamLoaderController {
match self {
AudioFile::Streaming(ref stream) => StreamLoaderController {
channel_tx: Some(stream.stream_loader_command_tx.clone()),
stream_shared: Some(stream.shared.clone()),
file_size: stream.shared.file_size,
},
AudioFile::Cached(ref file) => StreamLoaderController {
channel_tx: None,
stream_shared: None,
file_size: file.metadata().unwrap().len() as usize,
},
}
}
pub fn is_cached(&self) -> bool {
matches!(self, AudioFile::Cached { .. })
}
}
impl AudioFileStreaming {
pub async fn open(
session: Session,
initial_data_rx: ChannelData,
initial_data_length: usize,
initial_request_sent_time: Instant,
headers: ChannelHeaders,
file_id: FileId,
complete_tx: oneshot::Sender<NamedTempFile>,
streaming_data_rate: usize,
) -> Result<AudioFileStreaming, ChannelError> {
let (_, data) = headers
.try_filter(|(id, _)| future::ready(*id == 0x3))
.next()
.await
.unwrap()?;
let size = BigEndian::read_u32(&data) as usize * 4;
let shared = Arc::new(AudioFileShared {
file_id,
file_size: size,
stream_data_rate: streaming_data_rate,
cond: Condvar::new(),
download_status: Mutex::new(AudioFileDownloadStatus {
requested: RangeSet::new(),
downloaded: RangeSet::new(),
}),
download_strategy: Mutex::new(DownloadStrategy::RandomAccess()), // start with random access mode until someone tells us otherwise
number_of_open_requests: AtomicUsize::new(0),
ping_time_ms: AtomicUsize::new(0),
read_position: AtomicUsize::new(0),
});
let mut write_file = NamedTempFile::new().unwrap();
write_file.as_file().set_len(size as u64).unwrap();
write_file.seek(SeekFrom::Start(0)).unwrap();
let read_file = write_file.reopen().unwrap();
//let (seek_tx, seek_rx) = mpsc::unbounded();
let (stream_loader_command_tx, stream_loader_command_rx) =
mpsc::unbounded_channel::<StreamLoaderCommand>();
session.spawn(audio_file_fetch(
session.clone(),
shared.clone(),
initial_data_rx,
initial_request_sent_time,
initial_data_length,
write_file,
stream_loader_command_rx,
complete_tx,
));
Ok(AudioFileStreaming {
read_file,
position: 0,
stream_loader_command_tx,
shared,
})
}
}
impl Read for AudioFileStreaming {
fn read(&mut self, output: &mut [u8]) -> io::Result<usize> {
let offset = self.position as usize;
if offset >= self.shared.file_size {
return Ok(0);
}
let length = min(output.len(), self.shared.file_size - offset);
let length_to_request = match *(self.shared.download_strategy.lock().unwrap()) {
DownloadStrategy::RandomAccess() => length,
DownloadStrategy::Streaming() => {
// Due to the read-ahead stuff, we potentially request more than the actual request demanded.
let ping_time_seconds =
0.0001 * self.shared.ping_time_ms.load(atomic::Ordering::Relaxed) as f64;
let length_to_request = length
+ max(
(READ_AHEAD_DURING_PLAYBACK_SECONDS * self.shared.stream_data_rate as f64)
as usize,
(READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS
* ping_time_seconds
* self.shared.stream_data_rate as f64) as usize,
);
min(length_to_request, self.shared.file_size - offset)
}
};
let mut ranges_to_request = RangeSet::new();
ranges_to_request.add_range(&Range::new(offset, length_to_request));
let mut download_status = self.shared.download_status.lock().unwrap();
ranges_to_request.subtract_range_set(&download_status.downloaded);
ranges_to_request.subtract_range_set(&download_status.requested);
for &range in ranges_to_request.iter() {
self.stream_loader_command_tx
.send(StreamLoaderCommand::Fetch(range))
.unwrap();
}
if length == 0 {
return Ok(0);
}
let mut download_message_printed = false;
while !download_status.downloaded.contains(offset) {
if let DownloadStrategy::Streaming() = *self.shared.download_strategy.lock().unwrap() {
if !download_message_printed {
debug!("Stream waiting for download of file position {}. Downloaded ranges: {}. Pending ranges: {}", offset, download_status.downloaded, download_status.requested.minus(&download_status.downloaded));
download_message_printed = true;
}
}
download_status = self
.shared
.cond
.wait_timeout(download_status, Duration::from_millis(1000))
.unwrap()
.0;
}
let available_length = download_status
.downloaded
.contained_length_from_value(offset);
assert!(available_length > 0);
drop(download_status);
self.position = self.read_file.seek(SeekFrom::Start(offset as u64)).unwrap();
let read_len = min(length, available_length);
let read_len = self.read_file.read(&mut output[..read_len])?;
if download_message_printed {
debug!(
"Read at postion {} completed. {} bytes returned, {} bytes were requested.",
offset,
read_len,
output.len()
);
}
self.position += read_len as u64;
self.shared
.read_position
.store(self.position as usize, atomic::Ordering::Relaxed);
Ok(read_len)
}
}
impl Seek for AudioFileStreaming {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
self.position = self.read_file.seek(pos)?;
// Do not seek past EOF
self.shared
.read_position
.store(self.position as usize, atomic::Ordering::Relaxed);
Ok(self.position)
}
}
impl Read for AudioFile {
fn read(&mut self, output: &mut [u8]) -> io::Result<usize> {
match *self {
AudioFile::Cached(ref mut file) => file.read(output),
AudioFile::Streaming(ref mut file) => file.read(output),
}
}
}
impl Seek for AudioFile {
fn seek(&mut self, pos: SeekFrom) -> io::Result<u64> {
match *self {
AudioFile::Cached(ref mut file) => file.seek(pos),
AudioFile::Streaming(ref mut file) => file.seek(pos),
}
}
}
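As a rough illustration of how the streaming read-ahead constants above combine (a sketch with assumed example numbers, not code from the commit; the function name is made up):

use std::cmp::max;

const READ_AHEAD_DURING_PLAYBACK_SECONDS: f64 = 5.0;
const READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS: f64 = 10.0;

// Bytes to request for a read of `requested` bytes while streaming:
// the larger of "5 s of audio" and "10 round trips of audio" is added on top.
fn bytes_to_request(requested: usize, ping_time_seconds: f64, bytes_per_second: usize) -> usize {
    requested
        + max(
            (READ_AHEAD_DURING_PLAYBACK_SECONDS * bytes_per_second as f64) as usize,
            (READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS * ping_time_seconds * bytes_per_second as f64)
                as usize,
        )
}

fn main() {
    // e.g. ~40 kB/s (320 kbps) and a 100 ms ping: 5 s of audio (200 kB) dominates
    // 10 round trips (40 kB), so roughly 200 kB is kept requested ahead of the reader.
    assert_eq!(bytes_to_request(4096, 0.1, 40_000), 204_096);
}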

audio/src/fetch/receive.rs (new file, 447 lines)

@ -0,0 +1,447 @@
use std::cmp::{max, min};
use std::io::{Seek, SeekFrom, Write};
use std::sync::{atomic, Arc};
use std::time::Instant;
use byteorder::{BigEndian, WriteBytesExt};
use bytes::Bytes;
use futures_util::StreamExt;
use librespot_core::channel::{Channel, ChannelData};
use librespot_core::session::Session;
use librespot_core::spotify_id::FileId;
use tempfile::NamedTempFile;
use tokio::sync::{mpsc, oneshot};
use crate::range_set::{Range, RangeSet};
use super::{AudioFileShared, DownloadStrategy, StreamLoaderCommand};
use super::{
FAST_PREFETCH_THRESHOLD_FACTOR, MAXIMUM_ASSUMED_PING_TIME_SECONDS, MAX_PREFETCH_REQUESTS,
MINIMUM_DOWNLOAD_SIZE, PREFETCH_THRESHOLD_FACTOR,
};
pub fn request_range(session: &Session, file: FileId, offset: usize, length: usize) -> Channel {
assert!(
offset % 4 == 0,
"Range request start positions must be aligned by 4 bytes."
);
assert!(
length % 4 == 0,
"Range request range lengths must be aligned by 4 bytes."
);
let start = offset / 4;
let end = (offset + length) / 4;
let (id, channel) = session.channel().allocate();
let mut data: Vec<u8> = Vec::new();
data.write_u16::<BigEndian>(id).unwrap();
data.write_u8(0).unwrap();
data.write_u8(1).unwrap();
data.write_u16::<BigEndian>(0x0000).unwrap();
data.write_u32::<BigEndian>(0x00000000).unwrap();
data.write_u32::<BigEndian>(0x00009C40).unwrap();
data.write_u32::<BigEndian>(0x00020000).unwrap();
data.write(&file.0).unwrap();
data.write_u32::<BigEndian>(start as u32).unwrap();
data.write_u32::<BigEndian>(end as u32).unwrap();
session.send_packet(0x8, data);
channel
}
struct PartialFileData {
offset: usize,
data: Bytes,
}
enum ReceivedData {
ResponseTimeMs(usize),
Data(PartialFileData),
}
async fn receive_data(
shared: Arc<AudioFileShared>,
file_data_tx: mpsc::UnboundedSender<ReceivedData>,
mut data_rx: ChannelData,
initial_data_offset: usize,
initial_request_length: usize,
request_sent_time: Instant,
) {
let mut data_offset = initial_data_offset;
let mut request_length = initial_request_length;
let old_number_of_request = shared
.number_of_open_requests
.fetch_add(1, atomic::Ordering::SeqCst);
let mut measure_ping_time = old_number_of_request == 0;
let result = loop {
let data = match data_rx.next().await {
Some(Ok(data)) => data,
Some(Err(e)) => break Err(e),
None => break Ok(()),
};
if measure_ping_time {
let duration = Instant::now() - request_sent_time;
let duration_ms: u64;
if 0.001 * (duration.as_millis() as f64) > MAXIMUM_ASSUMED_PING_TIME_SECONDS {
duration_ms = (MAXIMUM_ASSUMED_PING_TIME_SECONDS * 1000.0) as u64;
} else {
duration_ms = duration.as_millis() as u64;
}
let _ = file_data_tx.send(ReceivedData::ResponseTimeMs(duration_ms as usize));
measure_ping_time = false;
}
let data_size = data.len();
let _ = file_data_tx.send(ReceivedData::Data(PartialFileData {
offset: data_offset,
data,
}));
data_offset += data_size;
if request_length < data_size {
warn!(
"Data receiver for range {} (+{}) received more data from server than requested.",
initial_data_offset, initial_request_length
);
request_length = 0;
} else {
request_length -= data_size;
}
if request_length == 0 {
break Ok(());
}
};
if request_length > 0 {
let missing_range = Range::new(data_offset, request_length);
let mut download_status = shared.download_status.lock().unwrap();
download_status.requested.subtract_range(&missing_range);
shared.cond.notify_all();
}
shared
.number_of_open_requests
.fetch_sub(1, atomic::Ordering::SeqCst);
if result.is_err() {
warn!(
"Error from channel for data receiver for range {} (+{}).",
initial_data_offset, initial_request_length
);
} else if request_length > 0 {
warn!(
"Data receiver for range {} (+{}) received less data from server than requested.",
initial_data_offset, initial_request_length
);
}
}
struct AudioFileFetch {
session: Session,
shared: Arc<AudioFileShared>,
output: Option<NamedTempFile>,
file_data_tx: mpsc::UnboundedSender<ReceivedData>,
complete_tx: Option<oneshot::Sender<NamedTempFile>>,
network_response_times_ms: Vec<usize>,
}
// Might be replaced by enum from std once stable
#[derive(PartialEq, Eq)]
enum ControlFlow {
Break,
Continue,
}
impl AudioFileFetch {
fn get_download_strategy(&mut self) -> DownloadStrategy {
*(self.shared.download_strategy.lock().unwrap())
}
fn download_range(&mut self, mut offset: usize, mut length: usize) {
if length < MINIMUM_DOWNLOAD_SIZE {
length = MINIMUM_DOWNLOAD_SIZE;
}
// ensure the values are within the bounds and align them by 4 for the spotify protocol.
if offset >= self.shared.file_size {
return;
}
if length == 0 {
return;
}
if offset + length > self.shared.file_size {
length = self.shared.file_size - offset;
}
if offset % 4 != 0 {
length += offset % 4;
offset -= offset % 4;
}
if length % 4 != 0 {
length += 4 - (length % 4);
}
let mut ranges_to_request = RangeSet::new();
ranges_to_request.add_range(&Range::new(offset, length));
let mut download_status = self.shared.download_status.lock().unwrap();
ranges_to_request.subtract_range_set(&download_status.downloaded);
ranges_to_request.subtract_range_set(&download_status.requested);
for range in ranges_to_request.iter() {
let (_headers, data) = request_range(
&self.session,
self.shared.file_id,
range.start,
range.length,
)
.split();
download_status.requested.add_range(range);
self.session.spawn(receive_data(
self.shared.clone(),
self.file_data_tx.clone(),
data,
range.start,
range.length,
Instant::now(),
));
}
}
fn pre_fetch_more_data(&mut self, bytes: usize, max_requests_to_send: usize) {
let mut bytes_to_go = bytes;
let mut requests_to_go = max_requests_to_send;
while bytes_to_go > 0 && requests_to_go > 0 {
// determine what is still missing
let mut missing_data = RangeSet::new();
missing_data.add_range(&Range::new(0, self.shared.file_size));
{
let download_status = self.shared.download_status.lock().unwrap();
missing_data.subtract_range_set(&download_status.downloaded);
missing_data.subtract_range_set(&download_status.requested);
}
// download data from after the current read position first
let mut tail_end = RangeSet::new();
let read_position = self.shared.read_position.load(atomic::Ordering::Relaxed);
tail_end.add_range(&Range::new(
read_position,
self.shared.file_size - read_position,
));
let tail_end = tail_end.intersection(&missing_data);
if !tail_end.is_empty() {
let range = tail_end.get_range(0);
let offset = range.start;
let length = min(range.length, bytes_to_go);
self.download_range(offset, length);
requests_to_go -= 1;
bytes_to_go -= length;
} else if !missing_data.is_empty() {
// ok, the tail is downloaded, download something from the beginning.
let range = missing_data.get_range(0);
let offset = range.start;
let length = min(range.length, bytes_to_go);
self.download_range(offset, length);
requests_to_go -= 1;
bytes_to_go -= length;
} else {
return;
}
}
}
fn handle_file_data(&mut self, data: ReceivedData) -> ControlFlow {
match data {
ReceivedData::ResponseTimeMs(response_time_ms) => {
trace!("Ping time estimated as: {} ms.", response_time_ms);
// record the response time
self.network_response_times_ms.push(response_time_ms);
// prune old response times. Keep at most three.
while self.network_response_times_ms.len() > 3 {
self.network_response_times_ms.remove(0);
}
// stats::median is experimental. So we calculate the median of up to three ourselves.
let ping_time_ms: usize = match self.network_response_times_ms.len() {
1 => self.network_response_times_ms[0] as usize,
2 => {
((self.network_response_times_ms[0] + self.network_response_times_ms[1])
/ 2) as usize
}
3 => {
let mut times = self.network_response_times_ms.clone();
times.sort_unstable();
times[1]
}
_ => unreachable!(),
};
// store our new estimate for everyone to see
self.shared
.ping_time_ms
.store(ping_time_ms, atomic::Ordering::Relaxed);
}
ReceivedData::Data(data) => {
self.output
.as_mut()
.unwrap()
.seek(SeekFrom::Start(data.offset as u64))
.unwrap();
self.output
.as_mut()
.unwrap()
.write_all(data.data.as_ref())
.unwrap();
let mut download_status = self.shared.download_status.lock().unwrap();
let received_range = Range::new(data.offset, data.data.len());
download_status.downloaded.add_range(&received_range);
self.shared.cond.notify_all();
let full = download_status.downloaded.contained_length_from_value(0)
>= self.shared.file_size;
drop(download_status);
if full {
self.finish();
return ControlFlow::Break;
}
}
}
ControlFlow::Continue
}
fn handle_stream_loader_command(&mut self, cmd: StreamLoaderCommand) -> ControlFlow {
match cmd {
StreamLoaderCommand::Fetch(request) => {
self.download_range(request.start, request.length);
}
StreamLoaderCommand::RandomAccessMode() => {
*(self.shared.download_strategy.lock().unwrap()) = DownloadStrategy::RandomAccess();
}
StreamLoaderCommand::StreamMode() => {
*(self.shared.download_strategy.lock().unwrap()) = DownloadStrategy::Streaming();
}
StreamLoaderCommand::Close() => return ControlFlow::Break,
}
ControlFlow::Continue
}
fn finish(&mut self) {
let mut output = self.output.take().unwrap();
let complete_tx = self.complete_tx.take().unwrap();
output.seek(SeekFrom::Start(0)).unwrap();
let _ = complete_tx.send(output);
}
}
pub(super) async fn audio_file_fetch(
session: Session,
shared: Arc<AudioFileShared>,
initial_data_rx: ChannelData,
initial_request_sent_time: Instant,
initial_data_length: usize,
output: NamedTempFile,
mut stream_loader_command_rx: mpsc::UnboundedReceiver<StreamLoaderCommand>,
complete_tx: oneshot::Sender<NamedTempFile>,
) {
let (file_data_tx, mut file_data_rx) = mpsc::unbounded_channel();
{
let requested_range = Range::new(0, initial_data_length);
let mut download_status = shared.download_status.lock().unwrap();
download_status.requested.add_range(&requested_range);
}
session.spawn(receive_data(
shared.clone(),
file_data_tx.clone(),
initial_data_rx,
0,
initial_data_length,
initial_request_sent_time,
));
let mut fetch = AudioFileFetch {
session,
shared,
output: Some(output),
file_data_tx,
complete_tx: Some(complete_tx),
network_response_times_ms: Vec::new(),
};
loop {
tokio::select! {
cmd = stream_loader_command_rx.recv() => {
if cmd.map_or(true, |cmd| fetch.handle_stream_loader_command(cmd) == ControlFlow::Break) {
break;
}
},
data = file_data_rx.recv() => {
if data.map_or(true, |data| fetch.handle_file_data(data) == ControlFlow::Break) {
break;
}
}
}
if fetch.get_download_strategy() == DownloadStrategy::Streaming() {
let number_of_open_requests = fetch
.shared
.number_of_open_requests
.load(atomic::Ordering::SeqCst);
if number_of_open_requests < MAX_PREFETCH_REQUESTS {
let max_requests_to_send = MAX_PREFETCH_REQUESTS - number_of_open_requests;
let bytes_pending: usize = {
let download_status = fetch.shared.download_status.lock().unwrap();
download_status
.requested
.minus(&download_status.downloaded)
.len()
};
let ping_time_seconds =
0.001 * fetch.shared.ping_time_ms.load(atomic::Ordering::Relaxed) as f64;
let download_rate = fetch.session.channel().get_download_rate_estimate();
let desired_pending_bytes = max(
(PREFETCH_THRESHOLD_FACTOR
* ping_time_seconds
* fetch.shared.stream_data_rate as f64) as usize,
(FAST_PREFETCH_THRESHOLD_FACTOR * ping_time_seconds * download_rate as f64)
as usize,
);
if bytes_pending < desired_pending_bytes {
fetch.pre_fetch_more_data(
desired_pending_bytes - bytes_pending,
max_requests_to_send,
);
}
}
}
}
}
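To illustrate the prefetch trigger that the loop above implements (a sketch with assumed example numbers, not code from the commit; the function name is made up):

// PREFETCH_THRESHOLD_FACTOR (4.0) works on the nominal stream rate,
// FAST_PREFETCH_THRESHOLD_FACTOR (1.5) on the measured download rate;
// more data is requested while fewer bytes than the larger threshold are pending.
fn should_prefetch(pending: usize, ping_s: f64, nominal_rate: usize, download_rate: usize) -> bool {
    let desired = f64::max(
        4.0 * ping_s * nominal_rate as f64,
        1.5 * ping_s * download_rate as f64,
    ) as usize;
    pending < desired
}

fn main() {
    // 100 ms ping, 40 kB/s nominal rate, 400 kB/s measured rate:
    // max(16 kB, 60 kB) = 60 kB, so 20 kB of pending requests triggers more prefetching.
    assert!(should_prefetch(20_000, 0.1, 40_000, 400_000));
}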


@ -1,8 +1,7 @@
extern crate lewton;
use self::lewton::inside_ogg::OggStreamReader;
use super::{AudioDecoder, AudioError, AudioPacket};
use lewton::inside_ogg::OggStreamReader;
use std::error;
use std::fmt;
use std::io::{Read, Seek};
@ -26,19 +25,21 @@ where
fn seek(&mut self, ms: i64) -> Result<(), AudioError> {
let absgp = ms * 44100 / 1000;
match self.0.seek_absgp_pg(absgp as u64) {
Ok(_) => return Ok(()),
Ok(_) => Ok(()),
Err(err) => return Err(AudioError::VorbisError(err.into())),
Err(err) => Err(AudioError::VorbisError(err.into())),
}
}
fn next_packet(&mut self) -> Result<Option<AudioPacket>, AudioError> {
use self::lewton::audio::AudioReadError::AudioIsHeader;
use lewton::audio::AudioReadError::AudioIsHeader;
use self::lewton::OggReadError::NoCapturePatternFound;
use lewton::OggReadError::NoCapturePatternFound;
use self::lewton::VorbisError::BadAudio;
use lewton::VorbisError::{BadAudio, OggError};
use self::lewton::VorbisError::OggError;
loop {
match self.0.read_dec_packet_itl() {
Ok(Some(packet)) => return Ok(Some(AudioPacket::Samples(packet))),
match self
.0
.read_dec_packet_generic::<lewton::samples::InterleavedSamples<f32>>()
{
Ok(Some(packet)) => return Ok(Some(AudioPacket::Samples(packet.samples))),
Ok(None) => return Ok(None),
Err(BadAudio(AudioIsHeader)) => (),


@ -1,31 +1,31 @@
#[macro_use]
#![allow(clippy::unused_io_amount, clippy::too_many_arguments)]
extern crate futures;
#[macro_use]
extern crate log;
extern crate aes_ctr;
pub mod convert;
extern crate bit_set;
extern crate byteorder;
extern crate bytes;
extern crate num_bigint;
extern crate num_traits;
extern crate tempfile;
extern crate librespot_core;
mod decrypt;
mod fetch;
#[cfg(not(any(feature = "with-tremor", feature = "with-vorbis")))]
use cfg_if::cfg_if;
mod lewton_decoder;
#[cfg(any(feature = "with-tremor", feature = "with-vorbis"))]
cfg_if! {
mod libvorbis_decoder;
if #[cfg(any(feature = "with-tremor", feature = "with-vorbis"))] {
mod libvorbis_decoder;
pub use crate::libvorbis_decoder::{VorbisDecoder, VorbisError};
} else {
mod lewton_decoder;
pub use lewton_decoder::{VorbisDecoder, VorbisError};
}
}
mod passthrough_decoder;
pub use passthrough_decoder::{PassthroughDecoder, PassthroughError};
mod range_set;
pub use decrypt::AudioDecrypt;
pub use fetch::{AudioFile, AudioFileOpen, StreamLoaderController};
pub use fetch::{AudioFile, StreamLoaderController};
pub use fetch::{
READ_AHEAD_BEFORE_PLAYBACK_ROUNDTRIPS, READ_AHEAD_BEFORE_PLAYBACK_SECONDS,
READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS, READ_AHEAD_DURING_PLAYBACK_SECONDS,
@ -33,12 +33,12 @@ pub use fetch::{
use std::fmt;
pub enum AudioPacket {
Samples(Vec<i16>),
Samples(Vec<f32>),
OggData(Vec<u8>),
}
impl AudioPacket {
pub fn samples(&self) -> &[i16] {
pub fn samples(&self) -> &[f32] {
match self {
AudioPacket::Samples(s) => s,
AudioPacket::OggData(_) => panic!("can't return OggData on samples"),
@ -60,12 +60,6 @@ impl AudioPacket {
}
}
#[cfg(not(any(feature = "with-tremor", feature = "with-vorbis")))]
pub use crate::lewton_decoder::{VorbisDecoder, VorbisError};
#[cfg(any(feature = "with-tremor", feature = "with-vorbis"))]
pub use libvorbis_decoder::{VorbisDecoder, VorbisError};
pub use passthrough_decoder::{PassthroughDecoder, PassthroughError};
#[derive(Debug)]
pub enum AudioError {
PassthroughError(PassthroughError),
@ -83,13 +77,13 @@ impl fmt::Display for AudioError {
impl From<VorbisError> for AudioError {
fn from(err: VorbisError) -> AudioError {
AudioError::VorbisError(VorbisError::from(err))
AudioError::VorbisError(err)
}
}
impl From<PassthroughError> for AudioError {
fn from(err: PassthroughError) -> AudioError {
AudioError::PassthroughError(PassthroughError::from(err))
AudioError::PassthroughError(err)
}
}
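A small sketch of what consuming the new f32-based packets looks like (illustrative only; the enum is re-declared locally so the snippet stands alone, and `peak` is a made-up helper):

#[allow(dead_code)]
enum AudioPacket {
    Samples(Vec<f32>),
    OggData(Vec<u8>),
}

// Peak absolute amplitude of a decoded packet; passthrough (Ogg) data is not decoded here.
fn peak(packet: &AudioPacket) -> f32 {
    match packet {
        AudioPacket::Samples(s) => s.iter().fold(0.0_f32, |acc, x| acc.max(x.abs())),
        AudioPacket::OggData(_) => 0.0,
    }
}

fn main() {
    let p = AudioPacket::Samples(vec![0.1, -0.7, 0.3]);
    assert!((peak(&p) - 0.7).abs() < f32::EPSILON);
}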


@ -1,7 +1,5 @@
#[cfg(feature = "with-tremor")] #[cfg(feature = "with-tremor")]
extern crate librespot_tremor as vorbis; use librespot_tremor as vorbis;
#[cfg(not(feature = "with-tremor"))]
extern crate vorbis;
use super::{AudioDecoder, AudioError, AudioPacket}; use super::{AudioDecoder, AudioError, AudioPacket};
use std::error; use std::error;
@ -39,7 +37,18 @@ where
fn next_packet(&mut self) -> Result<Option<AudioPacket>, AudioError> { fn next_packet(&mut self) -> Result<Option<AudioPacket>, AudioError> {
loop { loop {
match self.0.packets().next() { match self.0.packets().next() {
Some(Ok(packet)) => return Ok(Some(AudioPacket::Samples(packet.data))), Some(Ok(packet)) => {
// Losslessly represent [-32768, 32767] to [-1.0, 1.0] while maintaining DC linearity.
return Ok(Some(AudioPacket::Samples(
packet
.data
.iter()
.map(|sample| {
((*sample as f64 + 0.5) / (std::i16::MAX as f64 + 0.5)) as f32
})
.collect(),
)));
}
None => return Ok(None), None => return Ok(None),
Some(Err(vorbis::VorbisError::Hole)) => (), Some(Err(vorbis::VorbisError::Hole)) => (),


@ -5,75 +5,32 @@ use std::fmt;
use std::io::{Read, Seek};
use std::time::{SystemTime, UNIX_EPOCH};
fn write_headers<T: Read + Seek>(
fn get_header<T>(code: u8, rdr: &mut PacketReader<T>) -> Result<Box<[u8]>, PassthroughError>
rdr: &mut PacketReader<T>,
wtr: &mut PacketWriter<Vec<u8>>,
) -> Result<u32, PassthroughError> {
let mut stream_serial: u32 = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_millis() as u32;
// search for ident, comment, setup
get_header(1, rdr, wtr, &mut stream_serial, PacketWriteEndInfo::EndPage)?;
get_header(
3,
rdr,
wtr,
&mut stream_serial,
PacketWriteEndInfo::NormalPacket,
)?;
get_header(5, rdr, wtr, &mut stream_serial, PacketWriteEndInfo::EndPage)?;
// remove un-needed packets
rdr.delete_unread_packets();
return Ok(stream_serial);
}
fn get_header<T>(
code: u8,
rdr: &mut PacketReader<T>,
wtr: &mut PacketWriter<Vec<u8>>,
stream_serial: &mut u32,
info: PacketWriteEndInfo,
) -> Result<u32, PassthroughError>
where
T: Read + Seek,
{
let pck: Packet = rdr.read_packet_expected()?;
// set a unique serial number
if pck.stream_serial() != 0 {
*stream_serial = pck.stream_serial();
}
let pkt_type = pck.data[0];
debug!("Vorbis header type{}", &pkt_type);
// all headers are mandatory
if pkt_type != code {
return Err(PassthroughError(OggReadError::InvalidData));
}
// headers keep original granule number
Ok(pck.data.into_boxed_slice())
let absgp_page = pck.absgp_page();
wtr.write_packet(
pck.data.into_boxed_slice(),
*stream_serial,
info,
absgp_page,
)
.unwrap();
return Ok(*stream_serial);
}
pub struct PassthroughDecoder<R: Read + Seek> {
rdr: PacketReader<R>,
wtr: PacketWriter<Vec<u8>>,
lastgp_page: Option<u64>,
eos: bool,
absgp_page: u64,
bos: bool,
ofsgp_page: u64,
stream_serial: u32,
ident: Box<[u8]>,
comment: Box<[u8]>,
setup: Box<[u8]>,
}
pub struct PassthroughError(ogg::OggReadError);
@ -82,70 +39,126 @@ impl<R: Read + Seek> PassthroughDecoder<R> {
/// Constructs a new Decoder from a given implementation of `Read + Seek`.
pub fn new(rdr: R) -> Result<Self, PassthroughError> {
let mut rdr = PacketReader::new(rdr);
let mut wtr = PacketWriter::new(Vec::new());
let stream_serial = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_millis() as u32;
let stream_serial = write_headers(&mut rdr, &mut wtr)?;
info!("Starting passthrough track with serial {}", stream_serial); info!("Starting passthrough track with serial {}", stream_serial);
return Ok(PassthroughDecoder { // search for ident, comment, setup
let ident = get_header(1, &mut rdr)?;
let comment = get_header(3, &mut rdr)?;
let setup = get_header(5, &mut rdr)?;
// remove un-needed packets
rdr.delete_unread_packets();
Ok(PassthroughDecoder {
rdr,
wtr,
wtr: PacketWriter::new(Vec::new()),
lastgp_page: Some(0),
ofsgp_page: 0,
absgp_page: 0,
stream_serial,
});
ident,
comment,
setup,
eos: false,
bos: false,
})
}
}
impl<R: Read + Seek> AudioDecoder for PassthroughDecoder<R> {
fn seek(&mut self, ms: i64) -> Result<(), AudioError> {
info!("Seeking to {}", ms);
self.lastgp_page = match ms {
0 => Some(0),
// add an eos to previous stream if missing
_ => None,
if self.bos && !self.eos {
match self.rdr.read_packet() {
Ok(Some(pck)) => {
let absgp_page = pck.absgp_page() - self.ofsgp_page;
self.wtr
.write_packet(
pck.data.into_boxed_slice(),
self.stream_serial,
PacketWriteEndInfo::EndStream,
absgp_page,
)
.unwrap();
}
_ => warn! {"Cannot write EoS after seeking"},
}; };
}
self.eos = false;
self.bos = false;
self.ofsgp_page = 0;
self.stream_serial += 1;
// hard-coded to 44.1 kHz // hard-coded to 44.1 kHz
match self.rdr.seek_absgp(None, (ms * 44100 / 1000) as u64) { match self.rdr.seek_absgp(None, (ms * 44100 / 1000) as u64) {
Ok(_) => return Ok(()), Ok(_) => {
Err(err) => return Err(AudioError::PassthroughError(err.into())), // need to set some offset for next_page()
let pck = self.rdr.read_packet().unwrap().unwrap();
self.ofsgp_page = pck.absgp_page();
debug!("Seek to offset page {}", self.ofsgp_page);
Ok(())
}
Err(err) => Err(AudioError::PassthroughError(err.into())),
} }
}
}
fn next_packet(&mut self) -> Result<Option<AudioPacket>, AudioError> {
let mut skip = self.lastgp_page.is_none();
// write headers if we are (re)starting
if !self.bos {
self.wtr
.write_packet(
self.ident.clone(),
self.stream_serial,
PacketWriteEndInfo::EndPage,
0,
)
.unwrap();
self.wtr
.write_packet(
self.comment.clone(),
self.stream_serial,
PacketWriteEndInfo::NormalPacket,
0,
)
.unwrap();
self.wtr
.write_packet(
self.setup.clone(),
self.stream_serial,
PacketWriteEndInfo::EndPage,
0,
)
.unwrap();
self.bos = true;
debug!("Wrote Ogg headers");
}
loop {
let pck = match self.rdr.read_packet() {
Ok(Some(pck)) => pck,
Ok(None) | Err(OggReadError::NoCapturePatternFound) => {
info!("end of streaming");
return Ok(None);
}
Err(err) => return Err(AudioError::PassthroughError(err.into())),
};
let pckgp_page = pck.absgp_page();
let lastgp_page = self.lastgp_page.get_or_insert(pckgp_page);
// consume packets till next page to get a granule reference
// skip till we have audio and a calculable granule position
if skip {
if pckgp_page == 0 || pckgp_page == self.ofsgp_page {
if *lastgp_page == pckgp_page {
debug!("skipping packet");
continue;
}
skip = false;
info!("skipped at {}", pckgp_page);
}
// now we can calculate absolute granule
self.absgp_page += pckgp_page - *lastgp_page;
self.lastgp_page = Some(pckgp_page);
// set packet type
let inf = if pck.last_in_stream() {
self.lastgp_page = Some(0);
self.eos = true;
PacketWriteEndInfo::EndStream
} else if pck.last_in_page() {
PacketWriteEndInfo::EndPage
@ -158,13 +171,13 @@ impl<R: Read + Seek> AudioDecoder for PassthroughDecoder<R> {
pck.data.into_boxed_slice(),
self.stream_serial,
inf,
self.absgp_page,
pckgp_page - self.ofsgp_page,
)
.unwrap();
let data = self.wtr.inner_mut();
if data.len() > 0 {
if !data.is_empty() {
let result = AudioPacket::OggData(std::mem::take(data));
return Ok(Some(result));
}


@ -2,7 +2,7 @@ use std::cmp::{max, min};
use std::fmt; use std::fmt;
use std::slice::Iter; use std::slice::Iter;
#[derive(Copy, Clone)] #[derive(Copy, Clone, Debug)]
pub struct Range { pub struct Range {
pub start: usize, pub start: usize,
pub length: usize, pub length: usize,
@ -16,14 +16,11 @@ impl fmt::Display for Range {
impl Range { impl Range {
pub fn new(start: usize, length: usize) -> Range { pub fn new(start: usize, length: usize) -> Range {
return Range { Range { start, length }
start: start,
length: length,
};
} }
pub fn end(&self) -> usize { pub fn end(&self) -> usize {
return self.start + self.length; self.start + self.length
} }
} }
@ -50,23 +47,19 @@ impl RangeSet {
} }
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
return self.ranges.is_empty(); self.ranges.is_empty()
} }
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
let mut result = 0; self.ranges.iter().map(|r| r.length).sum()
for range in self.ranges.iter() {
result += range.length;
}
return result;
} }
pub fn get_range(&self, index: usize) -> Range { pub fn get_range(&self, index: usize) -> Range {
return self.ranges[index].clone(); self.ranges[index]
} }
pub fn iter(&self) -> Iter<Range> { pub fn iter(&self) -> Iter<Range> {
return self.ranges.iter(); self.ranges.iter()
} }
pub fn contains(&self, value: usize) -> bool { pub fn contains(&self, value: usize) -> bool {
@ -77,7 +70,7 @@ impl RangeSet {
return true; return true;
} }
} }
return false; false
} }
pub fn contained_length_from_value(&self, value: usize) -> usize { pub fn contained_length_from_value(&self, value: usize) -> usize {
@ -88,7 +81,7 @@ impl RangeSet {
return range.end() - value; return range.end() - value;
} }
} }
return 0; 0
} }
#[allow(dead_code)] #[allow(dead_code)]
@ -98,12 +91,12 @@ impl RangeSet {
return false; return false;
} }
} }
return true; true
} }
pub fn add_range(&mut self, range: &Range) { pub fn add_range(&mut self, range: &Range) {
if range.length <= 0 { if range.length == 0 {
// the interval is empty or invalid -> nothing to do. // the interval is empty -> nothing to do.
return; return;
} }
@ -111,7 +104,7 @@ impl RangeSet {
// the new range is clear of any ranges we already iterated over. // the new range is clear of any ranges we already iterated over.
if range.end() < self.ranges[index].start { if range.end() < self.ranges[index].start {
// the new range starts after anything we already passed and ends before the next range starts (they don't touch) -> insert it. // the new range starts after anything we already passed and ends before the next range starts (they don't touch) -> insert it.
self.ranges.insert(index, range.clone()); self.ranges.insert(index, *range);
return; return;
} else if range.start <= self.ranges[index].end() } else if range.start <= self.ranges[index].end()
&& self.ranges[index].start <= range.end() && self.ranges[index].start <= range.end()
@ -119,7 +112,7 @@ impl RangeSet {
// the new range overlaps (or touches) the first range. They are to be merged. // the new range overlaps (or touches) the first range. They are to be merged.
// In addition we might have to merge further ranges in as well. // In addition we might have to merge further ranges in as well.
let mut new_range = range.clone(); let mut new_range = *range;
while index < self.ranges.len() && self.ranges[index].start <= new_range.end() { while index < self.ranges.len() && self.ranges[index].start <= new_range.end() {
let new_end = max(new_range.end(), self.ranges[index].end()); let new_end = max(new_range.end(), self.ranges[index].end());
@ -134,7 +127,7 @@ impl RangeSet {
} }
// the new range is after everything else -> just add it // the new range is after everything else -> just add it
self.ranges.push(range.clone()); self.ranges.push(*range);
} }
#[allow(dead_code)] #[allow(dead_code)]
@ -148,11 +141,11 @@ impl RangeSet {
pub fn union(&self, other: &RangeSet) -> RangeSet { pub fn union(&self, other: &RangeSet) -> RangeSet {
let mut result = self.clone(); let mut result = self.clone();
result.add_range_set(other); result.add_range_set(other);
return result; result
} }
pub fn subtract_range(&mut self, range: &Range) { pub fn subtract_range(&mut self, range: &Range) {
if range.length <= 0 { if range.length == 0 {
return; return;
} }
@ -208,7 +201,7 @@ impl RangeSet {
pub fn minus(&self, other: &RangeSet) -> RangeSet { pub fn minus(&self, other: &RangeSet) -> RangeSet {
let mut result = self.clone(); let mut result = self.clone();
result.subtract_range_set(other); result.subtract_range_set(other);
return result; result
} }
pub fn intersection(&self, other: &RangeSet) -> RangeSet { pub fn intersection(&self, other: &RangeSet) -> RangeSet {
@ -244,6 +237,6 @@ impl RangeSet {
} }
} }
return result; result
} }
} }
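
The range_set.rs changes are Clippy-style cleanups: Range additionally derives Debug and is passed by value as Copy, trailing returns become tail expressions, and the manual length loop becomes an iterator sum. A condensed sketch of the resulting style (a standalone fragment with a small main, not the full RangeSet implementation):

#[derive(Copy, Clone, Debug)]
pub struct Range {
    pub start: usize,
    pub length: usize,
}

impl Range {
    pub fn new(start: usize, length: usize) -> Range {
        // Field-init shorthand instead of `start: start, length: length`.
        Range { start, length }
    }

    pub fn end(&self) -> usize {
        // Tail expression instead of an explicit `return`.
        self.start + self.length
    }
}

pub struct RangeSet {
    ranges: Vec<Range>,
}

impl RangeSet {
    pub fn is_empty(&self) -> bool {
        self.ranges.is_empty()
    }

    pub fn len(&self) -> usize {
        // Iterator sum replaces the manual accumulation loop.
        self.ranges.iter().map(|r| r.length).sum()
    }

    pub fn get_range(&self, index: usize) -> Range {
        // `Range` is `Copy`, so no `.clone()` is needed.
        self.ranges[index]
    }
}

fn main() {
    let set = RangeSet { ranges: vec![Range::new(0, 4), Range::new(10, 2)] };
    assert!(!set.is_empty());
    assert_eq!(set.len(), 6);
    assert_eq!(set.get_range(1).end(), 12);
}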

View file

@ -7,37 +7,40 @@ license = "MIT"
repository = "https://github.com/librespot-org/librespot" repository = "https://github.com/librespot-org/librespot"
edition = "2018" edition = "2018"
[dependencies.librespot-core]
path = "../core"
version = "0.1.6"
[dependencies.librespot-playback]
path = "../playback"
version = "0.1.6"
[dependencies.librespot-protocol]
path = "../protocol"
version = "0.1.6"
[dependencies] [dependencies]
aes-ctr = "0.6" aes-ctr = "0.6"
base64 = "0.13" base64 = "0.13"
block-modes = "0.7" block-modes = "0.7"
futures = "0.1" form_urlencoded = "1.0"
futures-core = "0.3"
futures-util = { version = "0.3", default_features = false }
hmac = "0.10" hmac = "0.10"
hyper = "0.11" hyper = { version = "0.14", features = ["server", "http1", "tcp"] }
libmdns = "0.6"
log = "0.4" log = "0.4"
num-bigint = "0.3"
protobuf = "~2.14.0" protobuf = "~2.14.0"
rand = "0.7" rand = "0.8"
serde = "1.0" serde = { version = "1.0", features = ["derive"] }
serde_derive = "1.0"
serde_json = "1.0" serde_json = "1.0"
sha-1 = "0.9" sha-1 = "0.9"
tokio-core = "0.1" tokio = { version = "1.0", features = ["macros", "rt", "sync"] }
url = "1.7" tokio-stream = { version = "0.1" }
url = "2.1"
dns-sd = { version = "0.1.3", optional = true } dns-sd = { version = "0.1.3", optional = true }
libmdns = { version = "0.2.7", optional = true }
[dependencies.librespot-core]
path = "../core"
version = "0.1.6"
[dependencies.librespot-playback]
path = "../playback"
version = "0.1.6"
[dependencies.librespot-protocol]
path = "../protocol"
version = "0.1.6"
[features] [features]
default = ["libmdns"]
with-dns-sd = ["dns-sd"] with-dns-sd = ["dns-sd"]
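
With both zeroconf backends now behind Cargo features, libmdns enabled by default and the DNS-SD/Avahi backend opt-in through with-dns-sd, a consumer of this crate selects the latter with Cargo's standard flags, for example: cargo build --no-default-features --features with-dns-sd.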

View file

@ -1,7 +1,7 @@
use crate::core::spotify_id::SpotifyId;
use crate::protocol::spirc::TrackRef; use crate::protocol::spirc::TrackRef;
use librespot_core::spotify_id::SpotifyId;
use serde; use serde::Deserialize;
#[derive(Deserialize, Debug)] #[derive(Deserialize, Debug)]
pub struct StationContext { pub struct StationContext {

View file

@ -1,32 +1,29 @@
use aes_ctr::cipher::generic_array::GenericArray; use aes_ctr::cipher::generic_array::GenericArray;
use aes_ctr::cipher::{NewStreamCipher, SyncStreamCipher}; use aes_ctr::cipher::{NewStreamCipher, SyncStreamCipher};
use aes_ctr::Aes128Ctr; use aes_ctr::Aes128Ctr;
use base64; use futures_core::Stream;
use futures::sync::mpsc;
use futures::{Future, Poll, Stream};
use hmac::{Hmac, Mac, NewMac}; use hmac::{Hmac, Mac, NewMac};
use hyper::server::{Http, Request, Response, Service}; use hyper::service::{make_service_fn, service_fn};
use hyper::{self, Get, Post, StatusCode}; use hyper::{Body, Method, Request, Response, StatusCode};
use serde_json::json;
use sha1::{Digest, Sha1}; use sha1::{Digest, Sha1};
use tokio::sync::{mpsc, oneshot};
#[cfg(feature = "with-dns-sd")] #[cfg(feature = "with-dns-sd")]
use dns_sd::DNSService; use dns_sd::DNSService;
#[cfg(not(feature = "with-dns-sd"))]
use libmdns;
use num_bigint::BigUint;
use rand;
use std::collections::BTreeMap;
use std::io;
use std::sync::Arc;
use tokio_core::reactor::Handle;
use url;
use librespot_core::authentication::Credentials; use librespot_core::authentication::Credentials;
use librespot_core::config::ConnectConfig; use librespot_core::config::ConnectConfig;
use librespot_core::diffie_hellman::{DH_GENERATOR, DH_PRIME}; use librespot_core::diffie_hellman::DhLocalKeys;
use librespot_core::util;
use std::borrow::Cow;
use std::collections::BTreeMap;
use std::convert::Infallible;
use std::io;
use std::net::{Ipv4Addr, SocketAddr};
use std::pin::Pin;
use std::sync::Arc;
use std::task::{Context, Poll};
type HmacSha1 = Hmac<Sha1>; type HmacSha1 = Hmac<Sha1>;
@ -35,8 +32,7 @@ struct Discovery(Arc<DiscoveryInner>);
struct DiscoveryInner { struct DiscoveryInner {
config: ConnectConfig, config: ConnectConfig,
device_id: String, device_id: String,
private_key: BigUint, keys: DhLocalKeys,
public_key: BigUint,
tx: mpsc::UnboundedSender<Credentials>, tx: mpsc::UnboundedSender<Credentials>,
} }
@ -45,31 +41,20 @@ impl Discovery {
config: ConnectConfig, config: ConnectConfig,
device_id: String, device_id: String,
) -> (Discovery, mpsc::UnboundedReceiver<Credentials>) { ) -> (Discovery, mpsc::UnboundedReceiver<Credentials>) {
let (tx, rx) = mpsc::unbounded(); let (tx, rx) = mpsc::unbounded_channel();
let key_data = util::rand_vec(&mut rand::thread_rng(), 95);
let private_key = BigUint::from_bytes_be(&key_data);
let public_key = util::powm(&DH_GENERATOR, &private_key, &DH_PRIME);
let discovery = Discovery(Arc::new(DiscoveryInner { let discovery = Discovery(Arc::new(DiscoveryInner {
config: config, config,
device_id: device_id, device_id,
private_key: private_key, keys: DhLocalKeys::random(&mut rand::thread_rng()),
public_key: public_key, tx,
tx: tx,
})); }));
(discovery, rx) (discovery, rx)
} }
}
impl Discovery { fn handle_get_info(&self, _: BTreeMap<Cow<'_, str>, Cow<'_, str>>) -> Response<hyper::Body> {
fn handle_get_info( let public_key = base64::encode(&self.0.keys.public_key());
&self,
_params: &BTreeMap<String, String>,
) -> ::futures::Finished<Response, hyper::Error> {
let public_key = self.0.public_key.to_bytes_be();
let public_key = base64::encode(&public_key);
let result = json!({ let result = json!({
"status": 101, "status": 101,
@ -91,29 +76,29 @@ impl Discovery {
}); });
let body = result.to_string(); let body = result.to_string();
::futures::finished(Response::new().with_body(body)) Response::new(Body::from(body))
} }
fn handle_add_user( fn handle_add_user(
&self, &self,
params: &BTreeMap<String, String>, params: BTreeMap<Cow<'_, str>, Cow<'_, str>>,
) -> ::futures::Finished<Response, hyper::Error> { ) -> Response<hyper::Body> {
let username = params.get("userName").unwrap(); let username = params.get("userName").unwrap().as_ref();
let encrypted_blob = params.get("blob").unwrap(); let encrypted_blob = params.get("blob").unwrap();
let client_key = params.get("clientKey").unwrap(); let client_key = params.get("clientKey").unwrap();
let encrypted_blob = base64::decode(encrypted_blob).unwrap(); let encrypted_blob = base64::decode(encrypted_blob.as_bytes()).unwrap();
let client_key = base64::decode(client_key).unwrap(); let shared_key = self
let client_key = BigUint::from_bytes_be(&client_key); .0
.keys
let shared_key = util::powm(&client_key, &self.0.private_key, &DH_PRIME); .shared_secret(&base64::decode(client_key.as_bytes()).unwrap());
let iv = &encrypted_blob[0..16]; let iv = &encrypted_blob[0..16];
let encrypted = &encrypted_blob[16..encrypted_blob.len() - 20]; let encrypted = &encrypted_blob[16..encrypted_blob.len() - 20];
let cksum = &encrypted_blob[encrypted_blob.len() - 20..encrypted_blob.len()]; let cksum = &encrypted_blob[encrypted_blob.len() - 20..encrypted_blob.len()];
let base_key = Sha1::digest(&shared_key.to_bytes_be()); let base_key = Sha1::digest(&shared_key);
let base_key = &base_key[..16]; let base_key = &base_key[..16];
let checksum_key = { let checksum_key = {
@ -130,7 +115,7 @@ impl Discovery {
let mut h = HmacSha1::new_varkey(&checksum_key).expect("HMAC can take key of any size"); let mut h = HmacSha1::new_varkey(&checksum_key).expect("HMAC can take key of any size");
h.update(encrypted); h.update(encrypted);
if let Err(_) = h.verify(cksum) { if h.verify(cksum).is_err() {
warn!("Login error for user {:?}: MAC mismatch", username); warn!("Login error for user {:?}: MAC mismatch", username);
let result = json!({ let result = json!({
"status": 102, "status": 102,
@ -139,7 +124,7 @@ impl Discovery {
}); });
let body = result.to_string(); let body = result.to_string();
return ::futures::finished(Response::new().with_body(body)); return Response::new(Body::from(body));
} }
let decrypted = { let decrypted = {
@ -153,9 +138,9 @@ impl Discovery {
}; };
let credentials = let credentials =
Credentials::with_blob(username.to_owned(), &decrypted, &self.0.device_id); Credentials::with_blob(username.to_string(), &decrypted, &self.0.device_id);
self.0.tx.unbounded_send(credentials).unwrap(); self.0.tx.send(credentials).unwrap();
let result = json!({ let result = json!({
"status": 101, "status": 101,
@ -164,49 +149,39 @@ impl Discovery {
}); });
let body = result.to_string(); let body = result.to_string();
::futures::finished(Response::new().with_body(body)) Response::new(Body::from(body))
} }
fn not_found(&self) -> ::futures::Finished<Response, hyper::Error> { fn not_found(&self) -> Response<hyper::Body> {
::futures::finished(Response::new().with_status(StatusCode::NotFound)) let mut res = Response::default();
*res.status_mut() = StatusCode::NOT_FOUND;
res
} }
}
impl Service for Discovery { async fn call(self, request: Request<Body>) -> hyper::Result<Response<Body>> {
type Request = Request;
type Response = Response;
type Error = hyper::Error;
type Future = Box<dyn Future<Item = Response, Error = hyper::Error>>;
fn call(&self, request: Request) -> Self::Future {
let mut params = BTreeMap::new(); let mut params = BTreeMap::new();
let (method, uri, _, _, body) = request.deconstruct(); let (parts, body) = request.into_parts();
if let Some(query) = uri.query() {
params.extend(url::form_urlencoded::parse(query.as_bytes()).into_owned()); if let Some(query) = parts.uri.query() {
let query_params = url::form_urlencoded::parse(query.as_bytes());
params.extend(query_params);
} }
if method != Get { if parts.method != Method::GET {
debug!("{:?} {:?} {:?}", method, uri.path(), params); debug!("{:?} {:?} {:?}", parts.method, parts.uri.path(), params);
} }
let this = self.clone(); let body = hyper::body::to_bytes(body).await?;
Box::new(
body.fold(Vec::new(), |mut acc, chunk| { params.extend(url::form_urlencoded::parse(&body));
acc.extend_from_slice(chunk.as_ref());
Ok::<_, hyper::Error>(acc) Ok(
}) match (parts.method, params.get("action").map(AsRef::as_ref)) {
.map(move |body| { (Method::GET, Some("getInfo")) => self.handle_get_info(params),
params.extend(url::form_urlencoded::parse(&body).into_owned()); (Method::POST, Some("addUser")) => self.handle_add_user(params),
params _ => self.not_found(),
}) },
.and_then(move |params| {
match (method, params.get("action").map(AsRef::as_ref)) {
(Get, Some("getInfo")) => this.handle_get_info(&params),
(Post, Some("addUser")) => this.handle_add_user(&params),
_ => this.not_found(),
}
}),
) )
} }
} }
@ -215,45 +190,40 @@ impl Service for Discovery {
pub struct DiscoveryStream { pub struct DiscoveryStream {
credentials: mpsc::UnboundedReceiver<Credentials>, credentials: mpsc::UnboundedReceiver<Credentials>,
_svc: DNSService, _svc: DNSService,
_close_tx: oneshot::Sender<Infallible>,
} }
#[cfg(not(feature = "with-dns-sd"))] #[cfg(not(feature = "with-dns-sd"))]
pub struct DiscoveryStream { pub struct DiscoveryStream {
credentials: mpsc::UnboundedReceiver<Credentials>, credentials: mpsc::UnboundedReceiver<Credentials>,
_svc: libmdns::Service, _svc: libmdns::Service,
_close_tx: oneshot::Sender<Infallible>,
} }
pub fn discovery( pub fn discovery(
handle: &Handle,
config: ConnectConfig, config: ConnectConfig,
device_id: String, device_id: String,
port: u16, port: u16,
) -> io::Result<DiscoveryStream> { ) -> io::Result<DiscoveryStream> {
let (discovery, creds_rx) = Discovery::new(config.clone(), device_id); let (discovery, creds_rx) = Discovery::new(config.clone(), device_id);
let (close_tx, close_rx) = oneshot::channel();
let serve = { let address = SocketAddr::new(Ipv4Addr::UNSPECIFIED.into(), port);
let http = Http::new();
http.serve_addr_handle(
&format!("0.0.0.0:{}", port).parse().unwrap(),
&handle,
move || Ok(discovery.clone()),
)
.unwrap()
};
let s_port = serve.incoming_ref().local_addr().port(); let make_service = make_service_fn(move |_| {
let discovery = discovery.clone();
async move { Ok::<_, hyper::Error>(service_fn(move |request| discovery.clone().call(request))) }
});
let server = hyper::Server::bind(&address).serve(make_service);
let s_port = server.local_addr().port();
debug!("Zeroconf server listening on 0.0.0.0:{}", s_port); debug!("Zeroconf server listening on 0.0.0.0:{}", s_port);
let server_future = { tokio::spawn(server.with_graceful_shutdown(async {
let handle = handle.clone(); close_rx.await.unwrap_err();
serve debug!("Shutting down discovery server");
.for_each(move |connection| { }));
handle.spawn(connection.then(|_| Ok(())));
Ok(())
})
.then(|_| Ok(()))
};
handle.spawn(server_future);
#[cfg(feature = "with-dns-sd")] #[cfg(feature = "with-dns-sd")]
let svc = DNSService::register( let svc = DNSService::register(
@ -267,7 +237,7 @@ pub fn discovery(
.unwrap(); .unwrap();
#[cfg(not(feature = "with-dns-sd"))] #[cfg(not(feature = "with-dns-sd"))]
let responder = libmdns::Responder::spawn(&handle)?; let responder = libmdns::Responder::spawn(&tokio::runtime::Handle::current())?;
#[cfg(not(feature = "with-dns-sd"))] #[cfg(not(feature = "with-dns-sd"))]
let svc = responder.register( let svc = responder.register(
@ -280,14 +250,14 @@ pub fn discovery(
Ok(DiscoveryStream { Ok(DiscoveryStream {
credentials: creds_rx, credentials: creds_rx,
_svc: svc, _svc: svc,
_close_tx: close_tx,
}) })
} }
impl Stream for DiscoveryStream { impl Stream for DiscoveryStream {
type Item = Credentials; type Item = Credentials;
type Error = ();
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> { fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
self.credentials.poll() self.credentials.poll_recv(cx)
} }
} }
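
The discovery server drops hyper 0.11's Service trait and the futures 0.1 combinators: request handling becomes plain async functions, the server is assembled from make_service_fn/service_fn, and a oneshot channel drives graceful shutdown. A minimal, self-contained sketch of that hyper 0.14 pattern (the handler, address and response body are illustrative, not librespot's, and tokio's macros/rt features are assumed):

use std::convert::Infallible;
use std::net::SocketAddr;

use hyper::service::{make_service_fn, service_fn};
use hyper::{Body, Request, Response, Server};
use tokio::sync::oneshot;

async fn handle(_req: Request<Body>) -> Result<Response<Body>, Infallible> {
    Ok(Response::new(Body::from("ok")))
}

#[tokio::main]
async fn main() {
    let addr = SocketAddr::from(([0, 0, 0, 0], 0));
    let (close_tx, close_rx) = oneshot::channel::<()>();

    // One service instance per connection; every request goes through `handle`.
    let make_svc = make_service_fn(|_conn| async {
        Ok::<_, Infallible>(service_fn(handle))
    });

    let server = Server::bind(&addr).serve(make_svc);
    println!("listening on {}", server.local_addr());

    // Completing (or dropping) `close_tx` resolves the shutdown future.
    let graceful = server.with_graceful_shutdown(async {
        let _ = close_rx.await;
    });

    // Keep the sender alive here; in the real code it lives in DiscoveryStream.
    let _keep_alive = close_tx;

    if let Err(e) = graceful.await {
        eprintln!("server error: {}", e);
    }
}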

View file

@ -1,34 +1,9 @@
#[macro_use] #[macro_use]
extern crate log; extern crate log;
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate serde_derive;
extern crate serde;
extern crate base64; use librespot_core as core;
extern crate futures; use librespot_playback as playback;
extern crate hyper; use librespot_protocol as protocol;
extern crate num_bigint;
extern crate protobuf;
extern crate rand;
extern crate tokio_core;
extern crate url;
extern crate aes_ctr;
extern crate block_modes;
extern crate hmac;
extern crate sha1;
#[cfg(feature = "with-dns-sd")]
extern crate dns_sd;
#[cfg(not(feature = "with-dns-sd"))]
extern crate libmdns;
extern crate librespot_core;
extern crate librespot_playback as playback;
extern crate librespot_protocol as protocol;
pub mod context; pub mod context;
pub mod discovery; pub mod discovery;

View file

@ -1,26 +1,26 @@
use std; use std::future::Future;
use std::pin::Pin;
use std::time::{SystemTime, UNIX_EPOCH}; use std::time::{SystemTime, UNIX_EPOCH};
use futures::future;
use futures::sync::mpsc;
use futures::{Async, Future, Poll, Sink, Stream};
use protobuf::{self, Message};
use rand;
use rand::seq::SliceRandom;
use serde_json;
use crate::context::StationContext; use crate::context::StationContext;
use crate::core::config::{ConnectConfig, VolumeCtrl};
use crate::core::mercury::{MercuryError, MercurySender};
use crate::core::session::Session;
use crate::core::spotify_id::{SpotifyAudioType, SpotifyId, SpotifyIdError};
use crate::core::util::SeqGenerator;
use crate::core::version;
use crate::playback::mixer::Mixer; use crate::playback::mixer::Mixer;
use crate::playback::player::{Player, PlayerEvent, PlayerEventChannel}; use crate::playback::player::{Player, PlayerEvent, PlayerEventChannel};
use crate::protocol; use crate::protocol;
use crate::protocol::spirc::{DeviceState, Frame, MessageType, PlayStatus, State, TrackRef}; use crate::protocol::spirc::{DeviceState, Frame, MessageType, PlayStatus, State, TrackRef};
use librespot_core::config::{ConnectConfig, VolumeCtrl};
use librespot_core::mercury::MercuryError; use futures_util::future::{self, FusedFuture};
use librespot_core::session::Session; use futures_util::stream::FusedStream;
use librespot_core::spotify_id::{SpotifyAudioType, SpotifyId, SpotifyIdError}; use futures_util::{FutureExt, StreamExt};
use librespot_core::util::url_encode; use protobuf::{self, Message};
use librespot_core::util::SeqGenerator; use rand::seq::SliceRandom;
use librespot_core::version; use tokio::sync::mpsc;
use tokio_stream::wrappers::UnboundedReceiverStream;
enum SpircPlayStatus { enum SpircPlayStatus {
Stopped, Stopped,
@ -40,7 +40,10 @@ enum SpircPlayStatus {
}, },
} }
pub struct SpircTask { type BoxedFuture<T> = Pin<Box<dyn FusedFuture<Output = T> + Send>>;
type BoxedStream<T> = Pin<Box<dyn FusedStream<Item = T> + Send>>;
struct SpircTask {
player: Player, player: Player,
mixer: Box<dyn Mixer>, mixer: Box<dyn Mixer>,
config: SpircTaskConfig, config: SpircTaskConfig,
@ -54,15 +57,15 @@ pub struct SpircTask {
mixer_started: bool, mixer_started: bool,
play_status: SpircPlayStatus, play_status: SpircPlayStatus,
subscription: Box<dyn Stream<Item = Frame, Error = MercuryError>>, subscription: BoxedStream<Frame>,
sender: Box<dyn Sink<SinkItem = Frame, SinkError = MercuryError>>, sender: MercurySender,
commands: mpsc::UnboundedReceiver<SpircCommand>, commands: Option<mpsc::UnboundedReceiver<SpircCommand>>,
player_events: PlayerEventChannel, player_events: Option<PlayerEventChannel>,
shutdown: bool, shutdown: bool,
session: Session, session: Session,
context_fut: Box<dyn Future<Item = serde_json::Value, Error = MercuryError>>, context_fut: BoxedFuture<Result<serde_json::Value, MercuryError>>,
autoplay_fut: Box<dyn Future<Item = String, Error = MercuryError>>, autoplay_fut: BoxedFuture<Result<String, MercuryError>>,
context: Option<StationContext>, context: Option<StationContext>,
} }
@ -240,38 +243,41 @@ fn volume_to_mixer(volume: u16, volume_ctrl: &VolumeCtrl) -> u16 {
} }
} }
fn url_encode(bytes: impl AsRef<[u8]>) -> String {
form_urlencoded::byte_serialize(bytes.as_ref()).collect()
}
impl Spirc { impl Spirc {
pub fn new( pub fn new(
config: ConnectConfig, config: ConnectConfig,
session: Session, session: Session,
player: Player, player: Player,
mixer: Box<dyn Mixer>, mixer: Box<dyn Mixer>,
) -> (Spirc, SpircTask) { ) -> (Spirc, impl Future<Output = ()>) {
debug!("new Spirc[{}]", session.session_id()); debug!("new Spirc[{}]", session.session_id());
let ident = session.device_id().to_owned(); let ident = session.device_id().to_owned();
// Uri updated in response to issue #288 // Uri updated in response to issue #288
debug!("canonical_username: {}", url_encode(&session.username())); debug!("canonical_username: {}", &session.username());
let uri = format!("hm://remote/user/{}/", url_encode(&session.username())); let uri = format!("hm://remote/user/{}/", url_encode(&session.username()));
let subscription = session.mercury().subscribe(&uri as &str); let subscription = Box::pin(
let subscription = subscription
.map(|stream| stream.map_err(|_| MercuryError))
.flatten_stream();
let subscription = Box::new(subscription.map(|response| -> Frame {
let data = response.payload.first().unwrap();
protobuf::parse_from_bytes(data).unwrap()
}));
let sender = Box::new(
session session
.mercury() .mercury()
.sender(uri) .subscribe(uri.clone())
.with(|frame: Frame| Ok(frame.write_to_bytes().unwrap())), .map(Result::unwrap)
.map(UnboundedReceiverStream::new)
.flatten_stream()
.map(|response| -> Frame {
let data = response.payload.first().unwrap();
protobuf::parse_from_bytes(data).unwrap()
}),
); );
let (cmd_tx, cmd_rx) = mpsc::unbounded(); let sender = session.mercury().sender(uri);
let (cmd_tx, cmd_rx) = mpsc::unbounded_channel();
let volume = config.volume; let volume = config.volume;
let task_config = SpircTaskConfig { let task_config = SpircTaskConfig {
@ -284,30 +290,30 @@ impl Spirc {
let player_events = player.get_player_event_channel(); let player_events = player.get_player_event_channel();
let mut task = SpircTask { let mut task = SpircTask {
player: player, player,
mixer: mixer, mixer,
config: task_config, config: task_config,
sequence: SeqGenerator::new(1), sequence: SeqGenerator::new(1),
ident: ident, ident,
device: device, device,
state: initial_state(), state: initial_state(),
play_request_id: None, play_request_id: None,
mixer_started: false, mixer_started: false,
play_status: SpircPlayStatus::Stopped, play_status: SpircPlayStatus::Stopped,
subscription: subscription, subscription,
sender: sender, sender,
commands: cmd_rx, commands: Some(cmd_rx),
player_events: player_events, player_events: Some(player_events),
shutdown: false, shutdown: false,
session: session.clone(), session,
context_fut: Box::new(future::empty()), context_fut: Box::pin(future::pending()),
autoplay_fut: Box::new(future::empty()), autoplay_fut: Box::pin(future::pending()),
context: None, context: None,
}; };
@ -317,83 +323,62 @@ impl Spirc {
task.hello(); task.hello();
(spirc, task) (spirc, task.run())
} }
pub fn play(&self) { pub fn play(&self) {
let _ = self.commands.unbounded_send(SpircCommand::Play); let _ = self.commands.send(SpircCommand::Play);
} }
pub fn play_pause(&self) { pub fn play_pause(&self) {
let _ = self.commands.unbounded_send(SpircCommand::PlayPause); let _ = self.commands.send(SpircCommand::PlayPause);
} }
pub fn pause(&self) { pub fn pause(&self) {
let _ = self.commands.unbounded_send(SpircCommand::Pause); let _ = self.commands.send(SpircCommand::Pause);
} }
pub fn prev(&self) { pub fn prev(&self) {
let _ = self.commands.unbounded_send(SpircCommand::Prev); let _ = self.commands.send(SpircCommand::Prev);
} }
pub fn next(&self) { pub fn next(&self) {
let _ = self.commands.unbounded_send(SpircCommand::Next); let _ = self.commands.send(SpircCommand::Next);
} }
pub fn volume_up(&self) { pub fn volume_up(&self) {
let _ = self.commands.unbounded_send(SpircCommand::VolumeUp); let _ = self.commands.send(SpircCommand::VolumeUp);
} }
pub fn volume_down(&self) { pub fn volume_down(&self) {
let _ = self.commands.unbounded_send(SpircCommand::VolumeDown); let _ = self.commands.send(SpircCommand::VolumeDown);
} }
pub fn shutdown(&self) { pub fn shutdown(&self) {
let _ = self.commands.unbounded_send(SpircCommand::Shutdown); let _ = self.commands.send(SpircCommand::Shutdown);
} }
} }
impl Future for SpircTask { impl SpircTask {
type Item = (); async fn run(mut self) {
type Error = (); while !self.session.is_invalid() && !self.shutdown {
let commands = self.commands.as_mut();
fn poll(&mut self) -> Poll<(), ()> { let player_events = self.player_events.as_mut();
loop { tokio::select! {
let mut progress = false; frame = self.subscription.next() => match frame {
Some(frame) => self.handle_frame(frame),
if self.session.is_invalid() { None => {
return Ok(Async::Ready(()));
}
if !self.shutdown {
match self.subscription.poll().unwrap() {
Async::Ready(Some(frame)) => {
progress = true;
self.handle_frame(frame);
}
Async::Ready(None) => {
error!("subscription terminated"); error!("subscription terminated");
self.shutdown = true; break;
self.commands.close();
} }
Async::NotReady => (), },
} cmd = async { commands.unwrap().recv().await }, if commands.is_some() => if let Some(cmd) = cmd {
self.handle_command(cmd);
match self.commands.poll().unwrap() { },
Async::Ready(Some(command)) => { event = async { player_events.unwrap().recv().await }, if player_events.is_some() => if let Some(event) = event {
progress = true; self.handle_player_event(event)
self.handle_command(command); },
} result = self.sender.flush(), if !self.sender.is_flushed() => if result.is_err() {
Async::Ready(None) => (), error!("Cannot flush spirc event sender.");
Async::NotReady => (), break;
} },
context = &mut self.context_fut, if !self.context_fut.is_terminated() => {
match self.player_events.poll() { match context {
Ok(Async::NotReady) => (), Ok(value) => {
Ok(Async::Ready(None)) => (), let r_context = serde_json::from_value::<StationContext>(value);
Err(_) => (),
Ok(Async::Ready(Some(event))) => {
progress = true;
self.handle_player_event(event);
}
}
// TODO: Refactor
match self.context_fut.poll() {
Ok(Async::Ready(value)) => {
let r_context = serde_json::from_value::<StationContext>(value.clone());
self.context = match r_context { self.context = match r_context {
Ok(context) => { Ok(context) => {
info!( info!(
@ -404,7 +389,7 @@ impl Future for SpircTask {
Some(context) Some(context)
} }
Err(e) => { Err(e) => {
error!("Unable to parse JSONContext {:?}\n{:?}", e, value); error!("Unable to parse JSONContext {:?}", e);
None None
} }
}; };
@ -413,54 +398,39 @@ impl Future for SpircTask {
// info!("Got {:?} tracks from <{}>", context.tracks.len(), context.uri); // info!("Got {:?} tracks from <{}>", context.tracks.len(), context.uri);
// } // }
// self.context = r_context; // self.context = r_context;
},
progress = true;
self.context_fut = Box::new(future::empty());
}
Ok(Async::NotReady) => (),
Err(err) => { Err(err) => {
self.context_fut = Box::new(future::empty());
error!("ContextError: {:?}", err) error!("ContextError: {:?}", err)
} }
} }
},
match self.autoplay_fut.poll() { autoplay = &mut self.autoplay_fut, if !self.autoplay_fut.is_terminated() => {
Ok(Async::Ready(autoplay_station_uri)) => { match autoplay {
Ok(autoplay_station_uri) => {
info!("Autoplay uri resolved to <{:?}>", autoplay_station_uri); info!("Autoplay uri resolved to <{:?}>", autoplay_station_uri);
self.context_fut = self.resolve_station(&autoplay_station_uri); self.context_fut = self.resolve_station(&autoplay_station_uri);
progress = true; },
self.autoplay_fut = Box::new(future::empty());
}
Ok(Async::NotReady) => (),
Err(err) => { Err(err) => {
self.autoplay_fut = Box::new(future::empty());
error!("AutoplayError: {:?}", err) error!("AutoplayError: {:?}", err)
} }
} }
},
else => break
}
} }
let poll_sender = self.sender.poll_complete().unwrap(); if self.sender.flush().await.is_err() {
warn!("Cannot flush spirc event sender.");
// Only shutdown once we've flushed out all our messages }
if self.shutdown && poll_sender.is_ready() {
return Ok(Async::Ready(()));
} }
if !progress {
return Ok(Async::NotReady);
}
}
}
}
impl SpircTask {
fn now_ms(&mut self) -> i64 { fn now_ms(&mut self) -> i64 {
let dur = match SystemTime::now().duration_since(UNIX_EPOCH) { let dur = match SystemTime::now().duration_since(UNIX_EPOCH) {
Ok(dur) => dur, Ok(dur) => dur,
Err(err) => err.duration(), Err(err) => err.duration(),
}; };
(dur.as_secs() as i64 + self.session.time_delta()) * 1000
+ (dur.subsec_nanos() / 1000_000) as i64 dur.as_millis() as i64 + 1000 * self.session.time_delta()
} }
fn ensure_mixer_started(&mut self) { fn ensure_mixer_started(&mut self) {
@ -545,7 +515,9 @@ impl SpircTask {
SpircCommand::Shutdown => { SpircCommand::Shutdown => {
CommandSender::new(self, MessageType::kMessageTypeGoodbye).send(); CommandSender::new(self, MessageType::kMessageTypeGoodbye).send();
self.shutdown = true; self.shutdown = true;
self.commands.close(); if let Some(rx) = self.commands.as_mut() {
rx.close()
}
} }
} }
} }
@ -653,7 +625,7 @@ impl SpircTask {
); );
if frame.get_ident() == self.ident if frame.get_ident() == self.ident
|| (frame.get_recipient().len() > 0 && !frame.get_recipient().contains(&self.ident)) || (!frame.get_recipient().is_empty() && !frame.get_recipient().contains(&self.ident))
{ {
return; return;
} }
@ -672,7 +644,7 @@ impl SpircTask {
self.update_tracks(&frame); self.update_tracks(&frame);
if self.state.get_track().len() > 0 { if !self.state.get_track().is_empty() {
let start_playing = let start_playing =
frame.get_state().get_status() == PlayStatus::kPlayStatusPlay; frame.get_state().get_status() == PlayStatus::kPlayStatusPlay;
self.load_track(start_playing, frame.get_state().get_position_ms()); self.load_track(start_playing, frame.get_state().get_position_ms());
@ -895,7 +867,7 @@ impl SpircTask {
fn preview_next_track(&mut self) -> Option<SpotifyId> { fn preview_next_track(&mut self) -> Option<SpotifyId> {
self.get_track_id_to_play_from_playlist(self.state.get_playing_track_index() + 1) self.get_track_id_to_play_from_playlist(self.state.get_playing_track_index() + 1)
.and_then(|(track_id, _)| Some(track_id)) .map(|(track_id, _)| track_id)
} }
fn handle_preload_next_track(&mut self) { fn handle_preload_next_track(&mut self) {
@ -1014,7 +986,7 @@ impl SpircTask {
}; };
// Reinsert queued tracks after the new playing track. // Reinsert queued tracks after the new playing track.
let mut pos = (new_index + 1) as usize; let mut pos = (new_index + 1) as usize;
for track in queue_tracks.into_iter() { for track in queue_tracks {
self.state.mut_track().insert(pos, track); self.state.mut_track().insert(pos, track);
pos += 1; pos += 1;
} }
@ -1060,22 +1032,19 @@ impl SpircTask {
} }
} }
fn resolve_station( fn resolve_station(&self, uri: &str) -> BoxedFuture<Result<serde_json::Value, MercuryError>> {
&self,
uri: &str,
) -> Box<dyn Future<Item = serde_json::Value, Error = MercuryError>> {
let radio_uri = format!("hm://radio-apollo/v3/stations/{}", uri); let radio_uri = format!("hm://radio-apollo/v3/stations/{}", uri);
self.resolve_uri(&radio_uri) self.resolve_uri(&radio_uri)
} }
fn resolve_autoplay_uri( fn resolve_autoplay_uri(&self, uri: &str) -> BoxedFuture<Result<String, MercuryError>> {
&self,
uri: &str,
) -> Box<dyn Future<Item = String, Error = MercuryError>> {
let query_uri = format!("hm://autoplay-enabled/query?uri={}", uri); let query_uri = format!("hm://autoplay-enabled/query?uri={}", uri);
let request = self.session.mercury().get(query_uri); let request = self.session.mercury().get(query_uri);
Box::new(request.and_then(move |response| { Box::pin(
async {
let response = request.await?;
if response.status_code == 200 { if response.status_code == 200 {
let data = response let data = response
.payload .payload
@ -1088,16 +1057,18 @@ impl SpircTask {
warn!("No autoplay_uri found"); warn!("No autoplay_uri found");
Err(MercuryError) Err(MercuryError)
} }
})) }
.fuse(),
)
} }
fn resolve_uri( fn resolve_uri(&self, uri: &str) -> BoxedFuture<Result<serde_json::Value, MercuryError>> {
&self,
uri: &str,
) -> Box<dyn Future<Item = serde_json::Value, Error = MercuryError>> {
let request = self.session.mercury().get(uri); let request = self.session.mercury().get(uri);
Box::new(request.and_then(move |response| { Box::pin(
async move {
let response = request.await?;
let data = response let data = response
.payload .payload
.first() .first()
@ -1105,7 +1076,9 @@ impl SpircTask {
let response: serde_json::Value = serde_json::from_slice(&data).unwrap(); let response: serde_json::Value = serde_json::from_slice(&data).unwrap();
Ok(response) Ok(response)
})) }
.fuse(),
)
} }
fn update_tracks_from_context(&mut self) { fn update_tracks_from_context(&mut self) {
@ -1152,7 +1125,7 @@ impl SpircTask {
} }
self.state.set_playing_track_index(index); self.state.set_playing_track_index(index);
self.state.set_track(tracks.into_iter().cloned().collect()); self.state.set_track(tracks.iter().cloned().collect());
self.state.set_context_uri(context_uri); self.state.set_context_uri(context_uri);
// has_shuffle/repeat seem to always be true in these replace msgs, // has_shuffle/repeat seem to always be true in these replace msgs,
// but to replicate the behaviour of the Android client we have to // but to replicate the behaviour of the Android client we have to
@ -1323,10 +1296,7 @@ impl<'a> CommandSender<'a> {
frame.set_typ(cmd); frame.set_typ(cmd);
frame.set_device_state(spirc.device.clone()); frame.set_device_state(spirc.device.clone());
frame.set_state_update_id(spirc.now_ms()); frame.set_state_update_id(spirc.now_ms());
CommandSender { CommandSender { spirc, frame }
spirc: spirc,
frame: frame,
}
} }
fn recipient(mut self, recipient: &'a str) -> CommandSender { fn recipient(mut self, recipient: &'a str) -> CommandSender {
@ -1345,7 +1315,6 @@ impl<'a> CommandSender<'a> {
self.frame.set_state(self.spirc.state.clone()); self.frame.set_state(self.spirc.state.clone());
} }
let send = self.spirc.sender.start_send(self.frame).unwrap(); self.spirc.sender.send(self.frame.write_to_bytes().unwrap());
assert!(send.is_ready());
} }
} }
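
SpircTask no longer implements Future by hand; run() is an async fn that multiplexes the Mercury subscription, the command channel, player events and the pending context futures with tokio::select!. A stripped-down sketch of that event-loop shape, using made-up channel and command types rather than the real Spirc internals:

use tokio::sync::mpsc;

enum Command {
    Play,
    Shutdown,
}

struct Task {
    commands: mpsc::UnboundedReceiver<Command>,
    events: mpsc::UnboundedReceiver<String>,
    shutdown: bool,
}

impl Task {
    async fn run(mut self) {
        while !self.shutdown {
            tokio::select! {
                cmd = self.commands.recv() => match cmd {
                    Some(Command::Play) => println!("play"),
                    Some(Command::Shutdown) => self.shutdown = true,
                    None => break, // all senders dropped
                },
                event = self.events.recv() => match event {
                    Some(event) => println!("event: {}", event),
                    None => break,
                },
                // Fallback once every branch is exhausted.
                else => break,
            }
        }
    }
}

#[tokio::main]
async fn main() {
    let (cmd_tx, cmd_rx) = mpsc::unbounded_channel();
    let (event_tx, event_rx) = mpsc::unbounded_channel();

    let task = Task { commands: cmd_rx, events: event_rx, shutdown: false };
    let handle = tokio::spawn(task.run());

    event_tx.send("track changed".to_string()).unwrap();
    cmd_tx.send(Command::Play).unwrap();
    cmd_tx.send(Command::Shutdown).unwrap();

    handle.await.unwrap();
}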

View file

@ -15,33 +15,42 @@ version = "0.1.6"
[dependencies] [dependencies]
aes = "0.6" aes = "0.6"
base64 = "0.13" base64 = "0.13"
byteorder = "1.3" byteorder = "1.4"
bytes = "0.4" bytes = "1.0"
error-chain = { version = "0.12", default_features = false } form_urlencoded = "1.0"
futures = "0.1" futures-core = { version = "0.3", default-features = false }
futures-util = { version = "0.3", default-features = false, features = ["alloc", "bilock", "unstable", "sink"] }
hmac = "0.10" hmac = "0.10"
httparse = "1.3" httparse = "1.3"
hyper = "0.11" http = "0.2"
hyper-proxy = { version = "0.4", default_features = false } hyper = { version = "0.14", optional = true, features = ["client", "tcp", "http1"] }
lazy_static = "1.3" hyper-proxy = { version = "0.9.1", optional = true, default-features = false }
log = "0.4" log = "0.4"
num-bigint = "0.3" num-bigint = { version = "0.4", features = ["rand"] }
num-integer = "0.1" num-integer = "0.1"
num-traits = "0.2" num-traits = "0.2"
pbkdf2 = { version = "0.7", default_features = false, features = ["hmac"] } once_cell = "1.5.2"
pbkdf2 = { version = "0.7", default-features = false, features = ["hmac"] }
protobuf = "~2.14.0" protobuf = "~2.14.0"
rand = "0.7" rand = "0.8"
serde = "1.0" serde = { version = "1.0", features = ["derive"] }
serde_derive = "1.0"
serde_json = "1.0" serde_json = "1.0"
sha-1 = "0.9" sha-1 = "0.9"
shannon = "0.2.0" shannon = "0.2.0"
tokio-codec = "0.1" thiserror = "1"
tokio-core = "0.1" tokio = { version = "1.0", features = ["io-util", "net", "rt", "sync"] }
tokio-io = "0.1" tokio-stream = "0.1"
url = "1.7" tokio-util = { version = "0.6", features = ["codec"] }
uuid = { version = "0.8", features = ["v4"] } url = "2.1"
uuid = { version = "0.8", default-features = false, features = ["v4"] }
[build-dependencies] [build-dependencies]
rand = "0.7" rand = "0.8"
vergen = "3.0.4" vergen = "3.0.4"
[dev-dependencies]
env_logger = "*"
tokio = {version = "1.0", features = ["macros"] }
[features]
apresolve = ["hyper", "hyper-proxy"]

View file

@ -1,6 +1,3 @@
extern crate rand;
extern crate vergen;
use rand::distributions::Alphanumeric; use rand::distributions::Alphanumeric;
use rand::Rng; use rand::Rng;
use vergen::{generate_cargo_keys, ConstantsFlags}; use vergen::{generate_cargo_keys, ConstantsFlags};
@ -10,10 +7,10 @@ fn main() {
flags.toggle(ConstantsFlags::REBUILD_ON_HEAD_CHANGE); flags.toggle(ConstantsFlags::REBUILD_ON_HEAD_CHANGE);
generate_cargo_keys(ConstantsFlags::all()).expect("Unable to generate the cargo keys!"); generate_cargo_keys(ConstantsFlags::all()).expect("Unable to generate the cargo keys!");
let mut rng = rand::thread_rng(); let build_id: String = rand::thread_rng()
let build_id: String = ::std::iter::repeat(()) .sample_iter(Alphanumeric)
.map(|()| rng.sample(Alphanumeric))
.take(8) .take(8)
.map(char::from)
.collect(); .collect();
println!("cargo:rustc-env=LIBRESPOT_BUILD_ID={}", build_id); println!("cargo:rustc-env=LIBRESPOT_BUILD_ID={}", build_id);

View file

@ -1,101 +1,91 @@
const AP_FALLBACK: &'static str = "ap.spotify.com:443"; use std::error::Error;
const APRESOLVE_ENDPOINT: &'static str = "http://apresolve.spotify.com/";
use futures::{Future, Stream};
use hyper::client::HttpConnector; use hyper::client::HttpConnector;
use hyper::{self, Client, Method, Request, Uri}; use hyper::{Body, Client, Method, Request, Uri};
use hyper_proxy::{Intercept, Proxy, ProxyConnector}; use hyper_proxy::{Intercept, Proxy, ProxyConnector};
use serde_json; use serde::Deserialize;
use std::str::FromStr;
use tokio_core::reactor::Handle;
use url::Url; use url::Url;
error_chain! {} use super::AP_FALLBACK;
#[derive(Clone, Debug, Serialize, Deserialize)] const APRESOLVE_ENDPOINT: &str = "http://apresolve.spotify.com:80";
pub struct APResolveData {
#[derive(Clone, Debug, Deserialize)]
struct ApResolveData {
ap_list: Vec<String>, ap_list: Vec<String>,
} }
fn apresolve( async fn try_apresolve(
handle: &Handle, proxy: Option<&Url>,
proxy: &Option<Url>, ap_port: Option<u16>,
ap_port: &Option<u16>, ) -> Result<String, Box<dyn Error>> {
) -> Box<dyn Future<Item = String, Error = Error>> { let port = ap_port.unwrap_or(443);
let url = Uri::from_str(APRESOLVE_ENDPOINT).expect("invalid AP resolve URL");
let use_proxy = proxy.is_some();
let mut req = Request::new(Method::Get, url.clone()); let mut req = Request::new(Body::empty());
let response = match *proxy { *req.method_mut() = Method::GET;
Some(ref val) => { // panic safety: APRESOLVE_ENDPOINT above is valid url.
let proxy_url = Uri::from_str(val.as_str()).expect("invalid http proxy"); *req.uri_mut() = APRESOLVE_ENDPOINT.parse().expect("invalid AP resolve URL");
let proxy = Proxy::new(Intercept::All, proxy_url);
let connector = HttpConnector::new(4, handle); let response = if let Some(url) = proxy {
// Panic safety: all URLs are valid URIs
let uri = url.to_string().parse().unwrap();
let proxy = Proxy::new(Intercept::All, uri);
let connector = HttpConnector::new();
let proxy_connector = ProxyConnector::from_proxy_unsecured(connector, proxy); let proxy_connector = ProxyConnector::from_proxy_unsecured(connector, proxy);
if let Some(headers) = proxy_connector.http_headers(&url) { Client::builder()
req.headers_mut().extend(headers.iter()); .build(proxy_connector)
req.set_proxy(true); .request(req)
} .await?
let client = Client::configure().connector(proxy_connector).build(handle); } else {
client.request(req) Client::new().request(req).await?
}
_ => {
let client = Client::new(handle);
client.request(req)
}
}; };
let body = response.and_then(|response| { let body = hyper::body::to_bytes(response.into_body()).await?;
response.body().fold(Vec::new(), |mut acc, chunk| { let data: ApResolveData = serde_json::from_slice(body.as_ref())?;
acc.extend_from_slice(chunk.as_ref());
Ok::<_, hyper::Error>(acc)
})
});
let body = body.then(|result| result.chain_err(|| "HTTP error"));
let body =
body.and_then(|body| String::from_utf8(body).chain_err(|| "invalid UTF8 in response"));
let data = body let ap = if ap_port.is_some() || proxy.is_some() {
.and_then(|body| serde_json::from_str::<APResolveData>(&body).chain_err(|| "invalid JSON")); data.ap_list.into_iter().find_map(|ap| {
if ap.parse::<Uri>().ok()?.port()? == port {
let p = ap_port.clone(); Some(ap)
let ap = data.and_then(move |data| {
let mut aps = data.ap_list.iter().filter(|ap| {
if p.is_some() {
Uri::from_str(ap).ok().map_or(false, |uri| {
uri.port().map_or(false, |port| port == p.unwrap())
})
} else if use_proxy {
// It is unlikely that the proxy will accept CONNECT on anything other than 443.
Uri::from_str(ap)
.ok()
.map_or(false, |uri| uri.port().map_or(false, |port| port == 443))
} else { } else {
true None
} }
}); })
} else {
data.ap_list.into_iter().next()
}
.ok_or("empty AP List")?;
let ap = aps.next().ok_or("empty AP List")?; Ok(ap)
Ok(ap.clone())
});
Box::new(ap)
} }
pub(crate) fn apresolve_or_fallback<E>( pub async fn apresolve(proxy: Option<&Url>, ap_port: Option<u16>) -> String {
handle: &Handle, try_apresolve(proxy, ap_port).await.unwrap_or_else(|e| {
proxy: &Option<Url>, warn!("Failed to resolve Access Point: {}", e);
ap_port: &Option<u16>,
) -> Box<dyn Future<Item = String, Error = E>>
where
E: 'static,
{
let ap = apresolve(handle, proxy, ap_port).or_else(|e| {
warn!("Failed to resolve Access Point: {}", e.description());
warn!("Using fallback \"{}\"", AP_FALLBACK); warn!("Using fallback \"{}\"", AP_FALLBACK);
Ok(AP_FALLBACK.into()) AP_FALLBACK.into()
}); })
}
Box::new(ap)
#[cfg(test)]
mod test {
use std::net::ToSocketAddrs;
use super::try_apresolve;
#[tokio::test]
async fn test_apresolve() {
let ap = try_apresolve(None, None).await.unwrap();
// Assert that the result contains a valid host and port
ap.to_socket_addrs().unwrap().next().unwrap();
}
#[tokio::test]
async fn test_apresolve_port_443() {
let ap = try_apresolve(None, Some(443)).await.unwrap();
let port = ap.to_socket_addrs().unwrap().next().unwrap().port();
assert_eq!(port, 443);
}
} }
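
apresolve becomes an ordinary async function: a hyper 0.14 GET (optionally routed through hyper-proxy), deserialized with serde, with #[tokio::test] tests replacing the futures 0.1 plumbing. A reduced sketch of the fetch-and-deserialize shape against a JSON endpoint (the URI and field name below are placeholders, and serde's derive feature, serde_json and hyper's client/http1/tcp features are assumed):

use hyper::{body, Body, Client, Method, Request};
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Endpoints {
    ap_list: Vec<String>,
}

async fn fetch(uri: &str) -> Result<Endpoints, Box<dyn std::error::Error>> {
    let mut req = Request::new(Body::empty());
    *req.method_mut() = Method::GET;
    *req.uri_mut() = uri.parse()?;

    let response = Client::new().request(req).await?;
    let bytes = body::to_bytes(response.into_body()).await?;
    Ok(serde_json::from_slice(&bytes)?)
}

#[tokio::main]
async fn main() {
    // Placeholder URI; the real resolver targets apresolve.spotify.com.
    match fetch("http://example.com/endpoints.json").await {
        Ok(eps) => println!("{} access points", eps.ap_list.len()),
        Err(e) => eprintln!("resolve failed: {}", e),
    }
}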

View file

@ -1,9 +1,8 @@
use byteorder::{BigEndian, ByteOrder, WriteBytesExt}; use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use bytes::Bytes; use bytes::Bytes;
use futures::sync::oneshot;
use futures::{Async, Future, Poll};
use std::collections::HashMap; use std::collections::HashMap;
use std::io::Write; use std::io::Write;
use tokio::sync::oneshot;
use crate::spotify_id::{FileId, SpotifyId}; use crate::spotify_id::{FileId, SpotifyId};
use crate::util::SeqGenerator; use crate::util::SeqGenerator;
@ -47,7 +46,7 @@ impl AudioKeyManager {
} }
} }
pub fn request(&self, track: SpotifyId, file: FileId) -> AudioKeyFuture<AudioKey> { pub async fn request(&self, track: SpotifyId, file: FileId) -> Result<AudioKey, AudioKeyError> {
let (tx, rx) = oneshot::channel(); let (tx, rx) = oneshot::channel();
let seq = self.lock(move |inner| { let seq = self.lock(move |inner| {
@ -57,7 +56,7 @@ impl AudioKeyManager {
}); });
self.send_key_request(seq, track, file); self.send_key_request(seq, track, file);
AudioKeyFuture(rx) rx.await.map_err(|_| AudioKeyError)?
} }
fn send_key_request(&self, seq: u32, track: SpotifyId, file: FileId) { fn send_key_request(&self, seq: u32, track: SpotifyId, file: FileId) {
@ -70,18 +69,3 @@ impl AudioKeyManager {
self.session().send_packet(0xc, data) self.session().send_packet(0xc, data)
} }
} }
pub struct AudioKeyFuture<T>(oneshot::Receiver<Result<T, AudioKeyError>>);
impl<T> Future for AudioKeyFuture<T> {
type Item = T;
type Error = AudioKeyError;
fn poll(&mut self) -> Poll<T, AudioKeyError> {
match self.0.poll() {
Ok(Async::Ready(Ok(value))) => Ok(Async::Ready(value)),
Ok(Async::Ready(Err(err))) => Err(err),
Ok(Async::NotReady) => Ok(Async::NotReady),
Err(oneshot::Canceled) => Err(AudioKeyError),
}
}
}
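
The hand-written AudioKeyFuture wrapper disappears: once request() is an async fn, awaiting the tokio oneshot receiver and mapping the cancellation error covers the same ground. A reduced sketch of that pattern, with an illustrative registry type and the sender completed inline instead of by the packet dispatcher:

use tokio::sync::oneshot;

#[derive(Debug)]
struct KeyError;

struct Registry;

impl Registry {
    async fn request(&self) -> Result<Vec<u8>, KeyError> {
        let (tx, rx) = oneshot::channel();

        // In the real code the sender is handed to the dispatcher, which
        // completes it when the key packet arrives; here we complete it inline.
        tx.send(Ok(vec![0u8; 16])).map_err(|_| KeyError)?;

        // Awaiting the receiver replaces the manual Future impl: a dropped
        // sender surfaces as a receive error, mapped to the local error type.
        rx.await.map_err(|_| KeyError)?
    }
}

#[tokio::main]
async fn main() {
    let key = Registry.request().await.expect("key");
    println!("got {} bytes", key.len());
}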

View file

@ -1,14 +1,16 @@
use std::io::{self, Read};
use aes::Aes192; use aes::Aes192;
use byteorder::{BigEndian, ByteOrder}; use byteorder::{BigEndian, ByteOrder};
use hmac::Hmac; use hmac::Hmac;
use pbkdf2::pbkdf2; use pbkdf2::pbkdf2;
use protobuf::ProtobufEnum; use protobuf::ProtobufEnum;
use serde::{Deserialize, Serialize};
use sha1::{Digest, Sha1}; use sha1::{Digest, Sha1};
use std::io::{self, Read};
use crate::protocol::authentication::AuthenticationType; use crate::protocol::authentication::AuthenticationType;
use crate::protocol::keyexchange::{APLoginFailed, ErrorCode};
/// The credentials are used to log into the Spotify API.
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Credentials { pub struct Credentials {
pub username: String, pub username: String,
@ -24,11 +26,19 @@ pub struct Credentials {
} }
impl Credentials { impl Credentials {
pub fn with_password(username: String, password: String) -> Credentials { /// Intialize these credentials from a username and a password.
///
/// ### Example
/// ```rust
/// use librespot_core::authentication::Credentials;
///
/// let creds = Credentials::with_password("my account", "my password");
/// ```
pub fn with_password(username: impl Into<String>, password: impl Into<String>) -> Credentials {
Credentials { Credentials {
username: username, username: username.into(),
auth_type: AuthenticationType::AUTHENTICATION_USER_PASS, auth_type: AuthenticationType::AUTHENTICATION_USER_PASS,
auth_data: password.into_bytes(), auth_data: password.into().into_bytes(),
} }
} }
@ -102,9 +112,9 @@ impl Credentials {
let auth_data = read_bytes(&mut cursor).unwrap(); let auth_data = read_bytes(&mut cursor).unwrap();
Credentials { Credentials {
username: username, username,
auth_type: auth_type, auth_type,
auth_data: auth_data, auth_data,
} }
} }
} }
@ -141,61 +151,3 @@ where
let v: String = serde::Deserialize::deserialize(de)?; let v: String = serde::Deserialize::deserialize(de)?;
base64::decode(&v).map_err(|e| serde::de::Error::custom(e.to_string())) base64::decode(&v).map_err(|e| serde::de::Error::custom(e.to_string()))
} }
pub fn get_credentials<F: FnOnce(&String) -> String>(
username: Option<String>,
password: Option<String>,
cached_credentials: Option<Credentials>,
prompt: F,
) -> Option<Credentials> {
match (username, password, cached_credentials) {
(Some(username), Some(password), _) => Some(Credentials::with_password(username, password)),
(Some(ref username), _, Some(ref credentials)) if *username == credentials.username => {
Some(credentials.clone())
}
(Some(username), None, _) => Some(Credentials::with_password(
username.clone(),
prompt(&username),
)),
(None, _, Some(credentials)) => Some(credentials),
(None, _, None) => None,
}
}
error_chain! {
types {
AuthenticationError, AuthenticationErrorKind, AuthenticationResultExt, AuthenticationResult;
}
foreign_links {
Io(::std::io::Error);
}
errors {
BadCredentials {
description("Bad credentials")
display("Authentication failed with error: Bad credentials")
}
PremiumAccountRequired {
description("Premium account required")
display("Authentication failed with error: Premium account required")
}
}
}
impl From<APLoginFailed> for AuthenticationError {
fn from(login_failure: APLoginFailed) -> Self {
let error_code = login_failure.get_error_code();
match error_code {
ErrorCode::BadCredentials => Self::from_kind(AuthenticationErrorKind::BadCredentials),
ErrorCode::PremiumAccountRequired => {
Self::from_kind(AuthenticationErrorKind::PremiumAccountRequired)
}
_ => format!("Authentication failed with error: {:?}", error_code).into(),
}
}
}

View file

@ -1,9 +1,14 @@
use std::collections::HashMap;
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::Instant;
use byteorder::{BigEndian, ByteOrder}; use byteorder::{BigEndian, ByteOrder};
use bytes::Bytes; use bytes::Bytes;
use futures::sync::{mpsc, BiLock}; use futures_core::Stream;
use futures::{Async, Poll, Stream}; use futures_util::lock::BiLock;
use std::collections::HashMap; use futures_util::StreamExt;
use std::time::Instant; use tokio::sync::mpsc;
use crate::util::SeqGenerator; use crate::util::SeqGenerator;
@ -43,7 +48,7 @@ enum ChannelState {
impl ChannelManager { impl ChannelManager {
pub fn allocate(&self) -> (u16, Channel) { pub fn allocate(&self) -> (u16, Channel) {
let (tx, rx) = mpsc::unbounded(); let (tx, rx) = mpsc::unbounded_channel();
let seq = self.lock(|inner| { let seq = self.lock(|inner| {
let seq = inner.sequence.get(); let seq = inner.sequence.get();
@ -82,13 +87,13 @@ impl ChannelManager {
inner.download_measurement_bytes += data.len(); inner.download_measurement_bytes += data.len();
if let Entry::Occupied(entry) = inner.channels.entry(id) { if let Entry::Occupied(entry) = inner.channels.entry(id) {
let _ = entry.get().unbounded_send((cmd, data)); let _ = entry.get().send((cmd, data));
} }
}); });
} }
pub fn get_download_rate_estimate(&self) -> usize { pub fn get_download_rate_estimate(&self) -> usize {
return self.lock(|inner| inner.download_rate_estimate); self.lock(|inner| inner.download_rate_estimate)
} }
pub(crate) fn shutdown(&self) { pub(crate) fn shutdown(&self) {
@ -101,12 +106,10 @@ impl ChannelManager {
} }
impl Channel { impl Channel {
fn recv_packet(&mut self) -> Poll<Bytes, ChannelError> { fn recv_packet(&mut self, cx: &mut Context<'_>) -> Poll<Result<Bytes, ChannelError>> {
let (cmd, packet) = match self.receiver.poll() { let (cmd, packet) = match self.receiver.poll_recv(cx) {
Ok(Async::Ready(Some(t))) => t, Poll::Pending => return Poll::Pending,
Ok(Async::Ready(None)) => return Err(ChannelError), // The channel has been closed. Poll::Ready(o) => o.ok_or(ChannelError)?,
Ok(Async::NotReady) => return Ok(Async::NotReady),
Err(()) => unreachable!(),
}; };
if cmd == 0xa { if cmd == 0xa {
@ -115,9 +118,9 @@ impl Channel {
self.state = ChannelState::Closed; self.state = ChannelState::Closed;
Err(ChannelError) Poll::Ready(Err(ChannelError))
} else { } else {
Ok(Async::Ready(packet)) Poll::Ready(Ok(packet))
} }
} }
@ -129,16 +132,19 @@ impl Channel {
} }
impl Stream for Channel { impl Stream for Channel {
type Item = ChannelEvent; type Item = Result<ChannelEvent, ChannelError>;
type Error = ChannelError;
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> { fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
loop { loop {
match self.state.clone() { match self.state.clone() {
ChannelState::Closed => panic!("Polling already terminated channel"), ChannelState::Closed => panic!("Polling already terminated channel"),
ChannelState::Header(mut data) => { ChannelState::Header(mut data) => {
if data.len() == 0 { if data.is_empty() {
data = try_ready!(self.recv_packet()); data = match self.recv_packet(cx) {
Poll::Ready(Ok(x)) => x,
Poll::Ready(Err(x)) => return Poll::Ready(Some(Err(x))),
Poll::Pending => return Poll::Pending,
};
} }
let length = BigEndian::read_u16(data.split_to(2).as_ref()) as usize; let length = BigEndian::read_u16(data.split_to(2).as_ref()) as usize;
@ -152,19 +158,23 @@ impl Stream for Channel {
self.state = ChannelState::Header(data); self.state = ChannelState::Header(data);
let event = ChannelEvent::Header(header_id, header_data); let event = ChannelEvent::Header(header_id, header_data);
return Ok(Async::Ready(Some(event))); return Poll::Ready(Some(Ok(event)));
} }
} }
ChannelState::Data => { ChannelState::Data => {
let data = try_ready!(self.recv_packet()); let data = match self.recv_packet(cx) {
if data.len() == 0 { Poll::Ready(Ok(x)) => x,
Poll::Ready(Err(x)) => return Poll::Ready(Some(Err(x))),
Poll::Pending => return Poll::Pending,
};
if data.is_empty() {
self.receiver.close(); self.receiver.close();
self.state = ChannelState::Closed; self.state = ChannelState::Closed;
return Ok(Async::Ready(None)); return Poll::Ready(None);
} else { } else {
let event = ChannelEvent::Data(data); let event = ChannelEvent::Data(data);
return Ok(Async::Ready(Some(event))); return Poll::Ready(Some(Ok(event)));
} }
} }
} }
@ -173,38 +183,46 @@ impl Stream for Channel {
} }
impl Stream for ChannelData { impl Stream for ChannelData {
type Item = Bytes; type Item = Result<Bytes, ChannelError>;
type Error = ChannelError;
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> { fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let mut channel = match self.0.poll_lock() { let mut channel = match self.0.poll_lock(cx) {
Async::Ready(c) => c, Poll::Ready(c) => c,
Async::NotReady => return Ok(Async::NotReady), Poll::Pending => return Poll::Pending,
}; };
loop { loop {
match try_ready!(channel.poll()) { let event = match channel.poll_next_unpin(cx) {
Poll::Ready(x) => x.transpose()?,
Poll::Pending => return Poll::Pending,
};
match event {
Some(ChannelEvent::Header(..)) => (), Some(ChannelEvent::Header(..)) => (),
Some(ChannelEvent::Data(data)) => return Ok(Async::Ready(Some(data))), Some(ChannelEvent::Data(data)) => return Poll::Ready(Some(Ok(data))),
None => return Ok(Async::Ready(None)), None => return Poll::Ready(None),
} }
} }
} }
} }
impl Stream for ChannelHeaders { impl Stream for ChannelHeaders {
type Item = (u8, Vec<u8>); type Item = Result<(u8, Vec<u8>), ChannelError>;
type Error = ChannelError;
fn poll(&mut self) -> Poll<Option<Self::Item>, Self::Error> { fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let mut channel = match self.0.poll_lock() { let mut channel = match self.0.poll_lock(cx) {
Async::Ready(c) => c, Poll::Ready(c) => c,
Async::NotReady => return Ok(Async::NotReady), Poll::Pending => return Poll::Pending,
}; };
match try_ready!(channel.poll()) { let event = match channel.poll_next_unpin(cx) {
Some(ChannelEvent::Header(id, data)) => Ok(Async::Ready(Some((id, data)))), Poll::Ready(x) => x.transpose()?,
Some(ChannelEvent::Data(..)) | None => Ok(Async::Ready(None)), Poll::Pending => return Poll::Pending,
};
match event {
Some(ChannelEvent::Header(id, data)) => Poll::Ready(Some(Ok((id, data)))),
Some(ChannelEvent::Data(..)) | None => Poll::Ready(None),
} }
} }
} }
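
Channel, ChannelData and ChannelHeaders now implement Stream through the std-based poll_next signature and yield Result items instead of carrying a separate Error associated type. A toy stream written against that interface, purely to show the poll_next shape (it assumes the futures-core, futures-util and tokio crates from the manifest above):

use std::pin::Pin;
use std::task::{Context, Poll};

use futures_core::Stream;
use futures_util::StreamExt;

struct Countdown {
    remaining: u32,
}

impl Stream for Countdown {
    type Item = Result<u32, ()>;

    fn poll_next(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        if self.remaining == 0 {
            // `None` terminates the stream, like a closed channel.
            Poll::Ready(None)
        } else {
            self.remaining -= 1;
            Poll::Ready(Some(Ok(self.remaining)))
        }
    }
}

#[tokio::main]
async fn main() {
    let mut stream = Countdown { remaining: 3 };
    while let Some(item) = stream.next().await {
        println!("{:?}", item);
    }
}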

View file

@ -35,29 +35,3 @@ macro_rules! component {
} }
} }
} }
use std::cell::UnsafeCell;
use std::sync::Mutex;
pub(crate) struct Lazy<T>(Mutex<bool>, UnsafeCell<Option<T>>);
unsafe impl<T: Sync> Sync for Lazy<T> {}
unsafe impl<T: Send> Send for Lazy<T> {}
#[cfg_attr(feature = "cargo-clippy", allow(mutex_atomic))]
impl<T> Lazy<T> {
pub(crate) fn new() -> Lazy<T> {
Lazy(Mutex::new(false), UnsafeCell::new(None))
}
pub(crate) fn get<F: FnOnce() -> T>(&self, f: F) -> &T {
let mut inner = self.0.lock().unwrap();
if !*inner {
unsafe {
*self.1.get() = Some(f());
}
*inner = true;
}
unsafe { &*self.1.get() }.as_ref().unwrap()
}
}
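
The hand-rolled Lazy cell (a Mutex flag plus an UnsafeCell) is removed; the core manifest now pulls in once_cell, which provides the same lazily initialised, write-once behaviour without unsafe code and is presumably what the component macro relies on instead. A small sketch of that replacement pattern with made-up component names:

use once_cell::sync::OnceCell;

struct MercuryManager {
    label: String,
}

struct Session {
    mercury: OnceCell<MercuryManager>,
}

impl Session {
    fn mercury(&self) -> &MercuryManager {
        // Initialised at most once, on first access; later calls return the same value.
        self.mercury.get_or_init(|| MercuryManager {
            label: "mercury".to_string(),
        })
    }
}

fn main() {
    let session = Session { mercury: OnceCell::new() };
    assert_eq!(session.mercury().label, "mercury");
}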

View file

@ -1,9 +1,6 @@
use std::fmt; use std::fmt;
use std::str::FromStr; use std::str::FromStr;
use url::Url; use url::Url;
use uuid::Uuid;
use crate::version;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct SessionConfig { pub struct SessionConfig {
@ -15,10 +12,10 @@ pub struct SessionConfig {
impl Default for SessionConfig { impl Default for SessionConfig {
fn default() -> SessionConfig { fn default() -> SessionConfig {
let device_id = Uuid::new_v4().to_hyphenated().to_string(); let device_id = uuid::Uuid::new_v4().to_hyphenated().to_string();
SessionConfig { SessionConfig {
user_agent: version::VERSION_STRING.to_string(), user_agent: crate::version::VERSION_STRING.to_string(),
device_id: device_id, device_id,
proxy: None, proxy: None,
ap_port: None, ap_port: None,
} }
@ -32,9 +29,9 @@ pub enum DeviceType {
Tablet = 2, Tablet = 2,
Smartphone = 3, Smartphone = 3,
Speaker = 4, Speaker = 4,
TV = 5, Tv = 5,
AVR = 6, Avr = 6,
STB = 7, Stb = 7,
AudioDongle = 8, AudioDongle = 8,
GameConsole = 9, GameConsole = 9,
CastAudio = 10, CastAudio = 10,
@ -57,9 +54,9 @@ impl FromStr for DeviceType {
"tablet" => Ok(Tablet), "tablet" => Ok(Tablet),
"smartphone" => Ok(Smartphone), "smartphone" => Ok(Smartphone),
"speaker" => Ok(Speaker), "speaker" => Ok(Speaker),
"tv" => Ok(TV), "tv" => Ok(Tv),
"avr" => Ok(AVR), "avr" => Ok(Avr),
"stb" => Ok(STB), "stb" => Ok(Stb),
"audiodongle" => Ok(AudioDongle), "audiodongle" => Ok(AudioDongle),
"gameconsole" => Ok(GameConsole), "gameconsole" => Ok(GameConsole),
"castaudio" => Ok(CastAudio), "castaudio" => Ok(CastAudio),
@ -83,9 +80,9 @@ impl fmt::Display for DeviceType {
Tablet => f.write_str("Tablet"), Tablet => f.write_str("Tablet"),
Smartphone => f.write_str("Smartphone"), Smartphone => f.write_str("Smartphone"),
Speaker => f.write_str("Speaker"), Speaker => f.write_str("Speaker"),
TV => f.write_str("TV"), Tv => f.write_str("TV"),
AVR => f.write_str("AVR"), Avr => f.write_str("AVR"),
STB => f.write_str("STB"), Stb => f.write_str("STB"),
AudioDongle => f.write_str("AudioDongle"), AudioDongle => f.write_str("AudioDongle"),
GameConsole => f.write_str("GameConsole"), GameConsole => f.write_str("GameConsole"),
CastAudio => f.write_str("CastAudio"), CastAudio => f.write_str("CastAudio"),

View file

@ -2,7 +2,7 @@ use byteorder::{BigEndian, ByteOrder};
use bytes::{BufMut, Bytes, BytesMut}; use bytes::{BufMut, Bytes, BytesMut};
use shannon::Shannon; use shannon::Shannon;
use std::io; use std::io;
use tokio_io::codec::{Decoder, Encoder}; use tokio_util::codec::{Decoder, Encoder};
const HEADER_SIZE: usize = 3; const HEADER_SIZE: usize = 3;
const MAC_SIZE: usize = 4; const MAC_SIZE: usize = 4;
@ -13,7 +13,7 @@ enum DecodeState {
Payload(u8, usize), Payload(u8, usize),
} }
pub struct APCodec { pub struct ApCodec {
encode_nonce: u32, encode_nonce: u32,
encode_cipher: Shannon, encode_cipher: Shannon,
@ -22,9 +22,9 @@ pub struct APCodec {
decode_state: DecodeState, decode_state: DecodeState,
} }
impl APCodec { impl ApCodec {
pub fn new(send_key: &[u8], recv_key: &[u8]) -> APCodec { pub fn new(send_key: &[u8], recv_key: &[u8]) -> ApCodec {
APCodec { ApCodec {
encode_nonce: 0, encode_nonce: 0,
encode_cipher: Shannon::new(send_key), encode_cipher: Shannon::new(send_key),
@ -35,8 +35,7 @@ impl APCodec {
} }
} }
impl Encoder for APCodec { impl Encoder<(u8, Vec<u8>)> for ApCodec {
type Item = (u8, Vec<u8>);
type Error = io::Error; type Error = io::Error;
fn encode(&mut self, item: (u8, Vec<u8>), buf: &mut BytesMut) -> io::Result<()> { fn encode(&mut self, item: (u8, Vec<u8>), buf: &mut BytesMut) -> io::Result<()> {
@ -45,7 +44,7 @@ impl Encoder for APCodec {
buf.reserve(3 + payload.len()); buf.reserve(3 + payload.len());
buf.put_u8(cmd); buf.put_u8(cmd);
buf.put_u16_be(payload.len() as u16); buf.put_u16(payload.len() as u16);
buf.extend_from_slice(&payload); buf.extend_from_slice(&payload);
self.encode_cipher.nonce_u32(self.encode_nonce); self.encode_cipher.nonce_u32(self.encode_nonce);
@ -61,7 +60,7 @@ impl Encoder for APCodec {
} }
} }
impl Decoder for APCodec { impl Decoder for ApCodec {
type Item = (u8, Bytes); type Item = (u8, Bytes);
type Error = io::Error; type Error = io::Error;

View file

@ -1,59 +1,24 @@
use byteorder::{BigEndian, ByteOrder, WriteBytesExt}; use byteorder::{BigEndian, ByteOrder, WriteBytesExt};
use futures::{Async, Future, Poll};
use hmac::{Hmac, Mac, NewMac}; use hmac::{Hmac, Mac, NewMac};
use protobuf::{self, Message}; use protobuf::{self, Message};
use rand::thread_rng; use rand::{thread_rng, RngCore};
use sha1::Sha1; use sha1::Sha1;
use std::io::{self, Read}; use std::io;
use std::marker::PhantomData; use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
use tokio_codec::{Decoder, Framed}; use tokio_util::codec::{Decoder, Framed};
use tokio_io::io::{read_exact, write_all, ReadExact, Window, WriteAll};
use tokio_io::{AsyncRead, AsyncWrite};
use super::codec::APCodec; use super::codec::ApCodec;
use crate::diffie_hellman::DHLocalKeys; use crate::diffie_hellman::DhLocalKeys;
use crate::protocol; use crate::protocol;
use crate::protocol::keyexchange::{APResponseMessage, ClientHello, ClientResponsePlaintext}; use crate::protocol::keyexchange::{APResponseMessage, ClientHello, ClientResponsePlaintext};
use crate::util;
pub struct Handshake<T> { pub async fn handshake<T: AsyncRead + AsyncWrite + Unpin>(
keys: DHLocalKeys, mut connection: T,
state: HandshakeState<T>, ) -> io::Result<Framed<T, ApCodec>> {
} let local_keys = DhLocalKeys::random(&mut thread_rng());
let gc = local_keys.public_key();
enum HandshakeState<T> { let mut accumulator = client_hello(&mut connection, gc).await?;
ClientHello(WriteAll<T, Vec<u8>>), let message: APResponseMessage = recv_packet(&mut connection, &mut accumulator).await?;
APResponse(RecvPacket<T, APResponseMessage>),
ClientResponse(Option<APCodec>, WriteAll<T, Vec<u8>>),
}
pub fn handshake<T: AsyncRead + AsyncWrite>(connection: T) -> Handshake<T> {
let local_keys = DHLocalKeys::random(&mut thread_rng());
let client_hello = client_hello(connection, local_keys.public_key());
Handshake {
keys: local_keys,
state: HandshakeState::ClientHello(client_hello),
}
}
impl<T: AsyncRead + AsyncWrite> Future for Handshake<T> {
type Item = Framed<T, APCodec>;
type Error = io::Error;
fn poll(&mut self) -> Poll<Self::Item, io::Error> {
use self::HandshakeState::*;
loop {
self.state = match self.state {
ClientHello(ref mut write) => {
let (connection, accumulator) = try_ready!(write.poll());
let read = recv_packet(connection, accumulator);
APResponse(read)
}
APResponse(ref mut read) => {
let (connection, message, accumulator) = try_ready!(read.poll());
let remote_key = message let remote_key = message
.get_challenge() .get_challenge()
.get_login_crypto_challenge() .get_login_crypto_challenge()
@ -61,27 +26,22 @@ impl<T: AsyncRead + AsyncWrite> Future for Handshake<T> {
.get_gs() .get_gs()
.to_owned(); .to_owned();
let shared_secret = self.keys.shared_secret(&remote_key); let shared_secret = local_keys.shared_secret(&remote_key);
let (challenge, send_key, recv_key) = let (challenge, send_key, recv_key) = compute_keys(&shared_secret, &accumulator);
compute_keys(&shared_secret, &accumulator); let codec = ApCodec::new(&send_key, &recv_key);
let codec = APCodec::new(&send_key, &recv_key);
let write = client_response(connection, challenge); client_response(&mut connection, challenge).await?;
ClientResponse(Some(codec), write)
}
ClientResponse(ref mut codec, ref mut write) => { Ok(codec.framed(connection))
let (connection, _) = try_ready!(write.poll());
let codec = codec.take().unwrap();
let framed = codec.framed(connection);
return Ok(Async::Ready(framed));
}
}
}
}
} }
fn client_hello<T: AsyncWrite>(connection: T, gc: Vec<u8>) -> WriteAll<T, Vec<u8>> { async fn client_hello<T>(connection: &mut T, gc: Vec<u8>) -> io::Result<Vec<u8>>
where
T: AsyncWrite + Unpin,
{
let mut client_nonce = vec![0; 0x10];
thread_rng().fill_bytes(&mut client_nonce);
let mut packet = ClientHello::new(); let mut packet = ClientHello::new();
packet packet
.mut_build_info() .mut_build_info()
@ -101,18 +61,22 @@ fn client_hello<T: AsyncWrite>(connection: T, gc: Vec<u8>) -> WriteAll<T, Vec<u8
.mut_login_crypto_hello() .mut_login_crypto_hello()
.mut_diffie_hellman() .mut_diffie_hellman()
.set_server_keys_known(1); .set_server_keys_known(1);
packet.set_client_nonce(util::rand_vec(&mut thread_rng(), 0x10)); packet.set_client_nonce(client_nonce);
packet.set_padding(vec![0x1e]); packet.set_padding(vec![0x1e]);
let mut buffer = vec![0, 4]; let mut buffer = vec![0, 4];
let size = 2 + 4 + packet.compute_size(); let size = 2 + 4 + packet.compute_size();
buffer.write_u32::<BigEndian>(size).unwrap(); <Vec<u8> as WriteBytesExt>::write_u32::<BigEndian>(&mut buffer, size).unwrap();
packet.write_to_vec(&mut buffer).unwrap(); packet.write_to_vec(&mut buffer).unwrap();
write_all(connection, buffer) connection.write_all(&buffer[..]).await?;
Ok(buffer)
} }
fn client_response<T: AsyncWrite>(connection: T, challenge: Vec<u8>) -> WriteAll<T, Vec<u8>> { async fn client_response<T>(connection: &mut T, challenge: Vec<u8>) -> io::Result<()>
where
T: AsyncWrite + Unpin,
{
let mut packet = ClientResponsePlaintext::new(); let mut packet = ClientResponsePlaintext::new();
packet packet
.mut_login_crypto_response() .mut_login_crypto_response()
@ -123,70 +87,35 @@ fn client_response<T: AsyncWrite>(connection: T, challenge: Vec<u8>) -> WriteAll
let mut buffer = vec![]; let mut buffer = vec![];
let size = 4 + packet.compute_size(); let size = 4 + packet.compute_size();
buffer.write_u32::<BigEndian>(size).unwrap(); <Vec<u8> as WriteBytesExt>::write_u32::<BigEndian>(&mut buffer, size).unwrap();
packet.write_to_vec(&mut buffer).unwrap(); packet.write_to_vec(&mut buffer).unwrap();
write_all(connection, buffer) connection.write_all(&buffer[..]).await?;
Ok(())
} }
enum RecvPacket<T, M: Message> { async fn recv_packet<T, M>(connection: &mut T, acc: &mut Vec<u8>) -> io::Result<M>
Header(ReadExact<T, Window<Vec<u8>>>, PhantomData<M>),
Body(ReadExact<T, Window<Vec<u8>>>, PhantomData<M>),
}
fn recv_packet<T: AsyncRead, M>(connection: T, acc: Vec<u8>) -> RecvPacket<T, M>
where where
T: Read, T: AsyncRead + Unpin,
M: Message, M: Message,
{ {
RecvPacket::Header(read_into_accumulator(connection, 4, acc), PhantomData) let header = read_into_accumulator(connection, 4, acc).await?;
let size = BigEndian::read_u32(header) as usize;
let data = read_into_accumulator(connection, size - 4, acc).await?;
let message = protobuf::parse_from_bytes(data).unwrap();
Ok(message)
} }
impl<T: AsyncRead, M> Future for RecvPacket<T, M> async fn read_into_accumulator<'a, 'b, T: AsyncRead + Unpin>(
where connection: &'a mut T,
T: Read,
M: Message,
{
type Item = (T, M, Vec<u8>);
type Error = io::Error;
fn poll(&mut self) -> Poll<Self::Item, io::Error> {
use self::RecvPacket::*;
loop {
*self = match *self {
Header(ref mut read, _) => {
let (connection, header) = try_ready!(read.poll());
let size = BigEndian::read_u32(header.as_ref()) as usize;
let acc = header.into_inner();
let read = read_into_accumulator(connection, size - 4, acc);
RecvPacket::Body(read, PhantomData)
}
Body(ref mut read, _) => {
let (connection, data) = try_ready!(read.poll());
let message = protobuf::parse_from_bytes(data.as_ref()).unwrap();
let acc = data.into_inner();
return Ok(Async::Ready((connection, message, acc)));
}
}
}
}
}
fn read_into_accumulator<T: AsyncRead>(
connection: T,
size: usize, size: usize,
mut acc: Vec<u8>, acc: &'b mut Vec<u8>,
) -> ReadExact<T, Window<Vec<u8>>> { ) -> io::Result<&'b mut [u8]> {
let offset = acc.len(); let offset = acc.len();
acc.resize(offset + size, 0); acc.resize(offset + size, 0);
let mut window = Window::new(acc); connection.read_exact(&mut acc[offset..]).await?;
window.set_start(offset); Ok(&mut acc[offset..])
read_exact(connection, window)
} }
fn compute_keys(shared_secret: &[u8], packets: &[u8]) -> (Vec<u8>, Vec<u8>, Vec<u8>) { fn compute_keys(shared_secret: &[u8], packets: &[u8]) -> (Vec<u8>, Vec<u8>, Vec<u8>) {
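With the futures-0.1 state machine replaced by async/await, driving the handshake is now a single .await on a tokio TcpStream. A rough usage sketch, assuming a tokio runtime and a reachable access point address (the address string is a placeholder):

use tokio::net::TcpStream;

// Hypothetical caller: open a socket and run the Spotify AP handshake on it.
async fn connect_and_handshake(addr: &str) -> std::io::Result<()> {
    let socket = TcpStream::connect(addr).await?;
    // handshake() performs ClientHello -> APResponse -> ClientResponsePlaintext
    // and returns a Framed<TcpStream, ApCodec> ready for encrypted packets.
    let _transport = handshake(socket).await?;
    Ok(())
}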

View file

@ -1,74 +1,117 @@
mod codec; mod codec;
mod handshake; mod handshake;
pub use self::codec::APCodec; pub use self::codec::ApCodec;
pub use self::handshake::handshake; pub use self::handshake::handshake;
use futures::{Future, Sink, Stream}; use std::io::{self, ErrorKind};
use protobuf::{self, Message};
use std::io;
use std::net::ToSocketAddrs; use std::net::ToSocketAddrs;
use tokio_codec::Framed;
use tokio_core::net::TcpStream; use futures_util::{SinkExt, StreamExt};
use tokio_core::reactor::Handle; use protobuf::{self, Message, ProtobufError};
use thiserror::Error;
use tokio::net::TcpStream;
use tokio_util::codec::Framed;
use url::Url; use url::Url;
use crate::authentication::{AuthenticationError, Credentials}; use crate::authentication::Credentials;
use crate::protocol::keyexchange::{APLoginFailed, ErrorCode};
use crate::proxytunnel;
use crate::version; use crate::version;
use crate::proxytunnel; pub type Transport = Framed<TcpStream, ApCodec>;
pub type Transport = Framed<TcpStream, APCodec>; fn login_error_message(code: &ErrorCode) -> &'static str {
pub use ErrorCode::*;
pub fn connect( match code {
addr: String, ProtocolError => "Protocol error",
handle: &Handle, TryAnotherAP => "Try another AP",
proxy: &Option<Url>, BadConnectionId => "Bad connection id",
) -> Box<dyn Future<Item = Transport, Error = io::Error>> { TravelRestriction => "Travel restriction",
let (addr, connect_url) = match *proxy { PremiumAccountRequired => "Premium account required",
Some(ref url) => { BadCredentials => "Bad credentials",
info!("Using proxy \"{}\"", url); CouldNotValidateCredentials => "Could not validate credentials",
match url.to_socket_addrs().and_then(|mut iter| { AccountExists => "Account exists",
iter.next().ok_or(io::Error::new( ExtraVerificationRequired => "Extra verification required",
io::ErrorKind::NotFound, InvalidAppKey => "Invalid app key",
"Can't resolve proxy server address", ApplicationBanned => "Application banned",
))
}) {
Ok(socket_addr) => (socket_addr, Some(addr)),
Err(error) => return Box::new(futures::future::err(error)),
}
}
None => {
match addr.to_socket_addrs().and_then(|mut iter| {
iter.next().ok_or(io::Error::new(
io::ErrorKind::NotFound,
"Can't resolve server address",
))
}) {
Ok(socket_addr) => (socket_addr, None),
Err(error) => return Box::new(futures::future::err(error)),
}
}
};
let socket = TcpStream::connect(&addr, handle);
if let Some(connect_url) = connect_url {
let connection = socket
.and_then(move |socket| proxytunnel::connect(socket, &connect_url).and_then(handshake));
Box::new(connection)
} else {
let connection = socket.and_then(handshake);
Box::new(connection)
} }
} }
pub fn authenticate( #[derive(Debug, Error)]
transport: Transport, pub enum AuthenticationError {
#[error("Login failed with reason: {}", login_error_message(.0))]
LoginFailed(ErrorCode),
#[error("Authentication failed: {0}")]
IoError(#[from] io::Error),
}
impl From<ProtobufError> for AuthenticationError {
fn from(e: ProtobufError) -> Self {
io::Error::new(ErrorKind::InvalidData, e).into()
}
}
impl From<APLoginFailed> for AuthenticationError {
fn from(login_failure: APLoginFailed) -> Self {
Self::LoginFailed(login_failure.get_error_code())
}
}
pub async fn connect(addr: String, proxy: Option<&Url>) -> io::Result<Transport> {
let socket = if let Some(proxy_url) = proxy {
info!("Using proxy \"{}\"", proxy_url);
let socket_addr = proxy_url.socket_addrs(|| None).and_then(|addrs| {
addrs.into_iter().next().ok_or_else(|| {
io::Error::new(
io::ErrorKind::NotFound,
"Can't resolve proxy server address",
)
})
})?;
let socket = TcpStream::connect(&socket_addr).await?;
let uri = addr.parse::<http::Uri>().map_err(|_| {
io::Error::new(
io::ErrorKind::InvalidData,
"Can't parse access point address",
)
})?;
let host = uri.host().ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidInput,
"The access point address contains no hostname",
)
})?;
let port = uri.port().ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidInput,
"The access point address contains no port",
)
})?;
proxytunnel::proxy_connect(socket, host, port.as_str()).await?
} else {
let socket_addr = addr.to_socket_addrs()?.next().ok_or_else(|| {
io::Error::new(
io::ErrorKind::NotFound,
"Can't resolve access point address",
)
})?;
TcpStream::connect(&socket_addr).await?
};
handshake(socket).await
}
pub async fn authenticate(
transport: &mut Transport,
credentials: Credentials, credentials: Credentials,
device_id: String, device_id: &str,
) -> Box<dyn Future<Item = (Transport, Credentials), Error = AuthenticationError>> { ) -> Result<Credentials, AuthenticationError> {
use crate::protocol::authentication::{APWelcome, ClientResponseEncrypted, CpuFamily, Os}; use crate::protocol::authentication::{APWelcome, ClientResponseEncrypted, CpuFamily, Os};
use crate::protocol::keyexchange::APLoginFailed;
let mut packet = ClientResponseEncrypted::new(); let mut packet = ClientResponseEncrypted::new();
packet packet
@ -91,21 +134,19 @@ pub fn authenticate(
version::SHA_SHORT, version::SHA_SHORT,
version::BUILD_ID version::BUILD_ID
)); ));
packet.mut_system_info().set_device_id(device_id); packet
.mut_system_info()
.set_device_id(device_id.to_string());
packet.set_version_string(version::VERSION_STRING.to_string()); packet.set_version_string(version::VERSION_STRING.to_string());
let cmd = 0xab; let cmd = 0xab;
let data = packet.write_to_bytes().unwrap(); let data = packet.write_to_bytes().unwrap();
Box::new( transport.send((cmd, data)).await?;
transport let (cmd, data) = transport.next().await.expect("EOF")?;
.send((cmd, data)) match cmd {
.and_then(|transport| transport.into_future().map_err(|(err, _stream)| err)) 0xac => {
.map_err(|io_err| io_err.into()) let welcome_data: APWelcome = protobuf::parse_from_bytes(data.as_ref())?;
.and_then(|(packet, transport)| match packet {
Some((0xac, data)) => {
let welcome_data: APWelcome =
protobuf::parse_from_bytes(data.as_ref()).unwrap();
let reusable_credentials = Credentials { let reusable_credentials = Credentials {
username: welcome_data.get_canonical_username().to_owned(), username: welcome_data.get_canonical_username().to_owned(),
@ -113,17 +154,15 @@ pub fn authenticate(
auth_data: welcome_data.get_reusable_auth_credentials().to_owned(), auth_data: welcome_data.get_reusable_auth_credentials().to_owned(),
}; };
Ok((transport, reusable_credentials)) Ok(reusable_credentials)
} }
0xad => {
Some((0xad, data)) => { let error_data: APLoginFailed = protobuf::parse_from_bytes(data.as_ref())?;
let error_data: APLoginFailed =
protobuf::parse_from_bytes(data.as_ref()).unwrap();
Err(error_data.into()) Err(error_data.into())
} }
_ => {
Some((cmd, _)) => panic!("Unexpected packet {:?}", cmd), let msg = format!("Received invalid packet: {}", cmd);
None => panic!("EOF"), Err(io::Error::new(ErrorKind::InvalidData, msg).into())
}), }
) }
} }
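Seen end to end, the rewritten module boils down to two awaits: connect() yields a Transport and authenticate() logs in over it. A rough sketch of the calling pattern, written as if inside this module, assuming no proxy and valid credentials (Session::connect further down wraps this same flow):

use crate::authentication::Credentials;

// Hypothetical helper composing connect() and authenticate().
async fn login(
    ap_addr: String,
    credentials: Credentials,
    device_id: &str,
) -> Result<Credentials, AuthenticationError> {
    // The io::Error from connect() converts into AuthenticationError via #[from].
    let mut transport = connect(ap_addr, None).await?;
    // The server replies with reusable credentials that can be cached and
    // used for later logins in place of the password.
    authenticate(&mut transport, credentials, device_id).await
}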

View file

@ -1,12 +1,12 @@
use num_bigint::BigUint; use num_bigint::{BigUint, RandBigInt};
use num_traits::FromPrimitive; use num_integer::Integer;
use rand::Rng; use num_traits::{One, Zero};
use once_cell::sync::Lazy;
use rand::{CryptoRng, Rng};
use crate::util; static DH_GENERATOR: Lazy<BigUint> = Lazy::new(|| BigUint::from_bytes_be(&[0x02]));
static DH_PRIME: Lazy<BigUint> = Lazy::new(|| {
lazy_static! { BigUint::from_bytes_be(&[
pub static ref DH_GENERATOR: BigUint = BigUint::from_u64(0x2).unwrap();
pub static ref DH_PRIME: BigUint = BigUint::from_bytes_be(&[
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc9, 0x0f, 0xda, 0xa2, 0x21, 0x68, 0xc2, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xc9, 0x0f, 0xda, 0xa2, 0x21, 0x68, 0xc2,
0x34, 0xc4, 0xc6, 0x62, 0x8b, 0x80, 0xdc, 0x1c, 0xd1, 0x29, 0x02, 0x4e, 0x08, 0x8a, 0x67, 0x34, 0xc4, 0xc6, 0x62, 0x8b, 0x80, 0xdc, 0x1c, 0xd1, 0x29, 0x02, 0x4e, 0x08, 0x8a, 0x67,
0xcc, 0x74, 0x02, 0x0b, 0xbe, 0xa6, 0x3b, 0x13, 0x9b, 0x22, 0x51, 0x4a, 0x08, 0x79, 0x8e, 0xcc, 0x74, 0x02, 0x0b, 0xbe, 0xa6, 0x3b, 0x13, 0x9b, 0x22, 0x51, 0x4a, 0x08, 0x79, 0x8e,
@ -14,24 +14,38 @@ lazy_static! {
0xf2, 0x5f, 0x14, 0x37, 0x4f, 0xe1, 0x35, 0x6d, 0x6d, 0x51, 0xc2, 0x45, 0xe4, 0x85, 0xb5, 0xf2, 0x5f, 0x14, 0x37, 0x4f, 0xe1, 0x35, 0x6d, 0x6d, 0x51, 0xc2, 0x45, 0xe4, 0x85, 0xb5,
0x76, 0x62, 0x5e, 0x7e, 0xc6, 0xf4, 0x4c, 0x42, 0xe9, 0xa6, 0x3a, 0x36, 0x20, 0xff, 0xff, 0x76, 0x62, 0x5e, 0x7e, 0xc6, 0xf4, 0x4c, 0x42, 0xe9, 0xa6, 0x3a, 0x36, 0x20, 0xff, 0xff,
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
]); ])
});
fn powm(base: &BigUint, exp: &BigUint, modulus: &BigUint) -> BigUint {
let mut base = base.clone();
let mut exp = exp.clone();
let mut result: BigUint = One::one();
while !exp.is_zero() {
if exp.is_odd() {
result = (result * &base) % modulus;
}
exp >>= 1;
base = (&base * &base) % modulus;
}
result
} }
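The powm helper that moved in here is plain square-and-multiply modular exponentiation. A quick sanity check on textbook-sized numbers (illustrative only; the real inputs are 95-byte keys):

use num_bigint::BigUint;

fn powm_sanity_check() {
    let base = BigUint::from(4u32);
    let exp = BigUint::from(13u32);
    let modulus = BigUint::from(497u32);
    // 4^13 mod 497 = 445, the usual worked example of modular exponentiation.
    assert_eq!(powm(&base, &exp, &modulus), BigUint::from(445u32));
}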
pub struct DHLocalKeys { pub struct DhLocalKeys {
private_key: BigUint, private_key: BigUint,
public_key: BigUint, public_key: BigUint,
} }
impl DHLocalKeys { impl DhLocalKeys {
pub fn random<R: Rng>(rng: &mut R) -> DHLocalKeys { pub fn random<R: Rng + CryptoRng>(rng: &mut R) -> DhLocalKeys {
let key_data = util::rand_vec(rng, 95); let private_key = rng.gen_biguint(95 * 8);
let public_key = powm(&DH_GENERATOR, &private_key, &DH_PRIME);
let private_key = BigUint::from_bytes_be(&key_data); DhLocalKeys {
let public_key = util::powm(&DH_GENERATOR, &private_key, &DH_PRIME); private_key,
public_key,
DHLocalKeys {
private_key: private_key,
public_key: public_key,
} }
} }
@ -40,7 +54,7 @@ impl DHLocalKeys {
} }
pub fn shared_secret(&self, remote_key: &[u8]) -> Vec<u8> { pub fn shared_secret(&self, remote_key: &[u8]) -> Vec<u8> {
let shared_key = util::powm( let shared_key = powm(
&BigUint::from_bytes_be(remote_key), &BigUint::from_bytes_be(remote_key),
&self.private_key, &self.private_key,
&DH_PRIME, &DH_PRIME,

View file

@ -1,8 +1,6 @@
use futures::Future; use serde::Deserialize;
use serde_json;
use crate::mercury::MercuryError; use crate::{mercury::MercuryError, session::Session};
use crate::session::Session;
#[derive(Deserialize, Debug, Clone)] #[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
@ -13,20 +11,16 @@ pub struct Token {
pub scope: Vec<String>, pub scope: Vec<String>,
} }
pub fn get_token( pub async fn get_token(
session: &Session, session: &Session,
client_id: &str, client_id: &str,
scopes: &str, scopes: &str,
) -> Box<dyn Future<Item = Token, Error = MercuryError>> { ) -> Result<Token, MercuryError> {
let url = format!( let url = format!(
"hm://keymaster/token/authenticated?client_id={}&scope={}", "hm://keymaster/token/authenticated?client_id={}&scope={}",
client_id, scopes client_id, scopes
); );
Box::new(session.mercury().get(url).map(move |response| { let response = session.mercury().get(url).await?;
let data = response.payload.first().expect("Empty payload"); let data = response.payload.first().expect("Empty payload");
let data = String::from_utf8(data.clone()).unwrap(); serde_json::from_slice(data.as_ref()).map_err(|_| MercuryError)
let token: Token = serde_json::from_str(&data).unwrap();
token
}))
} }

View file

@ -1,56 +1,38 @@
#![cfg_attr(feature = "cargo-clippy", allow(unused_io_amount))] #![allow(clippy::unused_io_amount)]
#[macro_use]
extern crate error_chain;
#[macro_use]
extern crate futures;
#[macro_use]
extern crate lazy_static;
#[macro_use] #[macro_use]
extern crate log; extern crate log;
#[macro_use]
extern crate serde_derive;
extern crate aes; use librespot_protocol as protocol;
extern crate base64;
extern crate byteorder;
extern crate bytes;
extern crate hmac;
extern crate httparse;
extern crate hyper;
extern crate hyper_proxy;
extern crate num_bigint;
extern crate num_integer;
extern crate num_traits;
extern crate pbkdf2;
extern crate protobuf;
extern crate rand;
extern crate serde;
extern crate serde_json;
extern crate sha1;
extern crate shannon;
extern crate tokio_codec;
extern crate tokio_core;
extern crate tokio_io;
extern crate url;
extern crate uuid;
extern crate librespot_protocol as protocol;
#[macro_use] #[macro_use]
mod component; mod component;
mod apresolve;
pub mod audio_key; pub mod audio_key;
pub mod authentication; pub mod authentication;
pub mod cache; pub mod cache;
pub mod channel; pub mod channel;
pub mod config; pub mod config;
mod connection; mod connection;
#[doc(hidden)]
pub mod diffie_hellman; pub mod diffie_hellman;
pub mod keymaster; pub mod keymaster;
pub mod mercury; pub mod mercury;
mod proxytunnel; mod proxytunnel;
pub mod session; pub mod session;
pub mod spotify_id; pub mod spotify_id;
#[doc(hidden)]
pub mod util; pub mod util;
pub mod version; pub mod version;
const AP_FALLBACK: &str = "ap.spotify.com:443";
#[cfg(feature = "apresolve")]
mod apresolve;
#[cfg(not(feature = "apresolve"))]
mod apresolve {
pub async fn apresolve(_: Option<&url::Url>, _: Option<u16>) -> String {
return super::AP_FALLBACK.into();
}
}

View file

@ -1,13 +1,15 @@
use crate::protocol; use std::collections::HashMap;
use crate::util::url_encode; use std::future::Future;
use std::mem;
use std::pin::Pin;
use std::task::Context;
use std::task::Poll;
use byteorder::{BigEndian, ByteOrder}; use byteorder::{BigEndian, ByteOrder};
use bytes::Bytes; use bytes::Bytes;
use futures::sync::{mpsc, oneshot}; use tokio::sync::{mpsc, oneshot};
use futures::{Async, Future, Poll};
use protobuf;
use std::collections::HashMap;
use std::mem;
use crate::protocol;
use crate::util::SeqGenerator; use crate::util::SeqGenerator;
mod types; mod types;
@ -31,17 +33,18 @@ pub struct MercuryPending {
callback: Option<oneshot::Sender<Result<MercuryResponse, MercuryError>>>, callback: Option<oneshot::Sender<Result<MercuryResponse, MercuryError>>>,
} }
pub struct MercuryFuture<T>(oneshot::Receiver<Result<T, MercuryError>>); pub struct MercuryFuture<T> {
impl<T> Future for MercuryFuture<T> { receiver: oneshot::Receiver<Result<T, MercuryError>>,
type Item = T; }
type Error = MercuryError;
fn poll(&mut self) -> Poll<T, MercuryError> { impl<T> Future for MercuryFuture<T> {
match self.0.poll() { type Output = Result<T, MercuryError>;
Ok(Async::Ready(Ok(value))) => Ok(Async::Ready(value)),
Ok(Async::Ready(Err(err))) => Err(err), fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
Ok(Async::NotReady) => Ok(Async::NotReady), match Pin::new(&mut self.receiver).poll(cx) {
Err(oneshot::Canceled) => Err(MercuryError), Poll::Ready(Ok(x)) => Poll::Ready(x),
Poll::Ready(Err(_)) => Poll::Ready(Err(MercuryError)),
Poll::Pending => Poll::Pending,
} }
} }
} }
@ -73,12 +76,12 @@ impl MercuryManager {
let data = req.encode(&seq); let data = req.encode(&seq);
self.session().send_packet(cmd, data); self.session().send_packet(cmd, data);
MercuryFuture(rx) MercuryFuture { receiver: rx }
} }
pub fn get<T: Into<String>>(&self, uri: T) -> MercuryFuture<MercuryResponse> { pub fn get<T: Into<String>>(&self, uri: T) -> MercuryFuture<MercuryResponse> {
self.request(MercuryRequest { self.request(MercuryRequest {
method: MercuryMethod::GET, method: MercuryMethod::Get,
uri: uri.into(), uri: uri.into(),
content_type: None, content_type: None,
payload: Vec::new(), payload: Vec::new(),
@ -87,7 +90,7 @@ impl MercuryManager {
pub fn send<T: Into<String>>(&self, uri: T, data: Vec<u8>) -> MercuryFuture<MercuryResponse> { pub fn send<T: Into<String>>(&self, uri: T, data: Vec<u8>) -> MercuryFuture<MercuryResponse> {
self.request(MercuryRequest { self.request(MercuryRequest {
method: MercuryMethod::SEND, method: MercuryMethod::Send,
uri: uri.into(), uri: uri.into(),
content_type: None, content_type: None,
payload: vec![data], payload: vec![data],
@ -101,24 +104,26 @@ impl MercuryManager {
pub fn subscribe<T: Into<String>>( pub fn subscribe<T: Into<String>>(
&self, &self,
uri: T, uri: T,
) -> Box<dyn Future<Item = mpsc::UnboundedReceiver<MercuryResponse>, Error = MercuryError>> ) -> impl Future<Output = Result<mpsc::UnboundedReceiver<MercuryResponse>, MercuryError>> + 'static
{ {
let uri = uri.into(); let uri = uri.into();
let request = self.request(MercuryRequest { let request = self.request(MercuryRequest {
method: MercuryMethod::SUB, method: MercuryMethod::Sub,
uri: uri.clone(), uri: uri.clone(),
content_type: None, content_type: None,
payload: Vec::new(), payload: Vec::new(),
}); });
let manager = self.clone(); let manager = self.clone();
Box::new(request.map(move |response| { async move {
let (tx, rx) = mpsc::unbounded(); let response = request.await?;
let (tx, rx) = mpsc::unbounded_channel();
manager.lock(move |inner| { manager.lock(move |inner| {
if !inner.invalid { if !inner.invalid {
debug!("subscribed uri={} count={}", uri, response.payload.len()); debug!("subscribed uri={} count={}", uri, response.payload.len());
if response.payload.len() > 0 { if !response.payload.is_empty() {
// Old subscription protocol, watch the provided list of URIs // Old subscription protocol, watch the provided list of URIs
for sub in response.payload { for sub in response.payload {
let mut sub: protocol::pubsub::Subscription = let mut sub: protocol::pubsub::Subscription =
@ -136,8 +141,8 @@ impl MercuryManager {
} }
}); });
rx Ok(rx)
})) }
} }
pub(crate) fn dispatch(&self, cmd: u8, mut data: Bytes) { pub(crate) fn dispatch(&self, cmd: u8, mut data: Bytes) {
@ -193,7 +198,7 @@ impl MercuryManager {
let header: protocol::mercury::Header = protobuf::parse_from_bytes(&header_data).unwrap(); let header: protocol::mercury::Header = protobuf::parse_from_bytes(&header_data).unwrap();
let response = MercuryResponse { let response = MercuryResponse {
uri: url_encode(header.get_uri()).to_owned(), uri: header.get_uri().to_string(),
status_code: header.get_status_code(), status_code: header.get_status_code(),
payload: pending.parts, payload: pending.parts,
}; };
@ -205,17 +210,29 @@ impl MercuryManager {
if let Some(cb) = pending.callback { if let Some(cb) = pending.callback {
let _ = cb.send(Err(MercuryError)); let _ = cb.send(Err(MercuryError));
} }
} else { } else if cmd == 0xb5 {
if cmd == 0xb5 {
self.lock(|inner| { self.lock(|inner| {
let mut found = false; let mut found = false;
// TODO: This is just a workaround to make utf-8 encoded usernames work.
// A better solution would be to use an uri struct and urlencode it directly
// before sending while saving the subscription under its unencoded form.
let mut uri_split = response.uri.split('/');
let encoded_uri = std::iter::once(uri_split.next().unwrap().to_string())
.chain(uri_split.map(|component| {
form_urlencoded::byte_serialize(component.as_bytes()).collect::<String>()
}))
.collect::<Vec<String>>()
.join("/");
inner.subscriptions.retain(|&(ref prefix, ref sub)| { inner.subscriptions.retain(|&(ref prefix, ref sub)| {
if response.uri.starts_with(prefix) { if encoded_uri.starts_with(prefix) {
found = true; found = true;
// if send fails, remove from list of subs // if send fails, remove from list of subs
// TODO: send unsub message // TODO: send unsub message
sub.unbounded_send(response.clone()).is_ok() sub.send(response.clone()).is_ok()
} else { } else {
// URI doesn't match // URI doesn't match
true true
@ -230,7 +247,6 @@ impl MercuryManager {
let _ = cb.send(Ok(response)); let _ = cb.send(Ok(response));
} }
} }
}
pub(crate) fn shutdown(&self) { pub(crate) fn shutdown(&self) {
self.lock(|inner| { self.lock(|inner| {

View file

@ -1,4 +1,3 @@
use futures::{Async, AsyncSink, Future, Poll, Sink, StartSend};
use std::collections::VecDeque; use std::collections::VecDeque;
use super::*; use super::*;
@ -13,11 +12,27 @@ impl MercurySender {
// TODO: pub(super) when stable // TODO: pub(super) when stable
pub(crate) fn new(mercury: MercuryManager, uri: String) -> MercurySender { pub(crate) fn new(mercury: MercuryManager, uri: String) -> MercurySender {
MercurySender { MercurySender {
mercury: mercury, mercury,
uri: uri, uri,
pending: VecDeque::new(), pending: VecDeque::new(),
} }
} }
pub fn is_flushed(&self) -> bool {
self.pending.is_empty()
}
pub fn send(&mut self, item: Vec<u8>) {
let task = self.mercury.send(self.uri.clone(), item);
self.pending.push_back(task);
}
pub async fn flush(&mut self) -> Result<(), MercuryError> {
for fut in self.pending.drain(..) {
fut.await?;
}
Ok(())
}
} }
impl Clone for MercurySender { impl Clone for MercurySender {
@ -29,28 +44,3 @@ impl Clone for MercurySender {
} }
} }
} }
impl Sink for MercurySender {
type SinkItem = Vec<u8>;
type SinkError = MercuryError;
fn start_send(&mut self, item: Self::SinkItem) -> StartSend<Self::SinkItem, Self::SinkError> {
let task = self.mercury.send(self.uri.clone(), item);
self.pending.push_back(task);
Ok(AsyncSink::Ready)
}
fn poll_complete(&mut self) -> Poll<(), Self::SinkError> {
loop {
match self.pending.front_mut() {
Some(task) => {
try_ready!(task.poll());
}
None => {
return Ok(Async::Ready(()));
}
}
self.pending.pop_front();
}
}
}
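With the Sink implementation dropped, callers now queue payloads with send() and drain them with an explicit flush(). A rough in-crate sketch (new() is still pub(crate)); the URI is a placeholder:

// Hypothetical in-crate caller: queue two payloads, then await them all.
async fn send_batch(mercury: MercuryManager) -> Result<(), MercuryError> {
    let mut sender = MercurySender::new(mercury, "hm://example/endpoint".to_string());
    sender.send(b"first".to_vec());
    sender.send(b"second".to_vec());
    // flush() awaits every pending request in order and empties the queue.
    sender.flush().await?;
    assert!(sender.is_flushed());
    Ok(())
}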

View file

@ -6,10 +6,10 @@ use crate::protocol;
#[derive(Debug, PartialEq, Eq)] #[derive(Debug, PartialEq, Eq)]
pub enum MercuryMethod { pub enum MercuryMethod {
GET, Get,
SUB, Sub,
UNSUB, Unsub,
SEND, Send,
} }
#[derive(Debug)] #[derive(Debug)]
@ -33,10 +33,10 @@ pub struct MercuryError;
impl ToString for MercuryMethod { impl ToString for MercuryMethod {
fn to_string(&self) -> String { fn to_string(&self) -> String {
match *self { match *self {
MercuryMethod::GET => "GET", MercuryMethod::Get => "GET",
MercuryMethod::SUB => "SUB", MercuryMethod::Sub => "SUB",
MercuryMethod::UNSUB => "UNSUB", MercuryMethod::Unsub => "UNSUB",
MercuryMethod::SEND => "SEND", MercuryMethod::Send => "SEND",
} }
.to_owned() .to_owned()
} }
@ -45,9 +45,9 @@ impl ToString for MercuryMethod {
impl MercuryMethod { impl MercuryMethod {
pub fn command(&self) -> u8 { pub fn command(&self) -> u8 {
match *self { match *self {
MercuryMethod::GET | MercuryMethod::SEND => 0xb2, MercuryMethod::Get | MercuryMethod::Send => 0xb2,
MercuryMethod::SUB => 0xb3, MercuryMethod::Sub => 0xb3,
MercuryMethod::UNSUB => 0xb4, MercuryMethod::Unsub => 0xb4,
} }
} }
} }

View file

@ -1,110 +1,55 @@
use std::io; use std::io;
use std::str::FromStr;
use futures::{Async, Future, Poll}; use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt};
use httparse;
use hyper::Uri;
use tokio_io::io::{read, write_all, Read, Window, WriteAll};
use tokio_io::{AsyncRead, AsyncWrite};
pub struct ProxyTunnel<T> { pub async fn proxy_connect<T: AsyncRead + AsyncWrite + Unpin>(
state: ProxyState<T>, mut proxy_connection: T,
} connect_host: &str,
connect_port: &str,
) -> io::Result<T> {
let mut buffer = Vec::new();
buffer.extend_from_slice(b"CONNECT ");
buffer.extend_from_slice(connect_host.as_bytes());
buffer.push(b':');
buffer.extend_from_slice(connect_port.as_bytes());
buffer.extend_from_slice(b" HTTP/1.1\r\n\r\n");
enum ProxyState<T> { proxy_connection.write_all(buffer.as_ref()).await?;
ProxyConnect(WriteAll<T, Vec<u8>>),
ProxyResponse(Read<T, Window<Vec<u8>>>),
}
pub fn connect<T: AsyncRead + AsyncWrite>(connection: T, connect_url: &str) -> ProxyTunnel<T> { buffer.resize(buffer.capacity(), 0);
let proxy = proxy_connect(connection, connect_url);
ProxyTunnel {
state: ProxyState::ProxyConnect(proxy),
}
}
impl<T: AsyncRead + AsyncWrite> Future for ProxyTunnel<T> { let mut offset = 0;
type Item = T;
type Error = io::Error;
fn poll(&mut self) -> Poll<Self::Item, io::Error> {
use self::ProxyState::*;
loop { loop {
self.state = match self.state { let bytes_read = proxy_connection.read(&mut buffer[offset..]).await?;
ProxyConnect(ref mut write) => {
let (connection, mut accumulator) = try_ready!(write.poll());
let capacity = accumulator.capacity();
accumulator.resize(capacity, 0);
let window = Window::new(accumulator);
let read = read(connection, window);
ProxyResponse(read)
}
ProxyResponse(ref mut read_f) => {
let (connection, mut window, bytes_read) = try_ready!(read_f.poll());
if bytes_read == 0 { if bytes_read == 0 {
return Err(io::Error::new(io::ErrorKind::Other, "Early EOF from proxy")); return Err(io::Error::new(io::ErrorKind::Other, "Early EOF from proxy"));
} }
offset += bytes_read;
let data_end = window.start() + bytes_read;
let buf = window.get_ref()[0..data_end].to_vec();
let mut headers = [httparse::EMPTY_HEADER; 16]; let mut headers = [httparse::EMPTY_HEADER; 16];
let mut response = httparse::Response::new(&mut headers); let mut response = httparse::Response::new(&mut headers);
let status = match response.parse(&buf) {
Ok(status) => status, let status = response
Err(err) => { .parse(&buffer[..offset])
return Err(io::Error::new(io::ErrorKind::Other, err.to_string())); .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;
}
};
if status.is_complete() { if status.is_complete() {
if let Some(code) = response.code { return match response.code {
if code == 200 { Some(200) => Ok(proxy_connection), // Proxy says all is well
// Proxy says all is well Some(code) => {
return Ok(Async::Ready(connection));
} else {
let reason = response.reason.unwrap_or("no reason"); let reason = response.reason.unwrap_or("no reason");
let msg = format!("Proxy responded with {}: {}", code, reason); let msg = format!("Proxy responded with {}: {}", code, reason);
Err(io::Error::new(io::ErrorKind::Other, msg))
return Err(io::Error::new(io::ErrorKind::Other, msg));
} }
} else { None => Err(io::Error::new(
return Err(io::Error::new(
io::ErrorKind::Other, io::ErrorKind::Other,
"Malformed response from proxy", "Malformed response from proxy",
)); )),
} };
} else {
if data_end >= window.end() {
// Allocate some more buffer space
let newsize = data_end + 100;
window.get_mut().resize(newsize, 0);
window.set_end(newsize);
}
// We did not get a full header
window.set_start(data_end);
let read = read(connection, window);
ProxyResponse(read)
}
}
} }
if offset >= buffer.len() {
buffer.resize(buffer.len() + 100, 0);
} }
} }
} }
fn proxy_connect<T: AsyncWrite>(connection: T, connect_url: &str) -> WriteAll<T, Vec<u8>> {
let uri = Uri::from_str(connect_url).unwrap();
let buffer = format!(
"CONNECT {0}:{1} HTTP/1.1\r\n\
\r\n",
uri.host().expect(&format!("No host in {}", uri)),
uri.port().expect(&format!("No port in {}", uri))
)
.into_bytes();
write_all(connection, buffer)
}
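Behind the rewrite, the tunnel is still a single HTTP CONNECT round trip: write the request, read until httparse sees a complete response, and hand the socket back on a 200. A hedged test sketch using an in-memory duplex stream as a stand-in for the proxy (host and port are placeholders):

#[tokio::test]
async fn proxy_connect_accepts_200() {
    use tokio::io::{duplex, AsyncReadExt, AsyncWriteExt};

    // One end acts as the proxy, the other is handed to proxy_connect.
    let (client_side, mut proxy_side) = duplex(1024);

    let fake_proxy = tokio::spawn(async move {
        let mut request = vec![0u8; 128];
        let n = proxy_side.read(&mut request).await.unwrap();
        assert!(request[..n].starts_with(b"CONNECT example.com:443 HTTP/1.1"));
        proxy_side
            .write_all(b"HTTP/1.1 200 Connection established\r\n\r\n")
            .await
            .unwrap();
    });

    let tunnel = proxy_connect(client_side, "example.com", "443").await;
    assert!(tunnel.is_ok());
    fake_proxy.await.unwrap();
}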

View file

@ -1,25 +1,37 @@
use std::future::Future;
use std::io; use std::io;
use std::pin::Pin;
use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::{Arc, RwLock, Weak}; use std::sync::{Arc, RwLock, Weak};
use std::task::Context;
use std::task::Poll;
use std::time::{SystemTime, UNIX_EPOCH}; use std::time::{SystemTime, UNIX_EPOCH};
use byteorder::{BigEndian, ByteOrder}; use byteorder::{BigEndian, ByteOrder};
use bytes::Bytes; use bytes::Bytes;
use futures::sync::mpsc; use futures_core::TryStream;
use futures::{Async, Future, IntoFuture, Poll, Stream}; use futures_util::{future, StreamExt, TryStreamExt};
use tokio_core::reactor::{Handle, Remote}; use once_cell::sync::OnceCell;
use thiserror::Error;
use tokio::sync::mpsc;
use tokio_stream::wrappers::UnboundedReceiverStream;
use crate::apresolve::apresolve_or_fallback; use crate::apresolve::apresolve;
use crate::audio_key::AudioKeyManager; use crate::audio_key::AudioKeyManager;
use crate::authentication::Credentials; use crate::authentication::Credentials;
use crate::cache::Cache; use crate::cache::Cache;
use crate::channel::ChannelManager; use crate::channel::ChannelManager;
use crate::component::Lazy;
use crate::config::SessionConfig; use crate::config::SessionConfig;
use crate::connection; use crate::connection::{self, AuthenticationError};
use crate::mercury::MercuryManager; use crate::mercury::MercuryManager;
pub use crate::authentication::{AuthenticationError, AuthenticationErrorKind}; #[derive(Debug, Error)]
pub enum SessionError {
#[error(transparent)]
AuthenticationError(#[from] AuthenticationError),
#[error("Cannot create session: {0}")]
IoError(#[from] io::Error),
}
struct SessionData { struct SessionData {
country: String, country: String,
@ -34,12 +46,12 @@ struct SessionInternal {
tx_connection: mpsc::UnboundedSender<(u8, Vec<u8>)>, tx_connection: mpsc::UnboundedSender<(u8, Vec<u8>)>,
audio_key: Lazy<AudioKeyManager>, audio_key: OnceCell<AudioKeyManager>,
channel: Lazy<ChannelManager>, channel: OnceCell<ChannelManager>,
mercury: Lazy<MercuryManager>, mercury: OnceCell<MercuryManager>,
cache: Option<Arc<Cache>>, cache: Option<Arc<Cache>>,
handle: Remote, handle: tokio::runtime::Handle,
session_id: usize, session_id: usize,
} }
@ -50,127 +62,109 @@ static SESSION_COUNTER: AtomicUsize = AtomicUsize::new(0);
pub struct Session(Arc<SessionInternal>); pub struct Session(Arc<SessionInternal>);
impl Session { impl Session {
pub fn connect( pub async fn connect(
config: SessionConfig, config: SessionConfig,
credentials: Credentials, credentials: Credentials,
cache: Option<Cache>, cache: Option<Cache>,
handle: Handle, ) -> Result<Session, SessionError> {
) -> Box<dyn Future<Item = Session, Error = AuthenticationError>> { let ap = apresolve(config.proxy.as_ref(), config.ap_port).await;
let access_point =
apresolve_or_fallback::<io::Error>(&handle, &config.proxy, &config.ap_port);
let handle_ = handle.clone(); info!("Connecting to AP \"{}\"", ap);
let proxy = config.proxy.clone(); let mut conn = connection::connect(ap, config.proxy.as_ref()).await?;
let connection = access_point
.and_then(move |addr| {
info!("Connecting to AP \"{}\"", addr);
connection::connect(addr, &handle_, &proxy)
})
.map_err(|io_err| io_err.into());
let device_id = config.device_id.clone(); let reusable_credentials =
let authentication = connection.and_then(move |connection| { connection::authenticate(&mut conn, credentials, &config.device_id).await?;
connection::authenticate(connection, credentials, device_id)
});
let result = authentication.map(move |(transport, reusable_credentials)| {
info!("Authenticated as \"{}\" !", reusable_credentials.username); info!("Authenticated as \"{}\" !", reusable_credentials.username);
if let Some(ref cache) = cache { if let Some(cache) = &cache {
cache.save_credentials(&reusable_credentials); cache.save_credentials(&reusable_credentials);
} }
let (session, task) = Session::create( let session = Session::create(
&handle, conn,
transport,
config, config,
cache, cache,
reusable_credentials.username.clone(), reusable_credentials.username,
tokio::runtime::Handle::current(),
); );
handle.spawn(task.map_err(|e| { Ok(session)
error!("{:?}", e);
}));
session
});
Box::new(result)
} }
fn create( fn create(
handle: &Handle,
transport: connection::Transport, transport: connection::Transport,
config: SessionConfig, config: SessionConfig,
cache: Option<Cache>, cache: Option<Cache>,
username: String, username: String,
) -> (Session, Box<dyn Future<Item = (), Error = io::Error>>) { handle: tokio::runtime::Handle,
) -> Session {
let (sink, stream) = transport.split(); let (sink, stream) = transport.split();
let (sender_tx, sender_rx) = mpsc::unbounded(); let (sender_tx, sender_rx) = mpsc::unbounded_channel();
let session_id = SESSION_COUNTER.fetch_add(1, Ordering::Relaxed); let session_id = SESSION_COUNTER.fetch_add(1, Ordering::Relaxed);
debug!("new Session[{}]", session_id); debug!("new Session[{}]", session_id);
let session = Session(Arc::new(SessionInternal { let session = Session(Arc::new(SessionInternal {
config: config, config,
data: RwLock::new(SessionData { data: RwLock::new(SessionData {
country: String::new(), country: String::new(),
canonical_username: username, canonical_username: username,
invalid: false, invalid: false,
time_delta: 0, time_delta: 0,
}), }),
tx_connection: sender_tx, tx_connection: sender_tx,
cache: cache.map(Arc::new), cache: cache.map(Arc::new),
audio_key: OnceCell::new(),
audio_key: Lazy::new(), channel: OnceCell::new(),
channel: Lazy::new(), mercury: OnceCell::new(),
mercury: Lazy::new(), handle,
session_id,
handle: handle.remote().clone(),
session_id: session_id,
})); }));
let sender_task = sender_rx let sender_task = UnboundedReceiverStream::new(sender_rx)
.map_err(|e| -> io::Error { panic!(e) }) .map(Ok)
.forward(sink) .forward(sink);
.map(|_| ());
let receiver_task = DispatchTask(stream, session.weak()); let receiver_task = DispatchTask(stream, session.weak());
let task = Box::new( tokio::spawn(async move {
(receiver_task, sender_task) let result = future::try_join(sender_task, receiver_task).await;
.into_future()
.map(|((), ())| ()),
);
(session, task) if let Err(e) = result {
error!("{}", e);
}
});
session
} }
pub fn audio_key(&self) -> &AudioKeyManager { pub fn audio_key(&self) -> &AudioKeyManager {
self.0.audio_key.get(|| AudioKeyManager::new(self.weak())) self.0
.audio_key
.get_or_init(|| AudioKeyManager::new(self.weak()))
} }
pub fn channel(&self) -> &ChannelManager { pub fn channel(&self) -> &ChannelManager {
self.0.channel.get(|| ChannelManager::new(self.weak())) self.0
.channel
.get_or_init(|| ChannelManager::new(self.weak()))
} }
pub fn mercury(&self) -> &MercuryManager { pub fn mercury(&self) -> &MercuryManager {
self.0.mercury.get(|| MercuryManager::new(self.weak())) self.0
.mercury
.get_or_init(|| MercuryManager::new(self.weak()))
} }
pub fn time_delta(&self) -> i64 { pub fn time_delta(&self) -> i64 {
self.0.data.read().unwrap().time_delta self.0.data.read().unwrap().time_delta
} }
pub fn spawn<F, R>(&self, f: F) pub fn spawn<T>(&self, task: T)
where where
F: FnOnce(&Handle) -> R + Send + 'static, T: Future + Send + 'static,
R: IntoFuture<Item = (), Error = ()>, T::Output: Send + 'static,
R::Future: 'static,
{ {
self.0.handle.spawn(f) self.0.handle.spawn(task);
} }
fn debug_info(&self) { fn debug_info(&self) {
@ -182,7 +176,7 @@ impl Session {
); );
} }
#[cfg_attr(feature = "cargo-clippy", allow(match_same_arms))] #[allow(clippy::match_same_arms)]
fn dispatch(&self, cmd: u8, data: Bytes) { fn dispatch(&self, cmd: u8, data: Bytes) {
match cmd { match cmd {
0x4 => { 0x4 => {
@ -213,7 +207,7 @@ impl Session {
} }
pub fn send_packet(&self, cmd: u8, data: Vec<u8>) { pub fn send_packet(&self, cmd: u8, data: Vec<u8>) {
self.0.tx_connection.unbounded_send((cmd, data)).unwrap(); self.0.tx_connection.send((cmd, data)).unwrap();
} }
pub fn cache(&self) -> Option<&Arc<Cache>> { pub fn cache(&self) -> Option<&Arc<Cache>> {
@ -277,35 +271,34 @@ impl Drop for SessionInternal {
struct DispatchTask<S>(S, SessionWeak) struct DispatchTask<S>(S, SessionWeak)
where where
S: Stream<Item = (u8, Bytes)>; S: TryStream<Ok = (u8, Bytes)> + Unpin;
impl<S> Future for DispatchTask<S> impl<S> Future for DispatchTask<S>
where where
S: Stream<Item = (u8, Bytes)>, S: TryStream<Ok = (u8, Bytes)> + Unpin,
<S as Stream>::Error: ::std::fmt::Debug, <S as TryStream>::Ok: std::fmt::Debug,
{ {
type Item = (); type Output = Result<(), S::Error>;
type Error = S::Error;
fn poll(&mut self) -> Poll<Self::Item, Self::Error> { fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let session = match self.1.try_upgrade() { let session = match self.1.try_upgrade() {
Some(session) => session, Some(session) => session,
None => return Ok(Async::Ready(())), None => return Poll::Ready(Ok(())),
}; };
loop { loop {
let (cmd, data) = match self.0.poll() { let (cmd, data) = match self.0.try_poll_next_unpin(cx) {
Ok(Async::Ready(Some(t))) => t, Poll::Ready(Some(Ok(t))) => t,
Ok(Async::Ready(None)) => { Poll::Ready(None) => {
warn!("Connection to server closed."); warn!("Connection to server closed.");
session.shutdown(); session.shutdown();
return Ok(Async::Ready(())); return Poll::Ready(Ok(()));
} }
Ok(Async::NotReady) => return Ok(Async::NotReady), Poll::Ready(Some(Err(e))) => {
Err(e) => {
session.shutdown(); session.shutdown();
return Err(From::from(e)); return Poll::Ready(Err(e));
} }
Poll::Pending => return Poll::Pending,
}; };
session.dispatch(cmd, data); session.dispatch(cmd, data);
@ -315,7 +308,7 @@ where
impl<S> Drop for DispatchTask<S> impl<S> Drop for DispatchTask<S>
where where
S: Stream<Item = (u8, Bytes)>, S: TryStream<Ok = (u8, Bytes)> + Unpin,
{ {
fn drop(&mut self) { fn drop(&mut self) {
debug!("drop Dispatch"); debug!("drop Dispatch");

View file

@ -18,9 +18,9 @@ impl From<&str> for SpotifyAudioType {
} }
} }
impl Into<&str> for SpotifyAudioType { impl From<SpotifyAudioType> for &str {
fn into(self) -> &'static str { fn from(audio_type: SpotifyAudioType) -> &'static str {
match self { match audio_type {
SpotifyAudioType::Track => "track", SpotifyAudioType::Track => "track",
SpotifyAudioType::Podcast => "episode", SpotifyAudioType::Podcast => "episode",
SpotifyAudioType::NonPlayable => "unknown", SpotifyAudioType::NonPlayable => "unknown",
@ -45,7 +45,7 @@ impl SpotifyId {
const SIZE_BASE16: usize = 32; const SIZE_BASE16: usize = 32;
const SIZE_BASE62: usize = 22; const SIZE_BASE62: usize = 22;
fn as_track(n: u128) -> SpotifyId { fn track(n: u128) -> SpotifyId {
SpotifyId { SpotifyId {
id: n, id: n,
audio_type: SpotifyAudioType::Track, audio_type: SpotifyAudioType::Track,
@ -71,7 +71,7 @@ impl SpotifyId {
dst += p; dst += p;
} }
Ok(SpotifyId::as_track(dst)) Ok(SpotifyId::track(dst))
} }
/// Parses a base62 encoded [Spotify ID] into a `SpotifyId`. /// Parses a base62 encoded [Spotify ID] into a `SpotifyId`.
@ -94,7 +94,7 @@ impl SpotifyId {
dst += p; dst += p;
} }
Ok(SpotifyId::as_track(dst)) Ok(SpotifyId::track(dst))
} }
/// Creates a `SpotifyId` from a copy of `SpotifyId::SIZE` (16) bytes in big-endian order. /// Creates a `SpotifyId` from a copy of `SpotifyId::SIZE` (16) bytes in big-endian order.
@ -102,7 +102,7 @@ impl SpotifyId {
/// The resulting `SpotifyId` will default to a `SpotifyAudioType::TRACK`. /// The resulting `SpotifyId` will default to a `SpotifyAudioType::TRACK`.
pub fn from_raw(src: &[u8]) -> Result<SpotifyId, SpotifyIdError> { pub fn from_raw(src: &[u8]) -> Result<SpotifyId, SpotifyIdError> {
match src.try_into() { match src.try_into() {
Ok(dst) => Ok(SpotifyId::as_track(u128::from_be_bytes(dst))), Ok(dst) => Ok(SpotifyId::track(u128::from_be_bytes(dst))),
Err(_) => Err(SpotifyIdError), Err(_) => Err(SpotifyIdError),
} }
} }
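Flipping the Into impl to From<SpotifyAudioType> for &str keeps the Into direction via the standard blanket impl while reading more idiomatically. A tiny sketch of both directions, assuming the From<&str> impl (whose body lies outside this hunk) maps "track" back to SpotifyAudioType::Track:

fn audio_type_names() {
    // Enum -> &str via the new From impl (Into comes along for free).
    let name: &str = SpotifyAudioType::Track.into();
    assert_eq!(name, "track");
    // &str -> enum via the existing From<&str> impl.
    assert!(matches!(SpotifyAudioType::from("track"), SpotifyAudioType::Track));
}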

core/src/util.rs Normal file
View file

@ -0,0 +1,29 @@
use std::mem;
pub trait Seq {
fn next(&self) -> Self;
}
macro_rules! impl_seq {
($($ty:ty)*) => { $(
impl Seq for $ty {
fn next(&self) -> Self { (*self).wrapping_add(1) }
}
)* }
}
impl_seq!(u8 u16 u32 u64 usize);
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Default)]
pub struct SeqGenerator<T: Seq>(T);
impl<T: Seq> SeqGenerator<T> {
pub fn new(value: T) -> Self {
SeqGenerator(value)
}
pub fn get(&mut self) -> T {
let value = self.0.next();
mem::replace(&mut self.0, value)
}
}
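SeqGenerator is the only part of the old util module that survives; it hands out consecutive sequence numbers with wrapping overflow. A short usage sketch, assuming the struct above is in scope:

fn seq_example() {
    // Start counting at 0; get() returns the current value and steps to the next.
    let mut seq = SeqGenerator::<u32>::new(0);
    assert_eq!(seq.get(), 0);
    assert_eq!(seq.get(), 1);

    // u8 wraps around instead of overflowing, thanks to wrapping_add.
    let mut small = SeqGenerator::new(u8::MAX);
    assert_eq!(small.get(), u8::MAX);
    assert_eq!(small.get(), 0);
}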

View file

@ -1,75 +0,0 @@
use num_bigint::BigUint;
use num_integer::Integer;
use num_traits::{One, Zero};
use rand::Rng;
use std::mem;
use std::ops::{Mul, Rem, Shr};
pub fn rand_vec<G: Rng>(rng: &mut G, size: usize) -> Vec<u8> {
::std::iter::repeat(())
.map(|()| rng.gen())
.take(size)
.collect()
}
pub fn url_encode(inp: &str) -> String {
let mut encoded = String::new();
for c in inp.as_bytes().iter() {
match *c as char {
'A'..='Z' | 'a'..='z' | '0'..='9' | '-' | '_' | '.' | '~' | ':' | '/' => {
encoded.push(*c as char)
}
c => encoded.push_str(format!("%{:02X}", c as u32).as_str()),
};
}
encoded
}
pub fn powm(base: &BigUint, exp: &BigUint, modulus: &BigUint) -> BigUint {
let mut base = base.clone();
let mut exp = exp.clone();
let mut result: BigUint = One::one();
while !exp.is_zero() {
if exp.is_odd() {
result = result.mul(&base).rem(modulus);
}
exp = exp.shr(1);
base = (&base).mul(&base).rem(modulus);
}
result
}
pub trait ReadSeek: ::std::io::Read + ::std::io::Seek {}
impl<T: ::std::io::Read + ::std::io::Seek> ReadSeek for T {}
pub trait Seq {
fn next(&self) -> Self;
}
macro_rules! impl_seq {
($($ty:ty)*) => { $(
impl Seq for $ty {
fn next(&self) -> Self { (*self).wrapping_add(1) }
}
)* }
}
impl_seq!(u8 u16 u32 u64 usize);
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord, Default)]
pub struct SeqGenerator<T: Seq>(T);
impl<T: Seq> SeqGenerator<T> {
pub fn new(value: T) -> Self {
SeqGenerator(value)
}
pub fn get(&mut self) -> T {
let value = self.0.next();
mem::replace(&mut self.0, value)
}
}

core/tests/connect.rs Normal file
View file

@ -0,0 +1,18 @@
use librespot_core::authentication::Credentials;
use librespot_core::config::SessionConfig;
use librespot_core::session::Session;
#[tokio::test]
async fn test_connection() {
let result = Session::connect(
SessionConfig::default(),
Credentials::with_password("test", "test"),
None,
)
.await;
match result {
Ok(_) => panic!("Authentication succeeded despite bad credentials."),
Err(e) => assert_eq!(e.to_string(), "Login failed with reason: Bad credentials"),
};
}

View file

@ -1,5 +1,4 @@
use std::env; use std::env;
use tokio_core::reactor::Core;
use librespot::core::authentication::Credentials; use librespot::core::authentication::Credentials;
use librespot::core::config::SessionConfig; use librespot::core::config::SessionConfig;
@ -9,29 +8,26 @@ use librespot::core::session::Session;
const SCOPES: &str = const SCOPES: &str =
"streaming,user-read-playback-state,user-modify-playback-state,user-read-currently-playing"; "streaming,user-read-playback-state,user-modify-playback-state,user-read-currently-playing";
fn main() { #[tokio::main]
let mut core = Core::new().unwrap(); async fn main() {
let handle = core.handle();
let session_config = SessionConfig::default(); let session_config = SessionConfig::default();
let args: Vec<_> = env::args().collect(); let args: Vec<_> = env::args().collect();
if args.len() != 4 { if args.len() != 4 {
println!("Usage: {} USERNAME PASSWORD CLIENT_ID", args[0]); eprintln!("Usage: {} USERNAME PASSWORD CLIENT_ID", args[0]);
return;
} }
let username = args[1].to_owned();
let password = args[2].to_owned();
let client_id = &args[3];
println!("Connecting.."); println!("Connecting..");
let credentials = Credentials::with_password(username, password); let credentials = Credentials::with_password(&args[1], &args[2]);
let session = core let session = Session::connect(session_config, credentials, None)
.run(Session::connect(session_config, credentials, None, handle)) .await
.unwrap(); .unwrap();
println!( println!(
"Token: {:#?}", "Token: {:#?}",
core.run(keymaster::get_token(&session, &client_id, SCOPES)) keymaster::get_token(&session, &args[3], SCOPES)
.await
.unwrap() .unwrap()
); );
} }

View file

@ -1,47 +1,44 @@
use std::env; use std::env;
use tokio_core::reactor::Core;
use librespot::core::authentication::Credentials; use librespot::core::authentication::Credentials;
use librespot::core::config::SessionConfig; use librespot::core::config::SessionConfig;
use librespot::core::session::Session; use librespot::core::session::Session;
use librespot::core::spotify_id::SpotifyId; use librespot::core::spotify_id::SpotifyId;
use librespot::playback::config::PlayerConfig;
use librespot::playback::audio_backend; use librespot::playback::audio_backend;
use librespot::playback::config::{AudioFormat, PlayerConfig};
use librespot::playback::player::Player; use librespot::playback::player::Player;
fn main() { #[tokio::main]
let mut core = Core::new().unwrap(); async fn main() {
let handle = core.handle();
let session_config = SessionConfig::default(); let session_config = SessionConfig::default();
let player_config = PlayerConfig::default(); let player_config = PlayerConfig::default();
let audio_format = AudioFormat::default();
let args: Vec<_> = env::args().collect(); let args: Vec<_> = env::args().collect();
if args.len() != 4 { if args.len() != 4 {
println!("Usage: {} USERNAME PASSWORD TRACK", args[0]); eprintln!("Usage: {} USERNAME PASSWORD TRACK", args[0]);
return;
} }
let username = args[1].to_owned(); let credentials = Credentials::with_password(&args[1], &args[2]);
let password = args[2].to_owned();
let credentials = Credentials::with_password(username, password);
let track = SpotifyId::from_base62(&args[3]).unwrap(); let track = SpotifyId::from_base62(&args[3]).unwrap();
let backend = audio_backend::find(None).unwrap(); let backend = audio_backend::find(None).unwrap();
println!("Connecting .."); println!("Connecting ..");
let session = core let session = Session::connect(session_config, credentials, None)
.run(Session::connect(session_config, credentials, None, handle)) .await
.unwrap(); .unwrap();
let (mut player, _) = Player::new(player_config, session.clone(), None, move || { let (mut player, _) = Player::new(player_config, session, None, move || {
(backend)(None) backend(None, audio_format)
}); });
player.load(track, true, 0); player.load(track, true, 0);
println!("Playing..."); println!("Playing...");
core.run(player.get_end_of_track_future()).unwrap();
player.await_end_of_track().await;
println!("Done"); println!("Done");
} }

View file

@ -1,6 +1,5 @@
use env_logger; use env_logger;
use std::env; use std::env;
use tokio_core::reactor::Core;
use librespot::core::authentication::Credentials; use librespot::core::authentication::Credentials;
use librespot::core::config::SessionConfig; use librespot::core::config::SessionConfig;
@ -8,35 +7,32 @@ use librespot::core::session::Session;
use librespot::core::spotify_id::SpotifyId; use librespot::core::spotify_id::SpotifyId;
use librespot::metadata::{Metadata, Playlist, Track}; use librespot::metadata::{Metadata, Playlist, Track};
fn main() { #[tokio::main]
async fn main() {
env_logger::init(); env_logger::init();
let mut core = Core::new().unwrap();
let handle = core.handle();
let session_config = SessionConfig::default(); let session_config = SessionConfig::default();
let args: Vec<_> = env::args().collect(); let args: Vec<_> = env::args().collect();
if args.len() != 4 { if args.len() != 4 {
println!("Usage: {} USERNAME PASSWORD PLAYLIST", args[0]); eprintln!("Usage: {} USERNAME PASSWORD PLAYLIST", args[0]);
return;
} }
let username = args[1].to_owned(); let credentials = Credentials::with_password(&args[1], &args[2]);
let password = args[2].to_owned();
let credentials = Credentials::with_password(username, password);
let uri_split = args[3].split(":"); let uri_split = args[3].split(':');
let uri_parts: Vec<&str> = uri_split.collect(); let uri_parts: Vec<&str> = uri_split.collect();
println!("{}, {}, {}", uri_parts[0], uri_parts[1], uri_parts[2]); println!("{}, {}, {}", uri_parts[0], uri_parts[1], uri_parts[2]);
let plist_uri = SpotifyId::from_base62(uri_parts[2]).unwrap(); let plist_uri = SpotifyId::from_base62(uri_parts[2]).unwrap();
let session = core let session = Session::connect(session_config, credentials, None)
.run(Session::connect(session_config, credentials, None, handle)) .await
.unwrap(); .unwrap();
let plist = core.run(Playlist::get(&session, plist_uri)).unwrap(); let plist = Playlist::get(&session, plist_uri).await.unwrap();
println!("{:?}", plist); println!("{:?}", plist);
for track_id in plist.tracks { for track_id in plist.tracks {
let plist_track = core.run(Track::get(&session, track_id)).unwrap(); let plist_track = Track::get(&session, track_id).await.unwrap();
println!("track: {} ", plist_track.name); println!("track: {} ", plist_track.name);
} }
} }

View file

@ -8,9 +8,8 @@ repository = "https://github.com/librespot-org/librespot"
edition = "2018" edition = "2018"
[dependencies] [dependencies]
async-trait = "0.1"
byteorder = "1.3" byteorder = "1.3"
futures = "0.1"
linear-map = "1.2"
protobuf = "~2.14.0" protobuf = "~2.14.0"
log = "0.4" log = "0.4"

View file

@ -1,23 +1,19 @@
#![allow(clippy::unused_io_amount)]
#[macro_use] #[macro_use]
extern crate log; extern crate log;
extern crate byteorder; #[macro_use]
extern crate futures; extern crate async_trait;
extern crate linear_map;
extern crate protobuf;
extern crate librespot_core;
extern crate librespot_protocol as protocol;
pub mod cover; pub mod cover;
use futures::future; use std::collections::HashMap;
use futures::Future;
use linear_map::LinearMap;
use librespot_core::mercury::MercuryError; use librespot_core::mercury::MercuryError;
use librespot_core::session::Session; use librespot_core::session::Session;
use librespot_core::spotify_id::{FileId, SpotifyAudioType, SpotifyId}; use librespot_core::spotify_id::{FileId, SpotifyAudioType, SpotifyId};
use librespot_protocol as protocol;
pub use crate::protocol::metadata::AudioFile_Format as FileFormat; pub use crate::protocol::metadata::AudioFile_Format as FileFormat;
@ -61,7 +57,7 @@ where
pub struct AudioItem { pub struct AudioItem {
pub id: SpotifyId, pub id: SpotifyId,
pub uri: String, pub uri: String,
pub files: LinearMap<FileFormat, FileId>, pub files: HashMap<FileFormat, FileId>,
pub name: String, pub name: String,
pub duration: i32, pub duration: i32,
pub available: bool, pub available: bool,
@ -69,35 +65,26 @@ pub struct AudioItem {
} }
impl AudioItem { impl AudioItem {
pub fn get_audio_item( pub async fn get_audio_item(session: &Session, id: SpotifyId) -> Result<Self, MercuryError> {
session: &Session,
id: SpotifyId,
) -> Box<dyn Future<Item = AudioItem, Error = MercuryError>> {
match id.audio_type { match id.audio_type {
SpotifyAudioType::Track => Track::get_audio_item(session, id), SpotifyAudioType::Track => Track::get_audio_item(session, id).await,
SpotifyAudioType::Podcast => Episode::get_audio_item(session, id), SpotifyAudioType::Podcast => Episode::get_audio_item(session, id).await,
SpotifyAudioType::NonPlayable => { SpotifyAudioType::NonPlayable => Err(MercuryError),
Box::new(future::err::<AudioItem, MercuryError>(MercuryError))
}
} }
} }
} }
#[async_trait]
trait AudioFiles { trait AudioFiles {
fn get_audio_item( async fn get_audio_item(session: &Session, id: SpotifyId) -> Result<AudioItem, MercuryError>;
session: &Session,
id: SpotifyId,
) -> Box<dyn Future<Item = AudioItem, Error = MercuryError>>;
} }
#[async_trait]
impl AudioFiles for Track { impl AudioFiles for Track {
fn get_audio_item( async fn get_audio_item(session: &Session, id: SpotifyId) -> Result<AudioItem, MercuryError> {
session: &Session, let item = Self::get(session, id).await?;
id: SpotifyId,
) -> Box<dyn Future<Item = AudioItem, Error = MercuryError>> {
Box::new(Self::get(session, id).and_then(move |item| {
Ok(AudioItem { Ok(AudioItem {
id: id, id,
uri: format!("spotify:track:{}", id.to_base62()), uri: format!("spotify:track:{}", id.to_base62()),
files: item.files, files: item.files,
name: item.name, name: item.name,
@ -105,18 +92,16 @@ impl AudioFiles for Track {
available: item.available, available: item.available,
alternatives: Some(item.alternatives), alternatives: Some(item.alternatives),
}) })
}))
} }
} }
#[async_trait]
impl AudioFiles for Episode { impl AudioFiles for Episode {
fn get_audio_item( async fn get_audio_item(session: &Session, id: SpotifyId) -> Result<AudioItem, MercuryError> {
session: &Session, let item = Self::get(session, id).await?;
id: SpotifyId,
) -> Box<dyn Future<Item = AudioItem, Error = MercuryError>> {
Box::new(Self::get(session, id).and_then(move |item| {
Ok(AudioItem { Ok(AudioItem {
id: id, id,
uri: format!("spotify:episode:{}", id.to_base62()), uri: format!("spotify:episode:{}", id.to_base62()),
files: item.files, files: item.files,
name: item.name, name: item.name,
@ -124,26 +109,23 @@ impl AudioFiles for Episode {
available: item.available, available: item.available,
alternatives: None, alternatives: None,
}) })
}))
} }
} }
#[async_trait]
pub trait Metadata: Send + Sized + 'static { pub trait Metadata: Send + Sized + 'static {
type Message: protobuf::Message; type Message: protobuf::Message;
fn request_url(id: SpotifyId) -> String; fn request_url(id: SpotifyId) -> String;
fn parse(msg: &Self::Message, session: &Session) -> Self; fn parse(msg: &Self::Message, session: &Session) -> Self;
fn get(session: &Session, id: SpotifyId) -> Box<dyn Future<Item = Self, Error = MercuryError>> { async fn get(session: &Session, id: SpotifyId) -> Result<Self, MercuryError> {
let uri = Self::request_url(id); let uri = Self::request_url(id);
let request = session.mercury().get(uri); let response = session.mercury().get(uri).await?;
let session = session.clone();
Box::new(request.and_then(move |response| {
let data = response.payload.first().expect("Empty payload"); let data = response.payload.first().expect("Empty payload");
let msg: Self::Message = protobuf::parse_from_bytes(data).unwrap(); let msg: Self::Message = protobuf::parse_from_bytes(data).unwrap();
Ok(Self::parse(&msg, &session)) Ok(Self::parse(&msg, &session))
}))
} }
} }
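Note: with async_trait, an implementor of Metadata only supplies request_url and parse; the default get drives the Mercury round trip. A hedged sketch of a hypothetical implementor (the Genre type, its protobuf message and the endpoint are made up for illustration):

#[async_trait]
impl Metadata for Genre {
    type Message = protocol::metadata::Genre; // hypothetical message type

    fn request_url(id: SpotifyId) -> String {
        format!("hm://metadata/3/genre/{}", id.to_base62()) // hypothetical endpoint
    }

    fn parse(msg: &Self::Message, _session: &Session) -> Self {
        Genre {
            name: msg.get_name().to_owned(),
        }
    }
}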
@ -154,7 +136,7 @@ pub struct Track {
pub duration: i32, pub duration: i32,
pub album: SpotifyId, pub album: SpotifyId,
pub artists: Vec<SpotifyId>, pub artists: Vec<SpotifyId>,
pub files: LinearMap<FileFormat, FileId>, pub files: HashMap<FileFormat, FileId>,
pub alternatives: Vec<SpotifyId>, pub alternatives: Vec<SpotifyId>,
pub available: bool, pub available: bool,
} }
@ -176,7 +158,7 @@ pub struct Episode {
pub duration: i32, pub duration: i32,
pub language: String, pub language: String,
pub show: SpotifyId, pub show: SpotifyId,
pub files: LinearMap<FileFormat, FileId>, pub files: HashMap<FileFormat, FileId>,
pub covers: Vec<FileId>, pub covers: Vec<FileId>,
pub available: bool, pub available: bool,
pub explicit: bool, pub explicit: bool,
@ -239,8 +221,8 @@ impl Metadata for Track {
name: msg.get_name().to_owned(), name: msg.get_name().to_owned(),
duration: msg.get_duration(), duration: msg.get_duration(),
album: SpotifyId::from_raw(msg.get_album().get_gid()).unwrap(), album: SpotifyId::from_raw(msg.get_album().get_gid()).unwrap(),
artists: artists, artists,
files: files, files,
alternatives: msg alternatives: msg
.get_alternative() .get_alternative()
.iter() .iter()
@ -289,9 +271,9 @@ impl Metadata for Album {
Album { Album {
id: SpotifyId::from_raw(msg.get_gid()).unwrap(), id: SpotifyId::from_raw(msg.get_gid()).unwrap(),
name: msg.get_name().to_owned(), name: msg.get_name().to_owned(),
artists: artists, artists,
tracks: tracks, tracks,
covers: covers, covers,
} }
} }
} }
@ -309,7 +291,7 @@ impl Metadata for Playlist {
.get_items() .get_items()
.iter() .iter()
.map(|item| { .map(|item| {
let uri_split = item.get_uri().split(":"); let uri_split = item.get_uri().split(':');
let uri_parts: Vec<&str> = uri_split.collect(); let uri_parts: Vec<&str> = uri_split.collect();
SpotifyId::from_base62(uri_parts[2]).unwrap() SpotifyId::from_base62(uri_parts[2]).unwrap()
}) })
@ -326,7 +308,7 @@ impl Metadata for Playlist {
Playlist { Playlist {
revision: msg.get_revision().to_vec(), revision: msg.get_revision().to_vec(),
name: msg.get_attributes().get_name().to_owned(), name: msg.get_attributes().get_name().to_owned(),
tracks: tracks, tracks,
user: msg.get_owner_username().to_string(), user: msg.get_owner_username().to_string(),
} }
} }
@ -359,7 +341,7 @@ impl Metadata for Artist {
Artist { Artist {
id: SpotifyId::from_raw(msg.get_gid()).unwrap(), id: SpotifyId::from_raw(msg.get_gid()).unwrap(),
name: msg.get_name().to_owned(), name: msg.get_name().to_owned(),
top_tracks: top_tracks, top_tracks,
} }
} }
} }
@ -405,8 +387,8 @@ impl Metadata for Episode {
duration: msg.get_duration().to_owned(), duration: msg.get_duration().to_owned(),
language: msg.get_language().to_owned(), language: msg.get_language().to_owned(),
show: SpotifyId::from_raw(msg.get_show().get_gid()).unwrap(), show: SpotifyId::from_raw(msg.get_show().get_gid()).unwrap(),
covers: covers, covers,
files: files, files,
available: parse_restrictions(msg.get_restriction(), &country, "premium"), available: parse_restrictions(msg.get_restriction(), &country, "premium"),
explicit: msg.get_explicit().to_owned(), explicit: msg.get_explicit().to_owned(),
} }
@ -444,8 +426,8 @@ impl Metadata for Show {
id: SpotifyId::from_raw(msg.get_gid()).unwrap(), id: SpotifyId::from_raw(msg.get_gid()).unwrap(),
name: msg.get_name().to_owned(), name: msg.get_name().to_owned(),
publisher: msg.get_publisher().to_owned(), publisher: msg.get_publisher().to_owned(),
episodes: episodes, episodes,
covers: covers, covers,
} }
} }
} }
@ -18,31 +18,36 @@ path = "../metadata"
version = "0.1.6" version = "0.1.6"
[dependencies] [dependencies]
futures = "0.1" futures-executor = "0.3"
futures-util = { version = "0.3", default_features = false, features = ["alloc"] }
log = "0.4" log = "0.4"
byteorder = "1.3" byteorder = "1.4"
shell-words = "1.0.0" shell-words = "1.0.0"
tokio = { version = "1", features = ["sync"] }
alsa = { version = "0.4", optional = true } alsa = { version = "0.5", optional = true }
portaudio-rs = { version = "0.3", optional = true } portaudio-rs = { version = "0.3", optional = true }
libpulse-binding = { version = "2.13", optional = true, default-features = false } libpulse-binding = { version = "2", optional = true, default-features = false }
libpulse-simple-binding = { version = "2.13", optional = true, default-features = false } libpulse-simple-binding = { version = "2", optional = true, default-features = false }
jack = { version = "0.6", optional = true } jack = { version = "0.6", optional = true }
libc = { version = "0.2", optional = true } libc = { version = "0.2", optional = true }
rodio = { version = "0.13", optional = true, default-features = false } sdl2 = { version = "0.34.3", optional = true }
cpal = { version = "0.13", optional = true }
sdl2 = { version = "0.34", optional = true }
gstreamer = { version = "0.16", optional = true } gstreamer = { version = "0.16", optional = true }
gstreamer-app = { version = "0.16", optional = true } gstreamer-app = { version = "0.16", optional = true }
glib = { version = "0.10", optional = true } glib = { version = "0.10", optional = true }
zerocopy = { version = "0.3", optional = true } zerocopy = { version = "0.3" }
# Rodio dependencies
rodio = { version = "0.13", optional = true, default-features = false }
cpal = { version = "0.13", optional = true }
thiserror = { version = "1", optional = true }
[features] [features]
alsa-backend = ["alsa"] alsa-backend = ["alsa"]
portaudio-backend = ["portaudio-rs"] portaudio-backend = ["portaudio-rs"]
pulseaudio-backend = ["libpulse-binding", "libpulse-simple-binding"] pulseaudio-backend = ["libpulse-binding", "libpulse-simple-binding"]
jackaudio-backend = ["jack"] jackaudio-backend = ["jack"]
rodiojack-backend = ["rodio", "cpal/jack"] rodio-backend = ["rodio", "cpal", "thiserror"]
rodio-backend = ["rodio", "cpal"] rodiojack-backend = ["rodio", "cpal/jack", "thiserror"]
sdl-backend = ["sdl2"] sdl-backend = ["sdl2"]
gstreamer-backend = ["gstreamer", "gstreamer-app", "glib", "zerocopy"] gstreamer-backend = ["gstreamer", "gstreamer-app", "glib"]
@ -1,5 +1,7 @@
use super::{Open, Sink}; use super::{Open, Sink, SinkAsBytes};
use crate::audio::AudioPacket; use crate::audio::AudioPacket;
use crate::config::AudioFormat;
use crate::player::{NUM_CHANNELS, SAMPLES_PER_SECOND, SAMPLE_RATE};
use alsa::device_name::HintIter; use alsa::device_name::HintIter;
use alsa::pcm::{Access, Format, Frames, HwParams, PCM}; use alsa::pcm::{Access, Format, Frames, HwParams, PCM};
use alsa::{Direction, Error, ValueOr}; use alsa::{Direction, Error, ValueOr};
@ -8,13 +10,14 @@ use std::ffi::CString;
use std::io; use std::io;
use std::process::exit; use std::process::exit;
const PREFERED_PERIOD_SIZE: Frames = 5512; // Period of roughly 125ms const BUFFERED_LATENCY: f32 = 0.125; // seconds
const BUFFERED_PERIODS: Frames = 4; const BUFFERED_PERIODS: Frames = 4;
pub struct AlsaSink { pub struct AlsaSink {
pcm: Option<PCM>, pcm: Option<PCM>,
format: AudioFormat,
device: String, device: String,
buffer: Vec<i16>, buffer: Vec<u8>,
} }
fn list_outputs() { fn list_outputs() {
@ -34,23 +37,27 @@ fn list_outputs() {
} }
} }
fn open_device(dev_name: &str) -> Result<(PCM, Frames), Box<Error>> { fn open_device(dev_name: &str, format: AudioFormat) -> Result<(PCM, Frames), Box<Error>> {
let pcm = PCM::new(dev_name, Direction::Playback, false)?; let pcm = PCM::new(dev_name, Direction::Playback, false)?;
let mut period_size = PREFERED_PERIOD_SIZE; let alsa_format = match format {
AudioFormat::F32 => Format::float(),
AudioFormat::S32 => Format::s32(),
AudioFormat::S24 => Format::s24(),
AudioFormat::S24_3 => Format::S243LE,
AudioFormat::S16 => Format::s16(),
};
// http://www.linuxjournal.com/article/6735?page=0,1#N0x19ab2890.0x19ba78d8 // http://www.linuxjournal.com/article/6735?page=0,1#N0x19ab2890.0x19ba78d8
// latency = period_size * periods / (rate * bytes_per_frame) // latency = period_size * periods / (rate * bytes_per_frame)
// For 16 Bit stereo data, one frame has a length of four bytes. // For stereo samples encoded as 32-bit float, one frame has a length of eight bytes.
// 500ms = buffer_size / (44100 * 4) let mut period_size = ((SAMPLES_PER_SECOND * format.size() as u32) as f32
// buffer_size_bytes = 0.5 * 44100 / 4 * (BUFFERED_LATENCY / BUFFERED_PERIODS as f32)) as Frames;
// buffer_size_frames = 0.5 * 44100 = 22050
{ {
// Set hardware parameters: 44100 Hz / Stereo / 16 bit
let hwp = HwParams::any(&pcm)?; let hwp = HwParams::any(&pcm)?;
hwp.set_access(Access::RWInterleaved)?; hwp.set_access(Access::RWInterleaved)?;
hwp.set_format(Format::s16())?; hwp.set_format(alsa_format)?;
hwp.set_rate(44100, ValueOr::Nearest)?; hwp.set_rate(SAMPLE_RATE, ValueOr::Nearest)?;
hwp.set_channels(2)?; hwp.set_channels(NUM_CHANNELS as u32)?;
period_size = hwp.set_period_size_near(period_size, ValueOr::Greater)?; period_size = hwp.set_period_size_near(period_size, ValueOr::Greater)?;
hwp.set_buffer_size_near(period_size * BUFFERED_PERIODS)?; hwp.set_buffer_size_near(period_size * BUFFERED_PERIODS)?;
pcm.hw_params(&hwp)?; pcm.hw_params(&hwp)?;
@ -64,12 +71,12 @@ fn open_device(dev_name: &str) -> Result<(PCM, Frames), Box<Error>> {
} }
impl Open for AlsaSink { impl Open for AlsaSink {
fn open(device: Option<String>) -> AlsaSink { fn open(device: Option<String>, format: AudioFormat) -> Self {
info!("Using alsa sink"); info!("Using Alsa sink with format: {:?}", format);
let name = match device.as_ref().map(AsRef::as_ref) { let name = match device.as_ref().map(AsRef::as_ref) {
Some("?") => { Some("?") => {
println!("Listing available alsa outputs"); println!("Listing available Alsa outputs:");
list_outputs(); list_outputs();
exit(0) exit(0)
} }
@ -78,8 +85,9 @@ impl Open for AlsaSink {
} }
.to_string(); .to_string();
AlsaSink { Self {
pcm: None, pcm: None,
format,
device: name, device: name,
buffer: vec![], buffer: vec![],
} }
@ -89,12 +97,14 @@ impl Open for AlsaSink {
impl Sink for AlsaSink { impl Sink for AlsaSink {
fn start(&mut self) -> io::Result<()> { fn start(&mut self) -> io::Result<()> {
if self.pcm.is_none() { if self.pcm.is_none() {
let pcm = open_device(&self.device); let pcm = open_device(&self.device, self.format);
match pcm { match pcm {
Ok((p, period_size)) => { Ok((p, period_size)) => {
self.pcm = Some(p); self.pcm = Some(p);
// Create a buffer for all samples for a full period // Create a buffer for all samples for a full period
self.buffer = Vec::with_capacity((period_size * 2) as usize); self.buffer = Vec::with_capacity(
period_size as usize * BUFFERED_PERIODS as usize * self.format.size(),
);
} }
Err(e) => { Err(e) => {
error!("Alsa error PCM open {}", e); error!("Alsa error PCM open {}", e);
@ -111,23 +121,22 @@ impl Sink for AlsaSink {
fn stop(&mut self) -> io::Result<()> { fn stop(&mut self) -> io::Result<()> {
{ {
let pcm = self.pcm.as_mut().unwrap();
// Write any leftover data in the period buffer // Write any leftover data in the period buffer
// before draining the actual buffer // before draining the actual buffer
let io = pcm.io_i16().unwrap(); self.write_bytes(&[]).expect("could not flush buffer");
match io.writei(&self.buffer[..]) { let pcm = self.pcm.as_mut().unwrap();
Ok(_) => (),
Err(err) => pcm.try_recover(err, false).unwrap(),
}
pcm.drain().unwrap(); pcm.drain().unwrap();
} }
self.pcm = None; self.pcm = None;
Ok(()) Ok(())
} }
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> { sink_as_bytes!();
}
impl SinkAsBytes for AlsaSink {
fn write_bytes(&mut self, data: &[u8]) -> io::Result<()> {
let mut processed_data = 0; let mut processed_data = 0;
let data = packet.samples();
while processed_data < data.len() { while processed_data < data.len() {
let data_to_buffer = min( let data_to_buffer = min(
self.buffer.capacity() - self.buffer.len(), self.buffer.capacity() - self.buffer.len(),
@ -137,12 +146,7 @@ impl Sink for AlsaSink {
.extend_from_slice(&data[processed_data..processed_data + data_to_buffer]); .extend_from_slice(&data[processed_data..processed_data + data_to_buffer]);
processed_data += data_to_buffer; processed_data += data_to_buffer;
if self.buffer.len() == self.buffer.capacity() { if self.buffer.len() == self.buffer.capacity() {
let pcm = self.pcm.as_mut().unwrap(); self.write_buf();
let io = pcm.io_i16().unwrap();
match io.writei(&self.buffer) {
Ok(_) => (),
Err(err) => pcm.try_recover(err, false).unwrap(),
}
self.buffer.clear(); self.buffer.clear();
} }
} }
@ -150,3 +154,14 @@ impl Sink for AlsaSink {
Ok(()) Ok(())
} }
} }
impl AlsaSink {
fn write_buf(&mut self) {
let pcm = self.pcm.as_mut().unwrap();
let io = pcm.io_bytes();
match io.writei(&self.buffer) {
Ok(_) => (),
Err(err) => pcm.try_recover(err, false).unwrap(),
};
}
}
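Note: a back-of-the-envelope check of the new period sizing, assuming SAMPLES_PER_SECOND is 44,100 Hz times 2 channels and the F32 format (4 bytes per sample); the numbers are illustrative, not taken from the diff:

fn main() {
    let samples_per_second = 44_100u32 * 2;         // stereo
    let bytes_per_second = samples_per_second * 4;  // F32: 4 bytes per sample
    let period_bytes = (bytes_per_second as f32 * (0.125 / 4.0)) as u32;
    println!("{}", period_bytes); // 11025: one period covers ~31 ms, 4 periods ~125 ms
}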
@ -1,21 +1,42 @@
use super::{Open, Sink}; use super::{Open, Sink, SinkAsBytes};
use crate::audio::AudioPacket; use crate::audio::AudioPacket;
use crate::config::AudioFormat;
use crate::player::{NUM_CHANNELS, SAMPLE_RATE};
use gstreamer as gst;
use gstreamer_app as gst_app;
use gst::prelude::*; use gst::prelude::*;
use gst::*; use zerocopy::AsBytes;
use std::sync::mpsc::{sync_channel, SyncSender}; use std::sync::mpsc::{sync_channel, SyncSender};
use std::{io, thread}; use std::{io, thread};
use zerocopy::*;
#[allow(dead_code)] #[allow(dead_code)]
pub struct GstreamerSink { pub struct GstreamerSink {
tx: SyncSender<Vec<u8>>, tx: SyncSender<Vec<u8>>,
pipeline: gst::Pipeline, pipeline: gst::Pipeline,
format: AudioFormat,
} }
impl Open for GstreamerSink { impl Open for GstreamerSink {
fn open(device: Option<String>) -> GstreamerSink { fn open(device: Option<String>, format: AudioFormat) -> Self {
gst::init().expect("Failed to init gstreamer!"); info!("Using GStreamer sink with format: {:?}", format);
let pipeline_str_preamble = r#"appsrc caps="audio/x-raw,format=S16LE,layout=interleaved,channels=2,rate=44100" block=true max-bytes=4096 name=appsrc0 "#; gst::init().expect("failed to init GStreamer!");
// GStreamer calls S24 and S24_3 different from the rest of the world
let gst_format = match format {
AudioFormat::S24 => "S24_32".to_string(),
AudioFormat::S24_3 => "S24".to_string(),
_ => format!("{:?}", format),
};
let sample_size = format.size();
let gst_bytes = 2048 * sample_size;
let pipeline_str_preamble = format!(
"appsrc caps=\"audio/x-raw,format={}LE,layout=interleaved,channels={},rate={}\" block=true max-bytes={} name=appsrc0 ",
gst_format, NUM_CHANNELS, SAMPLE_RATE, gst_bytes
);
let pipeline_str_rest = r#" ! audioconvert ! autoaudiosink"#; let pipeline_str_rest = r#" ! audioconvert ! autoaudiosink"#;
let pipeline_str: String = match device { let pipeline_str: String = match device {
Some(x) => format!("{}{}", pipeline_str_preamble, x), Some(x) => format!("{}{}", pipeline_str_preamble, x),
@ -27,38 +48,37 @@ impl Open for GstreamerSink {
let pipelinee = gst::parse_launch(&*pipeline_str).expect("Couldn't launch pipeline; likely a GStreamer issue or an error in the pipeline string you specified in the 'device' argument to librespot."); let pipelinee = gst::parse_launch(&*pipeline_str).expect("Couldn't launch pipeline; likely a GStreamer issue or an error in the pipeline string you specified in the 'device' argument to librespot.");
let pipeline = pipelinee let pipeline = pipelinee
.dynamic_cast::<gst::Pipeline>() .dynamic_cast::<gst::Pipeline>()
.expect("Couldn't cast pipeline element at runtime!"); .expect("couldn't cast pipeline element at runtime!");
let bus = pipeline.get_bus().expect("Couldn't get bus from pipeline"); let bus = pipeline.get_bus().expect("couldn't get bus from pipeline");
let mainloop = glib::MainLoop::new(None, false); let mainloop = glib::MainLoop::new(None, false);
let appsrce: gst::Element = pipeline let appsrce: gst::Element = pipeline
.get_by_name("appsrc0") .get_by_name("appsrc0")
.expect("Couldn't get appsrc from pipeline"); .expect("couldn't get appsrc from pipeline");
let appsrc: gst_app::AppSrc = appsrce let appsrc: gst_app::AppSrc = appsrce
.dynamic_cast::<gst_app::AppSrc>() .dynamic_cast::<gst_app::AppSrc>()
.expect("Couldn't cast AppSrc element at runtime!"); .expect("couldn't cast AppSrc element at runtime!");
let bufferpool = gst::BufferPool::new(); let bufferpool = gst::BufferPool::new();
let appsrc_caps = appsrc.get_caps().expect("Couldn't get appsrc caps"); let appsrc_caps = appsrc.get_caps().expect("couldn't get appsrc caps");
let mut conf = bufferpool.get_config(); let mut conf = bufferpool.get_config();
conf.set_params(Some(&appsrc_caps), 8192, 0, 0); conf.set_params(Some(&appsrc_caps), 4096 * sample_size as u32, 0, 0);
bufferpool bufferpool
.set_config(conf) .set_config(conf)
.expect("Couldn't configure the buffer pool"); .expect("couldn't configure the buffer pool");
bufferpool bufferpool
.set_active(true) .set_active(true)
.expect("Couldn't activate buffer pool"); .expect("couldn't activate buffer pool");
let (tx, rx) = sync_channel::<Vec<u8>>(128); let (tx, rx) = sync_channel::<Vec<u8>>(64 * sample_size);
thread::spawn(move || { thread::spawn(move || {
for data in rx { for data in rx {
let buffer = bufferpool.acquire_buffer(None); let buffer = bufferpool.acquire_buffer(None);
if !buffer.is_err() { if let Ok(mut buffer) = buffer {
let mut okbuffer = buffer.unwrap(); let mutbuf = buffer.make_mut();
let mutbuf = okbuffer.make_mut();
mutbuf.set_size(data.len()); mutbuf.set_size(data.len());
mutbuf mutbuf
.copy_from_slice(0, data.as_bytes()) .copy_from_slice(0, data.as_bytes())
.expect("Failed to copy from slice"); .expect("Failed to copy from slice");
let _eat = appsrc.push_buffer(okbuffer); let _eat = appsrc.push_buffer(buffer);
} }
} }
}); });
@ -68,8 +88,8 @@ impl Open for GstreamerSink {
let watch_mainloop = thread_mainloop.clone(); let watch_mainloop = thread_mainloop.clone();
bus.add_watch(move |_, msg| { bus.add_watch(move |_, msg| {
match msg.view() { match msg.view() {
MessageView::Eos(..) => watch_mainloop.quit(), gst::MessageView::Eos(..) => watch_mainloop.quit(),
MessageView::Error(err) => { gst::MessageView::Error(err) => {
println!( println!(
"Error from {:?}: {} ({:?})", "Error from {:?}: {} ({:?})",
err.get_src().map(|s| s.get_path_string()), err.get_src().map(|s| s.get_path_string()),
@ -83,33 +103,32 @@ impl Open for GstreamerSink {
glib::Continue(true) glib::Continue(true)
}) })
.expect("Failed to add bus watch"); .expect("failed to add bus watch");
thread_mainloop.run(); thread_mainloop.run();
}); });
pipeline pipeline
.set_state(gst::State::Playing) .set_state(gst::State::Playing)
.expect("Unable to set the pipeline to the `Playing` state"); .expect("unable to set the pipeline to the `Playing` state");
GstreamerSink { Self {
tx: tx, tx,
pipeline: pipeline, pipeline,
format,
} }
} }
} }
impl Sink for GstreamerSink { impl Sink for GstreamerSink {
fn start(&mut self) -> io::Result<()> { start_stop_noop!();
Ok(()) sink_as_bytes!();
} }
fn stop(&mut self) -> io::Result<()> {
Ok(()) impl SinkAsBytes for GstreamerSink {
} fn write_bytes(&mut self, data: &[u8]) -> io::Result<()> {
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
// Copy expensively (in to_vec()) to avoid thread synchronization // Copy expensively (in to_vec()) to avoid thread synchronization
let deighta: &[u8] = packet.samples().as_bytes();
self.tx self.tx
.send(deighta.to_vec()) .send(data.to_vec())
.expect("tx send failed in write function"); .expect("tx send failed in write function");
Ok(()) Ok(())
} }
@ -1,5 +1,7 @@
use super::{Open, Sink}; use super::{Open, Sink};
use crate::audio::AudioPacket; use crate::audio::AudioPacket;
use crate::config::AudioFormat;
use crate::player::NUM_CHANNELS;
use jack::{ use jack::{
AsyncClient, AudioOut, Client, ClientOptions, Control, Port, ProcessHandler, ProcessScope, AsyncClient, AudioOut, Client, ClientOptions, Control, Port, ProcessHandler, ProcessScope,
}; };
@ -7,20 +9,18 @@ use std::io;
use std::sync::mpsc::{sync_channel, Receiver, SyncSender}; use std::sync::mpsc::{sync_channel, Receiver, SyncSender};
pub struct JackSink { pub struct JackSink {
send: SyncSender<i16>, send: SyncSender<f32>,
// We have to keep hold of this object, or the Sink can't play...
#[allow(dead_code)]
active_client: AsyncClient<(), JackData>, active_client: AsyncClient<(), JackData>,
} }
pub struct JackData { pub struct JackData {
rec: Receiver<i16>, rec: Receiver<f32>,
port_l: Port<AudioOut>, port_l: Port<AudioOut>,
port_r: Port<AudioOut>, port_r: Port<AudioOut>,
} }
fn pcm_to_f32(sample: i16) -> f32 {
sample as f32 / 32768.0
}
impl ProcessHandler for JackData { impl ProcessHandler for JackData {
fn process(&mut self, _: &Client, ps: &ProcessScope) -> Control { fn process(&mut self, _: &Client, ps: &ProcessScope) -> Control {
// get output port buffers // get output port buffers
@ -33,24 +33,27 @@ impl ProcessHandler for JackData {
let buf_size = buf_r.len(); let buf_size = buf_r.len();
for i in 0..buf_size { for i in 0..buf_size {
buf_r[i] = pcm_to_f32(queue_iter.next().unwrap_or(0)); buf_r[i] = queue_iter.next().unwrap_or(0.0);
buf_l[i] = pcm_to_f32(queue_iter.next().unwrap_or(0)); buf_l[i] = queue_iter.next().unwrap_or(0.0);
} }
Control::Continue Control::Continue
} }
} }
impl Open for JackSink { impl Open for JackSink {
fn open(client_name: Option<String>) -> JackSink { fn open(client_name: Option<String>, format: AudioFormat) -> Self {
info!("Using jack sink!"); if format != AudioFormat::F32 {
warn!("JACK currently does not support {:?} output", format);
}
info!("Using JACK sink with format {:?}", AudioFormat::F32);
let client_name = client_name.unwrap_or("librespot".to_string()); let client_name = client_name.unwrap_or_else(|| "librespot".to_string());
let (client, _status) = let (client, _status) =
Client::new(&client_name[..], ClientOptions::NO_START_SERVER).unwrap(); Client::new(&client_name[..], ClientOptions::NO_START_SERVER).unwrap();
let ch_r = client.register_port("out_0", AudioOut::default()).unwrap(); let ch_r = client.register_port("out_0", AudioOut::default()).unwrap();
let ch_l = client.register_port("out_1", AudioOut::default()).unwrap(); let ch_l = client.register_port("out_1", AudioOut::default()).unwrap();
// buffer for samples from librespot (~10ms) // buffer for samples from librespot (~10ms)
let (tx, rx) = sync_channel(2 * 1024 * 4); let (tx, rx) = sync_channel::<f32>(NUM_CHANNELS as usize * 1024 * AudioFormat::F32.size());
let jack_data = JackData { let jack_data = JackData {
rec: rx, rec: rx,
port_l: ch_l, port_l: ch_l,
@ -58,27 +61,21 @@ impl Open for JackSink {
}; };
let active_client = AsyncClient::new(client, (), jack_data).unwrap(); let active_client = AsyncClient::new(client, (), jack_data).unwrap();
JackSink { Self {
send: tx, send: tx,
active_client: active_client, active_client,
} }
} }
} }
impl Sink for JackSink { impl Sink for JackSink {
fn start(&mut self) -> io::Result<()> { start_stop_noop!();
Ok(())
}
fn stop(&mut self) -> io::Result<()> {
Ok(())
}
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> { fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
for s in packet.samples().iter() { for s in packet.samples().iter() {
let res = self.send.send(*s); let res = self.send.send(*s);
if res.is_err() { if res.is_err() {
error!("jackaudio: cannot write to channel"); error!("cannot write to channel");
} }
} }
Ok(()) Ok(())
@ -1,8 +1,9 @@
use crate::audio::AudioPacket; use crate::audio::AudioPacket;
use crate::config::AudioFormat;
use std::io; use std::io;
pub trait Open { pub trait Open {
fn open(_: Option<String>) -> Self; fn open(_: Option<String>, format: AudioFormat) -> Self;
} }
pub trait Sink { pub trait Sink {
@ -11,8 +12,57 @@ pub trait Sink {
fn write(&mut self, packet: &AudioPacket) -> io::Result<()>; fn write(&mut self, packet: &AudioPacket) -> io::Result<()>;
} }
fn mk_sink<S: Sink + Open + 'static>(device: Option<String>) -> Box<dyn Sink> { pub type SinkBuilder = fn(Option<String>, AudioFormat) -> Box<dyn Sink>;
Box::new(S::open(device))
pub trait SinkAsBytes {
fn write_bytes(&mut self, data: &[u8]) -> io::Result<()>;
}
fn mk_sink<S: Sink + Open + 'static>(device: Option<String>, format: AudioFormat) -> Box<dyn Sink> {
Box::new(S::open(device, format))
}
// reuse code for various backends
macro_rules! sink_as_bytes {
() => {
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
use crate::audio::convert::{self, i24};
use zerocopy::AsBytes;
match packet {
AudioPacket::Samples(samples) => match self.format {
AudioFormat::F32 => self.write_bytes(samples.as_bytes()),
AudioFormat::S32 => {
let samples_s32: &[i32] = &convert::to_s32(samples);
self.write_bytes(samples_s32.as_bytes())
}
AudioFormat::S24 => {
let samples_s24: &[i32] = &convert::to_s24(samples);
self.write_bytes(samples_s24.as_bytes())
}
AudioFormat::S24_3 => {
let samples_s24_3: &[i24] = &convert::to_s24_3(samples);
self.write_bytes(samples_s24_3.as_bytes())
}
AudioFormat::S16 => {
let samples_s16: &[i16] = &convert::to_s16(samples);
self.write_bytes(samples_s16.as_bytes())
}
},
AudioPacket::OggData(samples) => self.write_bytes(samples),
}
}
};
}
macro_rules! start_stop_noop {
() => {
fn start(&mut self) -> io::Result<()> {
Ok(())
}
fn stop(&mut self) -> io::Result<()> {
Ok(())
}
};
} }
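Note: a hedged sketch of how a byte-oriented backend is meant to combine these pieces (FooSink is hypothetical; write comes from sink_as_bytes! and start/stop from start_stop_noop!, while the backend itself only implements write_bytes):

struct FooSink {
    output: Vec<u8>,
    format: AudioFormat, // read by the sink_as_bytes! expansion
}

impl Sink for FooSink {
    start_stop_noop!();
    sink_as_bytes!();
}

impl SinkAsBytes for FooSink {
    fn write_bytes(&mut self, data: &[u8]) -> io::Result<()> {
        self.output.extend_from_slice(data);
        Ok(())
    }
}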
#[cfg(feature = "alsa-backend")] #[cfg(feature = "alsa-backend")]
@ -35,18 +85,6 @@ mod jackaudio;
#[cfg(feature = "jackaudio-backend")] #[cfg(feature = "jackaudio-backend")]
use self::jackaudio::JackSink; use self::jackaudio::JackSink;
#[cfg(all(
feature = "rodiojack-backend",
not(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd"))
))]
compile_error!("Rodio JACK backend is currently only supported on linux.");
#[cfg(all(
feature = "rodiojack-backend",
any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd")
))]
use self::rodio::JackRodioSink;
#[cfg(feature = "gstreamer-backend")] #[cfg(feature = "gstreamer-backend")]
mod gstreamer; mod gstreamer;
#[cfg(feature = "gstreamer-backend")] #[cfg(feature = "gstreamer-backend")]
@ -54,8 +92,6 @@ use self::gstreamer::GstreamerSink;
#[cfg(any(feature = "rodio-backend", feature = "rodiojack-backend"))] #[cfg(any(feature = "rodio-backend", feature = "rodiojack-backend"))]
mod rodio; mod rodio;
#[cfg(feature = "rodio-backend")]
use self::rodio::RodioSink;
#[cfg(feature = "sdl-backend")] #[cfg(feature = "sdl-backend")]
mod sdl; mod sdl;
@ -68,7 +104,7 @@ use self::pipe::StdoutSink;
mod subprocess; mod subprocess;
use self::subprocess::SubprocessSink; use self::subprocess::SubprocessSink;
pub const BACKENDS: &'static [(&'static str, fn(Option<String>) -> Box<dyn Sink>)] = &[ pub const BACKENDS: &[(&str, SinkBuilder)] = &[
#[cfg(feature = "alsa-backend")] #[cfg(feature = "alsa-backend")]
("alsa", mk_sink::<AlsaSink>), ("alsa", mk_sink::<AlsaSink>),
#[cfg(feature = "portaudio-backend")] #[cfg(feature = "portaudio-backend")]
@ -77,22 +113,19 @@ pub const BACKENDS: &'static [(&'static str, fn(Option<String>) -> Box<dyn Sink>
("pulseaudio", mk_sink::<PulseAudioSink>), ("pulseaudio", mk_sink::<PulseAudioSink>),
#[cfg(feature = "jackaudio-backend")] #[cfg(feature = "jackaudio-backend")]
("jackaudio", mk_sink::<JackSink>), ("jackaudio", mk_sink::<JackSink>),
#[cfg(all(
feature = "rodiojack-backend",
any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd")
))]
("rodiojack", mk_sink::<JackRodioSink>),
#[cfg(feature = "gstreamer-backend")] #[cfg(feature = "gstreamer-backend")]
("gstreamer", mk_sink::<GstreamerSink>), ("gstreamer", mk_sink::<GstreamerSink>),
#[cfg(feature = "rodio-backend")] #[cfg(feature = "rodio-backend")]
("rodio", mk_sink::<RodioSink>), ("rodio", rodio::mk_rodio),
#[cfg(feature = "rodiojack-backend")]
("rodiojack", rodio::mk_rodiojack),
#[cfg(feature = "sdl-backend")] #[cfg(feature = "sdl-backend")]
("sdl", mk_sink::<SdlSink>), ("sdl", mk_sink::<SdlSink>),
("pipe", mk_sink::<StdoutSink>), ("pipe", mk_sink::<StdoutSink>),
("subprocess", mk_sink::<SubprocessSink>), ("subprocess", mk_sink::<SubprocessSink>),
]; ];
pub fn find(name: Option<String>) -> Option<fn(Option<String>) -> Box<dyn Sink>> { pub fn find(name: Option<String>) -> Option<SinkBuilder> {
if let Some(name) = name { if let Some(name) = name {
BACKENDS BACKENDS
.iter() .iter()
@ -1,46 +1,36 @@
use super::{Open, Sink}; use super::{Open, Sink, SinkAsBytes};
use crate::audio::AudioPacket; use crate::audio::AudioPacket;
use crate::config::AudioFormat;
use std::fs::OpenOptions; use std::fs::OpenOptions;
use std::io::{self, Write}; use std::io::{self, Write};
use std::mem;
use std::slice;
pub struct StdoutSink(Box<dyn Write>); pub struct StdoutSink {
output: Box<dyn Write>,
format: AudioFormat,
}
impl Open for StdoutSink { impl Open for StdoutSink {
fn open(path: Option<String>) -> StdoutSink { fn open(path: Option<String>, format: AudioFormat) -> Self {
if let Some(path) = path { info!("Using pipe sink with format: {:?}", format);
let file = OpenOptions::new().write(true).open(path).unwrap();
StdoutSink(Box::new(file)) let output: Box<dyn Write> = match path {
} else { Some(path) => Box::new(OpenOptions::new().write(true).open(path).unwrap()),
StdoutSink(Box::new(io::stdout())) _ => Box::new(io::stdout()),
} };
Self { output, format }
} }
} }
impl Sink for StdoutSink { impl Sink for StdoutSink {
fn start(&mut self) -> io::Result<()> { start_stop_noop!();
Ok(()) sink_as_bytes!();
} }
fn stop(&mut self) -> io::Result<()> {
Ok(())
}
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
let data: &[u8] = match packet {
AudioPacket::Samples(data) => unsafe {
slice::from_raw_parts(
data.as_ptr() as *const u8,
data.len() * mem::size_of::<i16>(),
)
},
AudioPacket::OggData(data) => data,
};
self.0.write_all(data)?;
self.0.flush()?;
impl SinkAsBytes for StdoutSink {
fn write_bytes(&mut self, data: &[u8]) -> io::Result<()> {
self.output.write_all(data)?;
self.output.flush()?;
Ok(()) Ok(())
} }
} }
@ -1,16 +1,27 @@
use super::{Open, Sink}; use super::{Open, Sink};
use crate::audio::AudioPacket; use crate::audio::{convert, AudioPacket};
use portaudio_rs; use crate::config::AudioFormat;
use crate::player::{NUM_CHANNELS, SAMPLE_RATE};
use portaudio_rs::device::{get_default_output_index, DeviceIndex, DeviceInfo}; use portaudio_rs::device::{get_default_output_index, DeviceIndex, DeviceInfo};
use portaudio_rs::stream::*; use portaudio_rs::stream::*;
use std::io; use std::io;
use std::process::exit; use std::process::exit;
use std::time::Duration; use std::time::Duration;
pub struct PortAudioSink<'a>( pub enum PortAudioSink<'a> {
F32(
Option<portaudio_rs::stream::Stream<'a, f32, f32>>,
StreamParameters<f32>,
),
S32(
Option<portaudio_rs::stream::Stream<'a, i32, i32>>,
StreamParameters<i32>,
),
S16(
Option<portaudio_rs::stream::Stream<'a, i16, i16>>, Option<portaudio_rs::stream::Stream<'a, i16, i16>>,
StreamParameters<i16>, StreamParameters<i16>,
); ),
}
fn output_devices() -> Box<dyn Iterator<Item = (DeviceIndex, DeviceInfo)>> { fn output_devices() -> Box<dyn Iterator<Item = (DeviceIndex, DeviceInfo)>> {
let count = portaudio_rs::device::get_count().unwrap(); let count = portaudio_rs::device::get_count().unwrap();
@ -40,8 +51,11 @@ fn find_output(device: &str) -> Option<DeviceIndex> {
} }
impl<'a> Open for PortAudioSink<'a> { impl<'a> Open for PortAudioSink<'a> {
fn open(device: Option<String>) -> PortAudioSink<'a> { fn open(device: Option<String>, format: AudioFormat) -> PortAudioSink<'a> {
debug!("Using PortAudio sink"); info!("Using PortAudio sink with format: {:?}", format);
warn!("This backend is known to panic on several platforms.");
warn!("Consider using some other backend, or better yet, contributing a fix.");
portaudio_rs::initialize().unwrap(); portaudio_rs::initialize().unwrap();
@ -53,7 +67,7 @@ impl<'a> Open for PortAudioSink<'a> {
Some(device) => find_output(device), Some(device) => find_output(device),
None => get_default_output_index(), None => get_default_output_index(),
} }
.expect("Could not find device"); .expect("could not find device");
let info = portaudio_rs::device::get_info(device_idx); let info = portaudio_rs::device::get_info(device_idx);
let latency = match info { let latency = match info {
@ -61,25 +75,38 @@ impl<'a> Open for PortAudioSink<'a> {
None => Duration::new(0, 0), None => Duration::new(0, 0),
}; };
macro_rules! open_sink {
($sink: expr, $type: ty) => {{
let params = StreamParameters { let params = StreamParameters {
device: device_idx, device: device_idx,
channel_count: 2, channel_count: NUM_CHANNELS as u32,
suggested_latency: latency, suggested_latency: latency,
data: 0i16, data: 0.0 as $type,
}; };
$sink(None, params)
PortAudioSink(None, params) }};
}
match format {
AudioFormat::F32 => open_sink!(Self::F32, f32),
AudioFormat::S32 => open_sink!(Self::S32, i32),
AudioFormat::S16 => open_sink!(Self::S16, i16),
_ => {
unimplemented!("PortAudio currently does not support {:?} output", format)
}
}
} }
} }
impl<'a> Sink for PortAudioSink<'a> { impl<'a> Sink for PortAudioSink<'a> {
fn start(&mut self) -> io::Result<()> { fn start(&mut self) -> io::Result<()> {
if self.0.is_none() { macro_rules! start_sink {
self.0 = Some( (ref mut $stream: ident, ref $parameters: ident) => {{
if $stream.is_none() {
*$stream = Some(
Stream::open( Stream::open(
None, None,
Some(self.1), Some(*$parameters),
44100.0, SAMPLE_RATE as f64,
FRAMES_PER_BUFFER_UNSPECIFIED, FRAMES_PER_BUFFER_UNSPECIFIED,
StreamFlags::empty(), StreamFlags::empty(),
None, None,
@ -87,20 +114,60 @@ impl<'a> Sink for PortAudioSink<'a> {
.unwrap(), .unwrap(),
); );
} }
$stream.as_mut().unwrap().start().unwrap()
}};
}
match self {
Self::F32(stream, parameters) => start_sink!(ref mut stream, ref parameters),
Self::S32(stream, parameters) => start_sink!(ref mut stream, ref parameters),
Self::S16(stream, parameters) => start_sink!(ref mut stream, ref parameters),
};
self.0.as_mut().unwrap().start().unwrap();
Ok(()) Ok(())
} }
fn stop(&mut self) -> io::Result<()> { fn stop(&mut self) -> io::Result<()> {
self.0.as_mut().unwrap().stop().unwrap(); macro_rules! stop_sink {
self.0 = None; (ref mut $stream: ident) => {{
$stream.as_mut().unwrap().stop().unwrap();
*$stream = None;
}};
}
match self {
Self::F32(stream, _parameters) => stop_sink!(ref mut stream),
Self::S32(stream, _parameters) => stop_sink!(ref mut stream),
Self::S16(stream, _parameters) => stop_sink!(ref mut stream),
};
Ok(()) Ok(())
} }
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> { fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
match self.0.as_mut().unwrap().write(packet.samples()) { macro_rules! write_sink {
(ref mut $stream: expr, $samples: expr) => {
$stream.as_mut().unwrap().write($samples)
};
}
let samples = packet.samples();
let result = match self {
Self::F32(stream, _parameters) => {
write_sink!(ref mut stream, samples)
}
Self::S32(stream, _parameters) => {
let samples_s32: &[i32] = &convert::to_s32(samples);
write_sink!(ref mut stream, samples_s32)
}
Self::S16(stream, _parameters) => {
let samples_s16: &[i16] = &convert::to_s16(samples);
write_sink!(ref mut stream, samples_s16)
}
};
match result {
Ok(_) => (), Ok(_) => (),
Err(portaudio_rs::PaError::OutputUnderflowed) => error!("PortAudio write underflow"), Err(portaudio_rs::PaError::OutputUnderflowed) => error!("PortAudio write underflow"),
Err(e) => panic!("PA Error {}", e), Err(e) => panic!("PortAudio error {}", e),
}; };
Ok(()) Ok(())
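Note: the PortAudio sink now keeps one enum variant per supported sample format and dispatches through small macros. A stripped-down, stand-alone sketch of that dispatch pattern (all names hypothetical, conversion simplified):

enum Queue {
    F32(Vec<f32>),
    S16(Vec<i16>),
}

fn write(queue: &mut Queue, samples: &[f32]) {
    match queue {
        Queue::F32(q) => q.extend_from_slice(samples),
        Queue::S16(q) => {
            // the real code uses convert::to_s16; a plain cast keeps the sketch short
            q.extend(samples.iter().map(|s| (s * 32767.0) as i16));
        }
    }
}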
@ -1,5 +1,7 @@
use super::{Open, Sink}; use super::{Open, Sink, SinkAsBytes};
use crate::audio::AudioPacket; use crate::audio::AudioPacket;
use crate::config::AudioFormat;
use crate::player::{NUM_CHANNELS, SAMPLE_RATE};
use libpulse_binding::{self as pulse, stream::Direction}; use libpulse_binding::{self as pulse, stream::Direction};
use libpulse_simple_binding::Simple; use libpulse_simple_binding::Simple;
use std::io; use std::io;
@ -11,23 +13,34 @@ pub struct PulseAudioSink {
s: Option<Simple>, s: Option<Simple>,
ss: pulse::sample::Spec, ss: pulse::sample::Spec,
device: Option<String>, device: Option<String>,
format: AudioFormat,
} }
impl Open for PulseAudioSink { impl Open for PulseAudioSink {
fn open(device: Option<String>) -> PulseAudioSink { fn open(device: Option<String>, format: AudioFormat) -> Self {
debug!("Using PulseAudio sink"); info!("Using PulseAudio sink with format: {:?}", format);
// PulseAudio calls S24 and S24_3 different from the rest of the world
let pulse_format = match format {
AudioFormat::F32 => pulse::sample::Format::F32le,
AudioFormat::S32 => pulse::sample::Format::S32le,
AudioFormat::S24 => pulse::sample::Format::S24_32le,
AudioFormat::S24_3 => pulse::sample::Format::S24le,
AudioFormat::S16 => pulse::sample::Format::S16le,
};
let ss = pulse::sample::Spec { let ss = pulse::sample::Spec {
format: pulse::sample::Format::S16le, format: pulse_format,
channels: 2, // stereo channels: NUM_CHANNELS,
rate: 44100, rate: SAMPLE_RATE,
}; };
debug_assert!(ss.is_valid()); debug_assert!(ss.is_valid());
PulseAudioSink { Self {
s: None, s: None,
ss: ss, ss,
device: device, device,
format,
} }
} }
} }
@ -66,19 +79,13 @@ impl Sink for PulseAudioSink {
Ok(()) Ok(())
} }
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> { sink_as_bytes!();
if let Some(s) = &self.s { }
// SAFETY: An i16 consists of two bytes, so that the given slice can be interpreted
// as a byte array of double length. Each byte pointer is validly aligned, and so
// is the newly created slice.
let d: &[u8] = unsafe {
std::slice::from_raw_parts(
packet.samples().as_ptr() as *const u8,
packet.samples().len() * 2,
)
};
match s.write(d) { impl SinkAsBytes for PulseAudioSink {
fn write_bytes(&mut self, data: &[u8]) -> io::Result<()> {
if let Some(s) = &self.s {
match s.write(data) {
Ok(_) => Ok(()), Ok(_) => Ok(()),
Err(e) => Err(io::Error::new( Err(e) => Err(io::Error::new(
io::ErrorKind::BrokenPipe, io::ErrorKind::BrokenPipe,
@ -88,7 +95,7 @@ impl Sink for PulseAudioSink {
} else { } else {
Err(io::Error::new( Err(io::Error::new(
io::ErrorKind::NotConnected, io::ErrorKind::NotConnected,
"Not connected to pulseaudio", "Not connected to PulseAudio",
)) ))
} }
} }
@ -1,173 +1,199 @@
use super::{Open, Sink};
extern crate cpal;
extern crate rodio;
use crate::audio::AudioPacket;
use cpal::traits::{DeviceTrait, HostTrait};
use std::process::exit; use std::process::exit;
use std::{io, thread, time}; use std::{io, thread, time};
pub struct RodioSink { use cpal::traits::{DeviceTrait, HostTrait};
rodio_sink: rodio::Sink, use thiserror::Error;
// We have to keep hold of this object, or the Sink can't play...
#[allow(dead_code)] use super::Sink;
stream: rodio::OutputStream, use crate::audio::{convert, AudioPacket};
} use crate::config::AudioFormat;
use crate::player::{NUM_CHANNELS, SAMPLE_RATE};
#[cfg(all( #[cfg(all(
feature = "rodiojack-backend", feature = "rodiojack-backend",
any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd") not(any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd"))
))] ))]
pub struct JackRodioSink { compile_error!("Rodio JACK backend is currently only supported on linux.");
jackrodio_sink: rodio::Sink,
// We have to keep hold of this object, or the Sink can't play... #[cfg(feature = "rodio-backend")]
#[allow(dead_code)] pub fn mk_rodio(device: Option<String>, format: AudioFormat) -> Box<dyn Sink> {
stream: rodio::OutputStream, Box::new(open(cpal::default_host(), device, format))
} }
fn list_formats(ref device: &rodio::Device) { #[cfg(feature = "rodiojack-backend")]
let default_fmt = match device.default_output_config() { pub fn mk_rodiojack(device: Option<String>, format: AudioFormat) -> Box<dyn Sink> {
Ok(fmt) => cpal::SupportedStreamConfig::from(fmt), Box::new(open(
Err(e) => { cpal::host_from_id(cpal::HostId::Jack).unwrap(),
warn!("Error getting default rodio::Sink config: {}", e); device,
return; format,
} ))
}; }
#[derive(Debug, Error)]
pub enum RodioError {
#[error("Rodio: no device available")]
NoDeviceAvailable,
#[error("Rodio: device \"{0}\" is not available")]
DeviceNotAvailable(String),
#[error("Rodio play error: {0}")]
PlayError(#[from] rodio::PlayError),
#[error("Rodio stream error: {0}")]
StreamError(#[from] rodio::StreamError),
#[error("Cannot get audio devices: {0}")]
DevicesError(#[from] cpal::DevicesError),
}
pub struct RodioSink {
rodio_sink: rodio::Sink,
format: AudioFormat,
_stream: rodio::OutputStream,
}
fn list_formats(device: &rodio::Device) {
match device.default_output_config() {
Ok(cfg) => {
debug!(" Default config:"); debug!(" Default config:");
debug!(" {:?}", default_fmt); debug!(" {:?}", cfg);
}
let mut output_configs = match device.supported_output_configs() {
Ok(f) => f.peekable(),
Err(e) => { Err(e) => {
warn!("Error getting supported rodio::Sink configs: {}", e); // Use loglevel debug, since even the output is only debug
return; debug!("Error getting default rodio::Sink config: {}", e);
} }
}; };
if output_configs.peek().is_some() { match device.supported_output_configs() {
Ok(mut cfgs) => {
if let Some(first) = cfgs.next() {
debug!(" Available configs:"); debug!(" Available configs:");
for format in output_configs { debug!(" {:?}", first);
debug!(" {:?}", format); } else {
return;
}
for cfg in cfgs {
debug!(" {:?}", cfg);
}
}
Err(e) => {
debug!("Error getting supported rodio::Sink configs: {}", e);
} }
} }
} }
fn list_outputs(ref host: &cpal::Host) { fn list_outputs(host: &cpal::Host) -> Result<(), cpal::DevicesError> {
let default_device = get_default_device(host); let mut default_device_name = None;
let default_device_name = default_device.name().expect("cannot get output name");
println!("Default Audio Device:\n {}", default_device_name); if let Some(default_device) = host.default_output_device() {
default_device_name = default_device.name().ok();
println!(
"Default Audio Device:\n {}",
default_device_name.as_deref().unwrap_or("[unknown name]")
);
list_formats(&default_device); list_formats(&default_device);
println!("Other Available Audio Devices:"); println!("Other Available Audio Devices:");
} else {
warn!("No default device was found");
}
let found_devices = host.output_devices().expect(&format!( for device in host.output_devices()? {
"Cannot get list of output devices of Host: {:?}", match device.name() {
host.id() Ok(name) if Some(&name) == default_device_name.as_ref() => (),
)); Ok(name) => {
for device in found_devices { println!(" {}", name);
let device_name = device.name().expect("cannot get output name"); list_formats(&device);
if device_name != default_device_name { }
println!(" {}", device_name); Err(e) => {
warn!("Cannot get device name: {}", e);
println!(" [unknown name]");
list_formats(&device); list_formats(&device);
} }
} }
}
Ok(())
} }
fn get_default_device(ref host: &cpal::Host) -> rodio::Device { fn create_sink(
host.default_output_device() host: &cpal::Host,
.expect("no default output device available") device: Option<String>,
} ) -> Result<(rodio::Sink, rodio::OutputStream), RodioError> {
let rodio_device = match device {
fn match_device(ref host: &cpal::Host, device: Option<String>) -> rodio::Device { Some(ask) if &ask == "?" => {
match device { let exit_code = match list_outputs(host) {
Ok(()) => 0,
Err(e) => {
error!("{}", e);
1
}
};
exit(exit_code)
}
Some(device_name) => { Some(device_name) => {
if device_name == "?".to_string() { host.output_devices()?
list_outputs(host); .find(|d| d.name().ok().map_or(false, |name| name == device_name)) // Ignore devices for which getting name fails
exit(0) .ok_or(RodioError::DeviceNotAvailable(device_name))?
} }
None => host
.default_output_device()
.ok_or(RodioError::NoDeviceAvailable)?,
};
let found_devices = host.output_devices().expect(&format!( let name = rodio_device.name().ok();
"Cannot get list of output devices of Host: {:?}", info!(
host.id() "Using audio device: {}",
)); name.as_deref().unwrap_or("[unknown name]")
for d in found_devices { );
if d.name().expect("cannot get output name") == device_name {
return d; let (stream, handle) = rodio::OutputStream::try_from_device(&rodio_device)?;
} let sink = rodio::Sink::try_new(&handle)?;
} Ok((sink, stream))
println!("No output sink matching '{}' found.", device_name);
exit(0)
}
None => return get_default_device(host),
}
} }
impl Open for RodioSink { pub fn open(host: cpal::Host, device: Option<String>, format: AudioFormat) -> RodioSink {
fn open(device: Option<String>) -> RodioSink { info!(
let host = cpal::default_host(); "Using Rodio sink with format {:?} and cpal host: {}",
debug!("Using rodio sink with cpal host: {:?}", host.id()); format,
host.id().name()
);
let rodio_device = match_device(&host, device); if format != AudioFormat::S16 && format != AudioFormat::F32 {
debug!("Using cpal device"); unimplemented!("Rodio currently only supports F32 and S16 formats");
let stream = rodio::OutputStream::try_from_device(&rodio_device) }
.expect("Couldn't open output stream.");
debug!("Using rodio stream");
let sink = rodio::Sink::try_new(&stream.1).expect("Couldn't create output sink.");
debug!("Using rodio sink");
let (sink, stream) = create_sink(&host, device).unwrap();
debug!("Rodio sink was created");
RodioSink { RodioSink {
rodio_sink: sink, rodio_sink: sink,
stream: stream.0, format,
} _stream: stream,
}
}
#[cfg(all(
feature = "rodiojack-backend",
any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd")
))]
impl Open for JackRodioSink {
fn open(device: Option<String>) -> JackRodioSink {
let host = cpal::host_from_id(
cpal::available_hosts()
.into_iter()
.find(|id| *id == cpal::HostId::Jack)
.expect("Jack Host not found"),
)
.expect("Jack Host not found");
debug!("Using jack rodio sink with cpal Jack host");
let rodio_device = match_device(&host, device);
debug!("Using cpal device");
let stream = rodio::OutputStream::try_from_device(&rodio_device)
.expect("Couldn't open output stream.");
debug!("Using jack rodio stream");
let sink = rodio::Sink::try_new(&stream.1).expect("Couldn't create output sink.");
debug!("Using jack rodio sink");
JackRodioSink {
jackrodio_sink: sink,
stream: stream.0,
}
} }
} }
impl Sink for RodioSink { impl Sink for RodioSink {
fn start(&mut self) -> io::Result<()> { start_stop_noop!();
// More similar to an "unpause" than "play". Doesn't undo "stop".
// self.rodio_sink.play();
Ok(())
}
fn stop(&mut self) -> io::Result<()> {
// This will immediately stop playback, but the sink is then unusable.
// We just have to let the current buffer play till the end.
// self.rodio_sink.stop();
Ok(())
}
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> { fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
let source = rodio::buffer::SamplesBuffer::new(2, 44100, packet.samples()); let samples = packet.samples();
match self.format {
AudioFormat::F32 => {
let source =
rodio::buffer::SamplesBuffer::new(NUM_CHANNELS as u16, SAMPLE_RATE, samples);
self.rodio_sink.append(source); self.rodio_sink.append(source);
}
AudioFormat::S16 => {
let samples_s16: &[i16] = &convert::to_s16(samples);
let source = rodio::buffer::SamplesBuffer::new(
NUM_CHANNELS as u16,
SAMPLE_RATE,
samples_s16,
);
self.rodio_sink.append(source);
}
_ => unreachable!(),
};
// Chunk sizes seem to be about 256 to 3000 ish items long. // Chunk sizes seem to be about 256 to 3000 ish items long.
// Assuming they're on average 1628 then a half second buffer is: // Assuming they're on average 1628 then a half second buffer is:
@ -179,36 +205,3 @@ impl Sink for RodioSink {
Ok(()) Ok(())
} }
} }
#[cfg(all(
feature = "rodiojack-backend",
any(target_os = "linux", target_os = "dragonfly", target_os = "freebsd")
))]
impl Sink for JackRodioSink {
fn start(&mut self) -> io::Result<()> {
// More similar to an "unpause" than "play". Doesn't undo "stop".
// self.rodio_sink.play();
Ok(())
}
fn stop(&mut self) -> io::Result<()> {
// This will immediately stop playback, but the sink is then unusable.
// We just have to let the current buffer play till the end.
// self.rodio_sink.stop();
Ok(())
}
fn write(&mut self, data: &[i16]) -> io::Result<()> {
let source = rodio::buffer::SamplesBuffer::new(2, 44100, data);
self.jackrodio_sink.append(source);
// Chunk sizes seem to be about 256 to 3000 ish items long.
// Assuming they're on average 1628 then a half second buffer is:
// 44100 elements --> about 27 chunks
while self.jackrodio_sink.len() > 26 {
// sleep and wait for rodio to drain a bit
thread::sleep(time::Duration::from_millis(10));
}
Ok(())
}
}
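Note: the new RodioError type lets the fallible cpal and rodio calls be chained with ? and converted through the #[from] attributes. A hedged sketch of that flow, essentially what create_sink above does (the function name here is made up):

use cpal::traits::HostTrait;

fn try_default_sink(host: &cpal::Host) -> Result<(rodio::Sink, rodio::OutputStream), RodioError> {
    let device = host
        .default_output_device()
        .ok_or(RodioError::NoDeviceAvailable)?;
    let (stream, handle) = rodio::OutputStream::try_from_device(&device)?; // StreamError -> RodioError
    let sink = rodio::Sink::try_new(&handle)?; // PlayError -> RodioError
    Ok((sink, stream))
}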
@ -1,57 +1,112 @@
use super::{Open, Sink}; use super::{Open, Sink};
use crate::audio::AudioPacket; use crate::audio::{convert, AudioPacket};
use crate::config::AudioFormat;
use crate::player::{NUM_CHANNELS, SAMPLE_RATE};
use sdl2::audio::{AudioQueue, AudioSpecDesired}; use sdl2::audio::{AudioQueue, AudioSpecDesired};
use std::{io, thread, time}; use std::{io, thread, time};
type Channel = i16; pub enum SdlSink {
F32(AudioQueue<f32>),
pub struct SdlSink { S32(AudioQueue<i32>),
queue: AudioQueue<Channel>, S16(AudioQueue<i16>),
} }
impl Open for SdlSink { impl Open for SdlSink {
fn open(device: Option<String>) -> SdlSink { fn open(device: Option<String>, format: AudioFormat) -> Self {
debug!("Using SDL sink"); info!("Using SDL sink with format: {:?}", format);
if device.is_some() { if device.is_some() {
panic!("SDL sink does not support specifying a device name"); warn!("SDL sink does not support specifying a device name");
} }
let ctx = sdl2::init().expect("Could not init SDL"); let ctx = sdl2::init().expect("could not initialize SDL");
let audio = ctx.audio().expect("Could not init SDL audio subsystem"); let audio = ctx
.audio()
.expect("could not initialize SDL audio subsystem");
let desired_spec = AudioSpecDesired { let desired_spec = AudioSpecDesired {
freq: Some(44_100), freq: Some(SAMPLE_RATE as i32),
channels: Some(2), channels: Some(NUM_CHANNELS),
samples: None, samples: None,
}; };
let queue = audio
.open_queue(None, &desired_spec)
.expect("Could not open SDL audio device");
SdlSink { queue: queue } macro_rules! open_sink {
($sink: expr, $type: ty) => {{
let queue: AudioQueue<$type> = audio
.open_queue(None, &desired_spec)
.expect("could not open SDL audio device");
$sink(queue)
}};
}
match format {
AudioFormat::F32 => open_sink!(Self::F32, f32),
AudioFormat::S32 => open_sink!(Self::S32, i32),
AudioFormat::S16 => open_sink!(Self::S16, i16),
_ => {
unimplemented!("SDL currently does not support {:?} output", format)
}
}
} }
} }
impl Sink for SdlSink { impl Sink for SdlSink {
fn start(&mut self) -> io::Result<()> { fn start(&mut self) -> io::Result<()> {
self.queue.clear(); macro_rules! start_sink {
self.queue.resume(); ($queue: expr) => {{
$queue.clear();
$queue.resume();
}};
}
match self {
Self::F32(queue) => start_sink!(queue),
Self::S32(queue) => start_sink!(queue),
Self::S16(queue) => start_sink!(queue),
};
Ok(()) Ok(())
} }
fn stop(&mut self) -> io::Result<()> { fn stop(&mut self) -> io::Result<()> {
self.queue.pause(); macro_rules! stop_sink {
self.queue.clear(); ($queue: expr) => {{
$queue.pause();
$queue.clear();
}};
}
match self {
Self::F32(queue) => stop_sink!(queue),
Self::S32(queue) => stop_sink!(queue),
Self::S16(queue) => stop_sink!(queue),
};
Ok(()) Ok(())
} }
fn write(&mut self, packet: &AudioPacket) -> io::Result<()> { fn write(&mut self, packet: &AudioPacket) -> io::Result<()> {
while self.queue.size() > (2 * 2 * 44_100) { macro_rules! drain_sink {
($queue: expr, $size: expr) => {{
// sleep and wait for sdl thread to drain the queue a bit // sleep and wait for sdl thread to drain the queue a bit
while $queue.size() > (NUM_CHANNELS as u32 * $size as u32 * SAMPLE_RATE) {
thread::sleep(time::Duration::from_millis(10)); thread::sleep(time::Duration::from_millis(10));
} }
self.queue.queue(packet.samples()); }};
}
let samples = packet.samples();
match self {
Self::F32(queue) => {
drain_sink!(queue, AudioFormat::F32.size());
queue.queue(samples)
}
Self::S32(queue) => {
let samples_s32: &[i32] = &convert::to_s32(samples);
drain_sink!(queue, AudioFormat::S32.size());
queue.queue(samples_s32)
}
Self::S16(queue) => {
let samples_s16: &[i16] = &convert::to_s16(samples);
drain_sink!(queue, AudioFormat::S16.size());
queue.queue(samples_s16)
}
};
Ok(()) Ok(())
} }
} }
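Note: the drain threshold in write works out to roughly one second of queued audio. For S16 stereo at 44.1 kHz, for instance:

// NUM_CHANNELS (2) * AudioFormat::S16.size() (2 bytes) * SAMPLE_RATE (44_100)
//   = 176_400 queued bytes, the same bound as the old hard-coded 2 * 2 * 44_100;
// for F32 the bound scales to 352_800 bytes, still about one second of audio.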
@ -1,22 +1,26 @@
use super::{Open, Sink, SinkAsBytes};
use crate::audio::AudioPacket;
use crate::config::AudioFormat;

use shell_words::split;

use std::io::{self, Write};
use std::process::{Child, Command, Stdio};

pub struct SubprocessSink {
    shell_command: String,
    child: Option<Child>,
    format: AudioFormat,
}

impl Open for SubprocessSink {
    fn open(shell_command: Option<String>, format: AudioFormat) -> Self {
        info!("Using subprocess sink with format: {:?}", format);

        if let Some(shell_command) = shell_command {
            SubprocessSink {
                shell_command,
                child: None,
                format,
            }
        } else {
            panic!("subprocess sink requires specifying a shell command");
@ -44,16 +48,15 @@ impl Sink for SubprocessSink {
        Ok(())
    }

    sink_as_bytes!();
}

impl SinkAsBytes for SubprocessSink {
    fn write_bytes(&mut self, data: &[u8]) -> io::Result<()> {
        if let Some(child) = &mut self.child {
            let child_stdin = child.stdin.as_mut().unwrap();
            child_stdin.write_all(data)?;
            child_stdin.flush()?;
        }
        Ok(())
    }
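For illustration, opening the sink above with a hypothetical playback command. This is a sketch, not taken from the changeset itself; the real CLI wiring lives in main.rs, and the shell command shown is just an example of a program that reads raw PCM from stdin.

fn open_subprocess_sink() -> std::io::Result<()> {
    // Hypothetical shell command reading interleaved S16 PCM from stdin.
    let mut sink = SubprocessSink::open(
        Some("aplay -f S16_LE -r 44100 -c 2".to_string()),
        AudioFormat::S16,
    );
    sink.start()?; // assumed to spawn the child; write_bytes() then pipes PCM to its stdin
    sink.stop()
}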

@ -1,3 +1,6 @@
use crate::audio::convert::i24;
use std::convert::TryFrom;
use std::mem;
use std::str::FromStr;

#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]
@ -11,17 +14,58 @@ impl FromStr for Bitrate {
    type Err = ();
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "96" => Ok(Self::Bitrate96),
            "160" => Ok(Self::Bitrate160),
            "320" => Ok(Self::Bitrate320),
            _ => Err(()),
        }
    }
}

impl Default for Bitrate {
    fn default() -> Self {
        Self::Bitrate160
}
}
#[derive(Clone, Copy, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub enum AudioFormat {
F32,
S32,
S24,
S24_3,
S16,
}
impl TryFrom<&String> for AudioFormat {
type Error = ();
fn try_from(s: &String) -> Result<Self, Self::Error> {
match s.to_uppercase().as_str() {
"F32" => Ok(Self::F32),
"S32" => Ok(Self::S32),
"S24" => Ok(Self::S24),
"S24_3" => Ok(Self::S24_3),
"S16" => Ok(Self::S16),
_ => Err(()),
}
}
}
impl Default for AudioFormat {
fn default() -> Self {
Self::S16
}
}
impl AudioFormat {
// not used by all backends
#[allow(dead_code)]
pub fn size(&self) -> usize {
match self {
Self::S24_3 => mem::size_of::<i24>(),
Self::S16 => mem::size_of::<i16>(),
_ => mem::size_of::<i32>(), // S32 and S24 are both stored in i32
}
    }
}
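The size() helper above gives the per-sample byte width of each output format. As a rough illustration of how it combines with the playback constants, here is a small sketch (illustrative only; it assumes the SAMPLE_RATE and NUM_CHANNELS constants defined in the player module):

fn bytes_per_second(format: AudioFormat) -> usize {
    // samples per second * channels * bytes per sample
    SAMPLE_RATE as usize * NUM_CHANNELS as usize * format.size()
}
// e.g. S16 at 44_100 Hz stereo -> 44_100 * 2 * 2 = 176_400 bytes/s,
//      F32                     -> 44_100 * 2 * 4 = 352_800 bytes/s.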
@ -35,16 +79,39 @@ impl FromStr for NormalisationType {
    type Err = ();
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "album" => Ok(Self::Album),
            "track" => Ok(Self::Track),
            _ => Err(()),
        }
    }
}

impl Default for NormalisationType {
    fn default() -> Self {
        Self::Album
}
}
#[derive(Clone, Debug, PartialEq)]
pub enum NormalisationMethod {
Basic,
Dynamic,
}
impl FromStr for NormalisationMethod {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"basic" => Ok(Self::Basic),
"dynamic" => Ok(Self::Dynamic),
_ => Err(()),
}
}
}
impl Default for NormalisationMethod {
fn default() -> Self {
Self::Dynamic
    }
}
@ -53,7 +120,12 @@ pub struct PlayerConfig {
    pub bitrate: Bitrate,
    pub normalisation: bool,
    pub normalisation_type: NormalisationType,
    pub normalisation_method: NormalisationMethod,
    pub normalisation_pregain: f32,
    pub normalisation_threshold: f32,
    pub normalisation_attack: f32,
    pub normalisation_release: f32,
    pub normalisation_knee: f32,
    pub gapless: bool,
    pub passthrough: bool,
}
@ -64,7 +136,12 @@ impl Default for PlayerConfig {
            bitrate: Bitrate::default(),
            normalisation: false,
            normalisation_type: NormalisationType::default(),
            normalisation_method: NormalisationMethod::default(),
            normalisation_pregain: 0.0,
            normalisation_threshold: -1.0,
            normalisation_attack: 0.005,
            normalisation_release: 0.1,
            normalisation_knee: 1.0,
            gapless: true,
            passthrough: false,
        }

@ -1,39 +1,9 @@
#[macro_use]
extern crate log;

use librespot_audio as audio;
use librespot_core as core;
use librespot_metadata as metadata;

pub mod audio_backend;
pub mod config;

@ -1,10 +1,7 @@
use super::AudioFilter;
use super::{Mixer, MixerConfig};
use std::error::Error;

const SND_CTL_TLV_DB_GAIN_MUTE: i64 = -9999999;

#[derive(Clone)]
@ -36,13 +33,12 @@ impl AlsaMixer {
        let mixer = alsa::mixer::Mixer::new(&config.card, false)?;
        let sid = alsa::mixer::SelemId::new(&config.mixer, config.index);

        let selem = mixer.find_selem(&sid).unwrap_or_else(|| {
            panic!(
                "Couldn't find simple mixer control for {},{}",
                &config.mixer, &config.index,
            )
        });
        let (min, max) = selem.get_playback_volume_range();
        let (min_db, max_db) = selem.get_playback_db_range();
        let hw_mix = selem
@ -72,14 +68,14 @@ impl AlsaMixer {
        }

        Ok(AlsaMixer {
            config,
            params: AlsaMixerVolumeParams {
                min,
                max,
                range: (max - min) as f64,
                min_db,
                max_db,
                has_switch,
            },
        })
    }

@ -12,7 +12,7 @@ pub trait Mixer: Send {
}

pub trait AudioFilter {
    fn modify_stream(&self, data: &mut [f32]);
}

#[cfg(feature = "alsa-backend")]
@ -42,11 +42,13 @@ impl Default for MixerConfig {
pub mod softmixer;
use self::softmixer::SoftMixer;

type MixerFn = fn(Option<MixerConfig>) -> Box<dyn Mixer>;

fn mk_sink<M: Mixer + 'static>(device: Option<MixerConfig>) -> Box<dyn Mixer> {
    Box::new(M::open(device))
}

pub fn find<T: AsRef<str>>(name: Option<T>) -> Option<MixerFn> {
    match name.as_ref().map(AsRef::as_ref) {
        None | Some("softvol") => Some(mk_sink::<SoftMixer>),
        #[cfg(feature = "alsa-backend")]

@ -35,11 +35,12 @@ struct SoftVolumeApplier {
}

impl AudioFilter for SoftVolumeApplier {
    fn modify_stream(&self, data: &mut [f32]) {
        let volume = self.volume.load(Ordering::Relaxed) as u16;
        if volume != 0xFFFF {
            let volume_factor = volume as f64 / 0xFFFF as f64;
            for x in data.iter_mut() {
                *x = (*x as f64 * volume_factor) as f32;
            }
        }
    }

@ -1,19 +1,15 @@
use std::cmp::max;
use std::future::Future;
use std::io::{self, Read, Seek, SeekFrom};
use std::pin::Pin;
use std::task::{Context, Poll};
use std::time::{Duration, Instant};
use std::{mem, thread};

use byteorder::{LittleEndian, ReadBytesExt};
use futures_util::stream::futures_unordered::FuturesUnordered;
use futures_util::{future, StreamExt, TryFutureExt};
use tokio::sync::{mpsc, oneshot};

use crate::audio::{AudioDecoder, AudioError, AudioPacket, PassthroughDecoder, VorbisDecoder};
use crate::audio::{AudioDecrypt, AudioFile, StreamLoaderController};
@ -22,13 +18,22 @@ use crate::audio::{
    READ_AHEAD_DURING_PLAYBACK_ROUNDTRIPS, READ_AHEAD_DURING_PLAYBACK_SECONDS,
};
use crate::audio_backend::Sink;
use crate::config::{Bitrate, NormalisationMethod, NormalisationType, PlayerConfig};
use crate::core::session::Session;
use crate::core::spotify_id::SpotifyId;
use crate::core::util::SeqGenerator;
use crate::metadata::{AudioItem, FileFormat};
use crate::mixer::AudioFilter;

pub const SAMPLE_RATE: u32 = 44100;
pub const NUM_CHANNELS: u8 = 2;
pub const SAMPLES_PER_SECOND: u32 = SAMPLE_RATE as u32 * NUM_CHANNELS as u32;

const PRELOAD_NEXT_TRACK_BEFORE_END_DURATION_MS: u32 = 30000;
const DB_VOLTAGE_RATIO: f32 = 20.0;

pub struct Player {
    commands: Option<mpsc::UnboundedSender<PlayerCommand>>,
    thread_handle: Option<thread::JoinHandle<()>>,
    play_request_id_generator: SeqGenerator<u64>,
}
@ -45,7 +50,7 @@ pub type SinkEventCallback = Box<dyn Fn(SinkStatus) + Send>;
struct PlayerInternal { struct PlayerInternal {
session: Session, session: Session,
config: PlayerConfig, config: PlayerConfig,
commands: futures::sync::mpsc::UnboundedReceiver<PlayerCommand>, commands: mpsc::UnboundedReceiver<PlayerCommand>,
state: PlayerState, state: PlayerState,
preload: PlayerPreload, preload: PlayerPreload,
@ -53,7 +58,14 @@ struct PlayerInternal {
sink_status: SinkStatus, sink_status: SinkStatus,
sink_event_callback: Option<SinkEventCallback>, sink_event_callback: Option<SinkEventCallback>,
audio_filter: Option<Box<dyn AudioFilter + Send>>, audio_filter: Option<Box<dyn AudioFilter + Send>>,
event_senders: Vec<futures::sync::mpsc::UnboundedSender<PlayerEvent>>, event_senders: Vec<mpsc::UnboundedSender<PlayerEvent>>,
limiter_active: bool,
limiter_attack_counter: u32,
limiter_release_counter: u32,
limiter_peak_sample: f32,
limiter_factor: f32,
limiter_strength: f32,
} }
enum PlayerCommand { enum PlayerCommand {
@ -70,7 +82,7 @@ enum PlayerCommand {
Pause, Pause,
Stop, Stop,
Seek(u32), Seek(u32),
AddEventSender(futures::sync::mpsc::UnboundedSender<PlayerEvent>), AddEventSender(mpsc::UnboundedSender<PlayerEvent>),
SetSinkEventCallback(Option<SinkEventCallback>), SetSinkEventCallback(Option<SinkEventCallback>),
EmitVolumeSetEvent(u16), EmitVolumeSetEvent(u16),
} }
@ -182,10 +194,10 @@ impl PlayerEvent {
} }
} }
pub type PlayerEventChannel = mpsc::UnboundedReceiver<PlayerEvent>;

#[derive(Clone, Copy, Debug)]
pub struct NormalisationData {
    track_gain_db: f32,
    track_peak: f32,
    album_gain_db: f32,
@ -193,21 +205,28 @@ struct NormalisationData {
}

impl NormalisationData {
    pub fn db_to_ratio(db: f32) -> f32 {
        f32::powf(10.0, db / DB_VOLTAGE_RATIO)
    }

    pub fn ratio_to_db(ratio: f32) -> f32 {
        ratio.log10() * DB_VOLTAGE_RATIO
    }

    fn parse_from_file<T: Read + Seek>(mut file: T) -> io::Result<NormalisationData> {
        const SPOTIFY_NORMALIZATION_HEADER_START_OFFSET: u64 = 144;
        file.seek(SeekFrom::Start(SPOTIFY_NORMALIZATION_HEADER_START_OFFSET))?;

        let track_gain_db = file.read_f32::<LittleEndian>()?;
        let track_peak = file.read_f32::<LittleEndian>()?;
        let album_gain_db = file.read_f32::<LittleEndian>()?;
        let album_peak = file.read_f32::<LittleEndian>()?;

        let r = NormalisationData {
            track_gain_db,
            track_peak,
            album_gain_db,
            album_peak,
        };

        Ok(r)
@ -218,17 +237,41 @@ impl NormalisationData {
            NormalisationType::Album => [data.album_gain_db, data.album_peak],
            NormalisationType::Track => [data.track_gain_db, data.track_peak],
        };

        let normalisation_power = gain_db + config.normalisation_pregain;
        let mut normalisation_factor = Self::db_to_ratio(normalisation_power);

        if normalisation_factor * gain_peak > config.normalisation_threshold {
            let limited_normalisation_factor = config.normalisation_threshold / gain_peak;
            let limited_normalisation_power = Self::ratio_to_db(limited_normalisation_factor);

            if config.normalisation_method == NormalisationMethod::Basic {
                warn!("Limiting gain to {:.2} for the duration of this track to stay under normalisation threshold.", limited_normalisation_power);
                normalisation_factor = limited_normalisation_factor;
            } else {
                warn!(
                    "This track will at its peak be subject to {:.2} dB of dynamic limiting.",
                    normalisation_power - limited_normalisation_power
                );
            }

            warn!("Please lower pregain to avoid.");
        }

        debug!("Normalisation Data: {:?}", data);
        debug!("Normalisation Type: {:?}", config.normalisation_type);
        debug!(
            "Normalisation Threshold: {:.1}",
            Self::ratio_to_db(config.normalisation_threshold)
        );
        debug!("Normalisation Method: {:?}", config.normalisation_method);
        debug!("Normalisation Factor: {}", normalisation_factor);

        if config.normalisation_method == NormalisationMethod::Dynamic {
            debug!("Normalisation Attack: {:?}", config.normalisation_attack);
            debug!("Normalisation Release: {:?}", config.normalisation_release);
            debug!("Normalisation Knee: {:?}", config.normalisation_knee);
        }

        normalisation_factor
    }
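To make the arithmetic above concrete, a worked example with made-up numbers (illustrative only, mirroring the formulas in get_factor):

fn normalisation_example() {
    let db_to_ratio = |db: f32| f32::powf(10.0, db / 20.0);
    let factor = db_to_ratio(3.0 + 0.0); // gain_db + pregain -> ~1.413
    let threshold = db_to_ratio(-1.0);   // -1 dBFS           -> ~0.891
    let peak = 0.95_f32;
    assert!(factor * peak > threshold);  // 1.413 * 0.95 ~ 1.34 > 0.891: would clip
    let limited = threshold / peak;      // ~0.938
    // Basic:   apply `limited` for the whole track.
    // Dynamic: keep `factor`, and limit peaks at playback time by up to
    //          20 * log10(factor / limited) ~ 3.6 dB.
    println!("factor {:.3}, limited {:.3}", factor, limited);
}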
@ -244,15 +287,15 @@ impl Player {
where where
F: FnOnce() -> Box<dyn Sink> + Send + 'static, F: FnOnce() -> Box<dyn Sink> + Send + 'static,
{ {
let (cmd_tx, cmd_rx) = futures::sync::mpsc::unbounded(); let (cmd_tx, cmd_rx) = mpsc::unbounded_channel();
let (event_sender, event_receiver) = futures::sync::mpsc::unbounded(); let (event_sender, event_receiver) = mpsc::unbounded_channel();
let handle = thread::spawn(move || { let handle = thread::spawn(move || {
debug!("new Player[{}]", session.session_id()); debug!("new Player[{}]", session.session_id());
let internal = PlayerInternal { let internal = PlayerInternal {
session: session, session,
config: config, config,
commands: cmd_rx, commands: cmd_rx,
state: PlayerState::Stopped, state: PlayerState::Stopped,
@ -260,13 +303,20 @@ impl Player {
sink: sink_builder(), sink: sink_builder(),
sink_status: SinkStatus::Closed, sink_status: SinkStatus::Closed,
sink_event_callback: None, sink_event_callback: None,
audio_filter: audio_filter, audio_filter,
event_senders: [event_sender].to_vec(), event_senders: [event_sender].to_vec(),
limiter_active: false,
limiter_attack_counter: 0,
limiter_release_counter: 0,
limiter_peak_sample: 0.0,
limiter_factor: 1.0,
limiter_strength: 0.0,
}; };
// While PlayerInternal is written as a future, it still contains blocking code. // While PlayerInternal is written as a future, it still contains blocking code.
// It must be run by using wait() in a dedicated thread. // It must be run by using block_on() in a dedicated thread.
let _ = internal.wait(); futures_executor::block_on(internal);
debug!("PlayerInternal thread finished."); debug!("PlayerInternal thread finished.");
}); });
@ -281,7 +331,7 @@ impl Player {
} }
fn command(&self, cmd: PlayerCommand) { fn command(&self, cmd: PlayerCommand) {
self.commands.as_ref().unwrap().unbounded_send(cmd).unwrap(); self.commands.as_ref().unwrap().send(cmd).unwrap();
} }
pub fn load(&mut self, track_id: SpotifyId, start_playing: bool, position_ms: u32) -> u64 { pub fn load(&mut self, track_id: SpotifyId, start_playing: bool, position_ms: u32) -> u64 {
@ -317,22 +367,21 @@ impl Player {
} }
pub fn get_player_event_channel(&self) -> PlayerEventChannel { pub fn get_player_event_channel(&self) -> PlayerEventChannel {
let (event_sender, event_receiver) = futures::sync::mpsc::unbounded(); let (event_sender, event_receiver) = mpsc::unbounded_channel();
self.command(PlayerCommand::AddEventSender(event_sender)); self.command(PlayerCommand::AddEventSender(event_sender));
event_receiver event_receiver
} }
pub fn get_end_of_track_future(&self) -> Box<dyn Future<Item = (), Error = ()>> { pub async fn await_end_of_track(&self) {
let result = self let mut channel = self.get_player_event_channel();
.get_player_event_channel() while let Some(event) = channel.recv().await {
.filter(|event| match event { if matches!(
PlayerEvent::EndOfTrack { .. } | PlayerEvent::Stopped { .. } => true, event,
_ => false, PlayerEvent::EndOfTrack { .. } | PlayerEvent::Stopped { .. }
}) ) {
.into_future() return;
.map_err(|_| ()) }
.map(|_| ()); }
Box::new(result)
} }
pub fn set_sink_event_callback(&self, callback: Option<SinkEventCallback>) { pub fn set_sink_event_callback(&self, callback: Option<SinkEventCallback>) {
@ -370,11 +419,11 @@ enum PlayerPreload {
None, None,
Loading { Loading {
track_id: SpotifyId, track_id: SpotifyId,
loader: Box<dyn Future<Item = PlayerLoadedTrackData, Error = ()>>, loader: Pin<Box<dyn Future<Output = Result<PlayerLoadedTrackData, ()>> + Send>>,
}, },
Ready { Ready {
track_id: SpotifyId, track_id: SpotifyId,
loaded_track: PlayerLoadedTrackData, loaded_track: Box<PlayerLoadedTrackData>,
}, },
} }
@ -386,7 +435,7 @@ enum PlayerState {
track_id: SpotifyId, track_id: SpotifyId,
play_request_id: u64, play_request_id: u64,
start_playback: bool, start_playback: bool,
loader: Box<dyn Future<Item = PlayerLoadedTrackData, Error = ()>>, loader: Pin<Box<dyn Future<Output = Result<PlayerLoadedTrackData, ()>> + Send>>,
}, },
Paused { Paused {
track_id: SpotifyId, track_id: SpotifyId,
@ -432,18 +481,12 @@ impl PlayerState {
#[allow(dead_code)] #[allow(dead_code)]
fn is_stopped(&self) -> bool { fn is_stopped(&self) -> bool {
use self::PlayerState::*; use self::PlayerState::*;
match *self { matches!(self, Stopped)
Stopped => true,
_ => false,
}
} }
fn is_loading(&self) -> bool { fn is_loading(&self) -> bool {
use self::PlayerState::*; use self::PlayerState::*;
match *self { matches!(self, Loading { .. })
Loading { .. } => true,
_ => false,
}
} }
fn decoder(&mut self) -> Option<&mut Decoder> { fn decoder(&mut self) -> Option<&mut Decoder> {
@ -576,24 +619,24 @@ struct PlayerTrackLoader {
} }
impl PlayerTrackLoader { impl PlayerTrackLoader {
fn find_available_alternative<'a>(&self, audio: &'a AudioItem) -> Option<Cow<'a, AudioItem>> { async fn find_available_alternative(&self, audio: AudioItem) -> Option<AudioItem> {
if audio.available { if audio.available {
Some(Cow::Borrowed(audio)) Some(audio)
} else { } else if let Some(alternatives) = &audio.alternatives {
if let Some(alternatives) = &audio.alternatives { let alternatives: FuturesUnordered<_> = alternatives
let alternatives = alternatives
.iter() .iter()
.map(|alt_id| AudioItem::get_audio_item(&self.session, *alt_id)); .map(|alt_id| AudioItem::get_audio_item(&self.session, *alt_id))
let alternatives = future::join_all(alternatives).wait().unwrap(); .collect();
alternatives alternatives
.into_iter() .filter_map(|x| future::ready(x.ok()))
.find(|alt| alt.available) .filter(|x| future::ready(x.available))
.map(Cow::Owned) .next()
.await
} else { } else {
None None
} }
} }
}
fn stream_data_rate(&self, format: FileFormat) -> usize { fn stream_data_rate(&self, format: FileFormat) -> usize {
match format { match format {
@ -614,8 +657,12 @@ impl PlayerTrackLoader {
} }
} }
fn load_track(&self, spotify_id: SpotifyId, position_ms: u32) -> Option<PlayerLoadedTrackData> { async fn load_track(
let audio = match AudioItem::get_audio_item(&self.session, spotify_id).wait() { &self,
spotify_id: SpotifyId,
position_ms: u32,
) -> Option<PlayerLoadedTrackData> {
let audio = match AudioItem::get_audio_item(&self.session, spotify_id).await {
Ok(audio) => audio, Ok(audio) => audio,
Err(_) => { Err(_) => {
error!("Unable to load audio item."); error!("Unable to load audio item.");
@ -625,10 +672,10 @@ impl PlayerTrackLoader {
info!("Loading <{}> with Spotify URI <{}>", audio.name, audio.uri); info!("Loading <{}> with Spotify URI <{}>", audio.name, audio.uri);
let audio = match self.find_available_alternative(&audio) { let audio = match self.find_available_alternative(audio).await {
Some(audio) => audio, Some(audio) => audio,
None => { None => {
warn!("<{}> is not available", audio.uri); warn!("<{}> is not available", spotify_id.to_uri());
return None; return None;
} }
}; };
@ -674,7 +721,9 @@ impl PlayerTrackLoader {
let bytes_per_second = self.stream_data_rate(format); let bytes_per_second = self.stream_data_rate(format);
let play_from_beginning = position_ms == 0; let play_from_beginning = position_ms == 0;
let key = self.session.audio_key().request(spotify_id, file_id); // This is only a loop to be able to reload the file if an error occured
// while opening a cached file.
loop {
let encrypted_file = AudioFile::open( let encrypted_file = AudioFile::open(
&self.session, &self.session,
file_id, file_id,
@ -682,7 +731,7 @@ impl PlayerTrackLoader {
play_from_beginning, play_from_beginning,
); );
let encrypted_file = match encrypted_file.wait() { let encrypted_file = match encrypted_file.await {
Ok(encrypted_file) => encrypted_file, Ok(encrypted_file) => encrypted_file,
Err(_) => { Err(_) => {
error!("Unable to load encrypted file."); error!("Unable to load encrypted file.");
@ -691,7 +740,7 @@ impl PlayerTrackLoader {
}; };
let is_cached = encrypted_file.is_cached(); let is_cached = encrypted_file.is_cached();
let mut stream_loader_controller = encrypted_file.get_stream_loader_controller(); let stream_loader_controller = encrypted_file.get_stream_loader_controller();
if play_from_beginning { if play_from_beginning {
// No need to seek -> we stream from the beginning // No need to seek -> we stream from the beginning
@ -701,7 +750,7 @@ impl PlayerTrackLoader {
stream_loader_controller.set_random_access_mode(); stream_loader_controller.set_random_access_mode();
} }
let key = match key.wait() { let key = match self.session.audio_key().request(spotify_id, file_id).await {
Ok(key) => key, Ok(key) => key,
Err(_) => { Err(_) => {
error!("Unable to load decryption key"); error!("Unable to load decryption key");
@ -711,13 +760,14 @@ impl PlayerTrackLoader {
let mut decrypted_file = AudioDecrypt::new(key, encrypted_file); let mut decrypted_file = AudioDecrypt::new(key, encrypted_file);
let normalisation_factor = match NormalisationData::parse_from_file(&mut decrypted_file) { let normalisation_factor = match NormalisationData::parse_from_file(&mut decrypted_file)
{
Ok(normalisation_data) => { Ok(normalisation_data) => {
NormalisationData::get_factor(&self.config, normalisation_data) NormalisationData::get_factor(&self.config, normalisation_data)
} }
Err(_) => { Err(_) => {
warn!("Unable to extract normalisation data, using default value."); warn!("Unable to extract normalisation data, using default value.");
1.0 as f32 1.0_f32
} }
}; };
@ -749,7 +799,7 @@ impl PlayerTrackLoader {
} }
// Just try it again // Just try it again
return self.load_track(spotify_id, position_ms); continue;
} }
Err(e) => { Err(e) => {
error!("Unable to read audio file: {}", e); error!("Unable to read audio file: {}", e);
@ -765,37 +815,38 @@ impl PlayerTrackLoader {
} }
let stream_position_pcm = PlayerInternal::position_ms_to_pcm(position_ms); let stream_position_pcm = PlayerInternal::position_ms_to_pcm(position_ms);
info!("<{}> ({} ms) loaded", audio.name, audio.duration); info!("<{}> ({} ms) loaded", audio.name, audio.duration);
Some(PlayerLoadedTrackData {
return Some(PlayerLoadedTrackData {
decoder, decoder,
normalisation_factor, normalisation_factor,
stream_loader_controller, stream_loader_controller,
bytes_per_second, bytes_per_second,
duration_ms, duration_ms,
stream_position_pcm, stream_position_pcm,
}) });
}
} }
} }
impl Future for PlayerInternal { impl Future for PlayerInternal {
type Item = (); type Output = ();
type Error = ();
fn poll(&mut self) -> Poll<(), ()> { fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<()> {
// While this is written as a future, it still contains blocking code. // While this is written as a future, it still contains blocking code.
// It must be run on its own thread. // It must be run on its own thread.
let passthrough = self.config.passthrough;
loop { loop {
let mut all_futures_completed_or_not_ready = true; let mut all_futures_completed_or_not_ready = true;
// process commands that were sent to us // process commands that were sent to us
let cmd = match self.commands.poll() { let cmd = match self.commands.poll_recv(cx) {
Ok(Async::Ready(None)) => return Ok(Async::Ready(())), // client has disconnected - shut down. Poll::Ready(None) => return Poll::Ready(()), // client has disconnected - shut down.
Ok(Async::Ready(Some(cmd))) => { Poll::Ready(Some(cmd)) => {
all_futures_completed_or_not_ready = false; all_futures_completed_or_not_ready = false;
Some(cmd) Some(cmd)
} }
Ok(Async::NotReady) => None, _ => None,
Err(_) => None,
}; };
if let Some(cmd) = cmd { if let Some(cmd) = cmd {
@ -810,8 +861,8 @@ impl Future for PlayerInternal {
play_request_id, play_request_id,
} = self.state } = self.state
{ {
match loader.poll() { match loader.as_mut().poll(cx) {
Ok(Async::Ready(loaded_track)) => { Poll::Ready(Ok(loaded_track)) => {
self.start_playback( self.start_playback(
track_id, track_id,
play_request_id, play_request_id,
@ -822,8 +873,7 @@ impl Future for PlayerInternal {
panic!("The state wasn't changed by start_playback()"); panic!("The state wasn't changed by start_playback()");
} }
} }
Ok(Async::NotReady) => (), Poll::Ready(Err(_)) => {
Err(_) => {
warn!("Unable to load <{:?}>\nSkipping to next track", track_id); warn!("Unable to load <{:?}>\nSkipping to next track", track_id);
assert!(self.state.is_loading()); assert!(self.state.is_loading());
self.send_event(PlayerEvent::EndOfTrack { self.send_event(PlayerEvent::EndOfTrack {
@ -831,6 +881,7 @@ impl Future for PlayerInternal {
play_request_id, play_request_id,
}) })
} }
Poll::Pending => (),
} }
} }
@ -840,16 +891,15 @@ impl Future for PlayerInternal {
track_id, track_id,
} = self.preload } = self.preload
{ {
match loader.poll() { match loader.as_mut().poll(cx) {
Ok(Async::Ready(loaded_track)) => { Poll::Ready(Ok(loaded_track)) => {
self.send_event(PlayerEvent::Preloading { track_id }); self.send_event(PlayerEvent::Preloading { track_id });
self.preload = PlayerPreload::Ready { self.preload = PlayerPreload::Ready {
track_id, track_id,
loaded_track, loaded_track: Box::new(loaded_track),
}; };
} }
Ok(Async::NotReady) => (), Poll::Ready(Err(_)) => {
Err(_) => {
debug!("Unable to preload {:?}", track_id); debug!("Unable to preload {:?}", track_id);
self.preload = PlayerPreload::None; self.preload = PlayerPreload::None;
// Let Spirc know that the track was unavailable. // Let Spirc know that the track was unavailable.
@ -866,6 +916,7 @@ impl Future for PlayerInternal {
}); });
} }
} }
Poll::Pending => (),
} }
} }
@ -885,10 +936,10 @@ impl Future for PlayerInternal {
{ {
let packet = decoder.next_packet().expect("Vorbis error"); let packet = decoder.next_packet().expect("Vorbis error");
if !self.config.passthrough { if !passthrough {
if let Some(ref packet) = packet { if let Some(ref packet) = packet {
*stream_position_pcm = *stream_position_pcm +=
*stream_position_pcm + (packet.samples().len() / 2) as u64; (packet.samples().len() / NUM_CHANNELS as usize) as u64;
let stream_position_millis = let stream_position_millis =
Self::position_pcm_to_ms(*stream_position_pcm); Self::position_pcm_to_ms(*stream_position_pcm);
@ -900,11 +951,7 @@ impl Future for PlayerInternal {
.as_millis() .as_millis()
as i64 as i64
- stream_position_millis as i64; - stream_position_millis as i64;
if lag > 1000 { lag > 1000
true
} else {
false
}
} }
}; };
if notify_about_position { if notify_about_position {
@ -964,11 +1011,11 @@ impl Future for PlayerInternal {
} }
if self.session.is_invalid() { if self.session.is_invalid() {
return Ok(Async::Ready(())); return Poll::Ready(());
} }
if (!self.state.is_playing()) && all_futures_completed_or_not_ready { if (!self.state.is_playing()) && all_futures_completed_or_not_ready {
return Ok(Async::NotReady); return Poll::Pending;
} }
} }
} }
@ -1113,9 +1160,116 @@ impl PlayerInternal {
editor.modify_stream(data) editor.modify_stream(data)
} }
if self.config.normalisation && normalisation_factor != 1.0 { if self.config.normalisation
for x in data.iter_mut() { && (f32::abs(normalisation_factor - 1.0) < f32::EPSILON
*x = (*x as f32 * normalisation_factor) as i16; || self.config.normalisation_method != NormalisationMethod::Basic)
{
for sample in data.iter_mut() {
let mut actual_normalisation_factor = normalisation_factor;
if self.config.normalisation_method == NormalisationMethod::Dynamic
{
if self.limiter_active {
// "S"-shaped curve with a configurable knee during attack and release:
// - > 1.0 yields soft knees at start and end, steeper in between
// - 1.0 yields a linear function from 0-100%
// - between 0.0 and 1.0 yields hard knees at start and end, flatter in between
// - 0.0 yields a step response to 50%, causing distortion
// - Rates < 0.0 invert the limiter and are invalid
let mut shaped_limiter_strength = self.limiter_strength;
if shaped_limiter_strength > 0.0
&& shaped_limiter_strength < 1.0
{
shaped_limiter_strength = 1.0
/ (1.0
+ f32::powf(
shaped_limiter_strength
/ (1.0 - shaped_limiter_strength),
-1.0 * self.config.normalisation_knee,
));
}
actual_normalisation_factor =
(1.0 - shaped_limiter_strength) * normalisation_factor
+ shaped_limiter_strength * self.limiter_factor;
};
// Always check for peaks, even when the limiter is already active.
// There may be even higher peaks than we initially targeted.
// Check against the normalisation factor that would be applied normally.
let abs_sample =
((*sample as f64 * normalisation_factor as f64) as f32)
.abs();
if abs_sample > self.config.normalisation_threshold {
self.limiter_active = true;
if self.limiter_release_counter > 0 {
// A peak was encountered while releasing the limiter;
// synchronize with the current release limiter strength.
self.limiter_attack_counter = (((SAMPLES_PER_SECOND
as f32
* self.config.normalisation_release)
- self.limiter_release_counter as f32)
/ (self.config.normalisation_release
/ self.config.normalisation_attack))
as u32;
self.limiter_release_counter = 0;
}
self.limiter_attack_counter =
self.limiter_attack_counter.saturating_add(1);
self.limiter_strength = self.limiter_attack_counter as f32
/ (SAMPLES_PER_SECOND as f32
* self.config.normalisation_attack);
if abs_sample > self.limiter_peak_sample {
self.limiter_peak_sample = abs_sample;
self.limiter_factor =
self.config.normalisation_threshold
/ self.limiter_peak_sample;
}
} else if self.limiter_active {
if self.limiter_attack_counter > 0 {
// Release may start within the attack period, before
// the limiter reached full strength. For that reason
// start the release by synchronizing with the current
// attack limiter strength.
self.limiter_release_counter = (((SAMPLES_PER_SECOND
as f32
* self.config.normalisation_attack)
- self.limiter_attack_counter as f32)
* (self.config.normalisation_release
/ self.config.normalisation_attack))
as u32;
self.limiter_attack_counter = 0;
}
self.limiter_release_counter =
self.limiter_release_counter.saturating_add(1);
if self.limiter_release_counter
> (SAMPLES_PER_SECOND as f32
* self.config.normalisation_release)
as u32
{
self.reset_limiter();
} else {
self.limiter_strength = ((SAMPLES_PER_SECOND as f32
* self.config.normalisation_release)
- self.limiter_release_counter as f32)
/ (SAMPLES_PER_SECOND as f32
* self.config.normalisation_release);
}
}
}
*sample =
(*sample as f64 * actual_normalisation_factor as f64) as f32;
// Extremely sharp attacks, however unlikely, *may* still clip and provide
// undefined results, so strictly enforce output within [-1.0, 1.0].
if *sample < -1.0 {
*sample = -1.0;
} else if *sample > 1.0 {
*sample = 1.0;
}
} }
} }
} }
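The "S"-shaped knee described in the comments above can be written as a small standalone function. This is a sketch for illustration, extracted from the expression used in the limiter loop rather than taken verbatim from it:

// Maps a linear limiter strength in [0, 1] onto the knee curve used above.
fn shape_strength(strength: f32, knee: f32) -> f32 {
    if strength > 0.0 && strength < 1.0 {
        1.0 / (1.0 + f32::powf(strength / (1.0 - strength), -knee))
    } else {
        strength // 0.0 and 1.0 map to themselves
    }
}
// knee = 1.0 is the identity (linear) response: shape_strength(0.25, 1.0) == 0.25;
// knee = 2.0 softens the onset: shape_strength(0.25, 2.0) == 0.1.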
@ -1146,6 +1300,15 @@ impl PlayerInternal {
} }
} }
fn reset_limiter(&mut self) {
self.limiter_active = false;
self.limiter_release_counter = 0;
self.limiter_attack_counter = 0;
self.limiter_peak_sample = 0.0;
self.limiter_factor = 1.0;
self.limiter_strength = 0.0;
}
fn start_playback( fn start_playback(
&mut self, &mut self,
track_id: SpotifyId, track_id: SpotifyId,
@ -1166,8 +1329,8 @@ impl PlayerInternal {
}); });
self.state = PlayerState::Playing { self.state = PlayerState::Playing {
track_id: track_id, track_id,
play_request_id: play_request_id, play_request_id,
decoder: loaded_track.decoder, decoder: loaded_track.decoder,
normalisation_factor: loaded_track.normalisation_factor, normalisation_factor: loaded_track.normalisation_factor,
stream_loader_controller: loaded_track.stream_loader_controller, stream_loader_controller: loaded_track.stream_loader_controller,
@ -1183,8 +1346,8 @@ impl PlayerInternal {
self.ensure_sink_stopped(false); self.ensure_sink_stopped(false);
self.state = PlayerState::Paused { self.state = PlayerState::Paused {
track_id: track_id, track_id,
play_request_id: play_request_id, play_request_id,
decoder: loaded_track.decoder, decoder: loaded_track.decoder,
normalisation_factor: loaded_track.normalisation_factor, normalisation_factor: loaded_track.normalisation_factor,
stream_loader_controller: loaded_track.stream_loader_controller, stream_loader_controller: loaded_track.stream_loader_controller,
@ -1231,7 +1394,7 @@ impl PlayerInternal {
track_id: old_track_id, track_id: old_track_id,
.. ..
} => self.send_event(PlayerEvent::Changed { } => self.send_event(PlayerEvent::Changed {
old_track_id: old_track_id, old_track_id,
new_track_id: track_id, new_track_id: track_id,
}), }),
PlayerState::Stopped => self.send_event(PlayerEvent::Started { PlayerState::Stopped => self.send_event(PlayerEvent::Started {
@ -1369,7 +1532,7 @@ impl PlayerInternal {
let _ = loaded_track.decoder.seek(position_ms as i64); // This may be blocking let _ = loaded_track.decoder.seek(position_ms as i64); // This may be blocking
loaded_track.stream_loader_controller.set_stream_mode(); loaded_track.stream_loader_controller.set_stream_mode();
} }
self.start_playback(track_id, play_request_id, loaded_track, play); self.start_playback(track_id, play_request_id, *loaded_track, play);
return; return;
} else { } else {
unreachable!(); unreachable!();
@ -1411,9 +1574,7 @@ impl PlayerInternal {
self.preload = PlayerPreload::None; self.preload = PlayerPreload::None;
// If we don't have a loader yet, create one from scratch. // If we don't have a loader yet, create one from scratch.
let loader = loader let loader = loader.unwrap_or_else(|| Box::pin(self.load_track(track_id, position_ms)));
.or_else(|| Some(self.load_track(track_id, position_ms)))
.unwrap();
// Set ourselves to a loading state. // Set ourselves to a loading state.
self.state = PlayerState::Loading { self.state = PlayerState::Loading {
@ -1468,7 +1629,10 @@ impl PlayerInternal {
// schedule the preload of the current track if desired. // schedule the preload of the current track if desired.
if preload_track { if preload_track {
let loader = self.load_track(track_id, 0); let loader = self.load_track(track_id, 0);
self.preload = PlayerPreload::Loading { track_id, loader } self.preload = PlayerPreload::Loading {
track_id,
loader: Box::pin(loader),
}
} }
} }
@ -1571,7 +1735,7 @@ impl PlayerInternal {
fn send_event(&mut self, event: PlayerEvent) { fn send_event(&mut self, event: PlayerEvent) {
let mut index = 0; let mut index = 0;
while index < self.event_senders.len() { while index < self.event_senders.len() {
match self.event_senders[index].unbounded_send(event.clone()) { match self.event_senders[index].send(event.clone()) {
Ok(_) => index += 1, Ok(_) => index += 1,
Err(_) => { Err(_) => {
self.event_senders.remove(index); self.event_senders.remove(index);
@ -1584,7 +1748,7 @@ impl PlayerInternal {
&self, &self,
spotify_id: SpotifyId, spotify_id: SpotifyId,
position_ms: u32, position_ms: u32,
) -> Box<dyn Future<Item = PlayerLoadedTrackData, Error = ()>> { ) -> impl Future<Output = Result<PlayerLoadedTrackData, ()>> + Send + 'static {
// This method creates a future that returns the loaded stream and associated info. // This method creates a future that returns the loaded stream and associated info.
// Ideally all work should be done using asynchronous code. However, seek() on the // Ideally all work should be done using asynchronous code. However, seek() on the
// audio stream is implemented in a blocking fashion. Thus, we can't turn it into future // audio stream is implemented in a blocking fashion. Thus, we can't turn it into future
@ -1596,18 +1760,16 @@ impl PlayerInternal {
config: self.config.clone(), config: self.config.clone(),
}; };
let (result_tx, result_rx) = futures::sync::oneshot::channel(); let (result_tx, result_rx) = oneshot::channel();
std::thread::spawn(move || { std::thread::spawn(move || {
loader let data = futures_executor::block_on(loader.load_track(spotify_id, position_ms));
.load_track(spotify_id, position_ms) if let Some(data) = data {
.and_then(move |data| {
let _ = result_tx.send(data); let _ = result_tx.send(data);
Some(()) }
});
}); });
Box::new(result_rx.map_err(|_| ())) result_rx.map_err(|_| ())
} }
fn preload_data_before_playback(&mut self) { fn preload_data_before_playback(&mut self) {
@ -1729,21 +1891,18 @@ struct Subfile<T: Read + Seek> {
impl<T: Read + Seek> Subfile<T> { impl<T: Read + Seek> Subfile<T> {
pub fn new(mut stream: T, offset: u64) -> Subfile<T> { pub fn new(mut stream: T, offset: u64) -> Subfile<T> {
stream.seek(SeekFrom::Start(offset)).unwrap(); stream.seek(SeekFrom::Start(offset)).unwrap();
Subfile { Subfile { stream, offset }
stream: stream,
offset: offset,
}
} }
} }
impl<T: Read + Seek> Read for Subfile<T> { impl<T: Read + Seek> Read for Subfile<T> {
fn read(&mut self, buf: &mut [u8]) -> Result<usize> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.stream.read(buf) self.stream.read(buf)
} }
} }
impl<T: Read + Seek> Seek for Subfile<T> { impl<T: Read + Seek> Seek for Subfile<T> {
fn seek(&mut self, mut pos: SeekFrom) -> Result<u64> { fn seek(&mut self, mut pos: SeekFrom) -> io::Result<u64> {
pos = match pos { pos = match pos {
SeekFrom::Start(offset) => SeekFrom::Start(offset + self.offset), SeekFrom::Start(offset) => SeekFrom::Start(offset + self.offset),
x => x, x => x,

@ -1,3 +1,4 @@
# max_width = 105
reorder_imports = true
reorder_modules = true
edition = "2018"

@ -1,9 +1,8 @@
#![crate_name = "librespot"]

pub use librespot_audio as audio;
pub use librespot_connect as connect;
pub use librespot_core as core;
pub use librespot_metadata as metadata;
pub use librespot_playback as playback;
pub use librespot_protocol as protocol;

@ -1,33 +1,37 @@
use futures_util::{future, FutureExt, StreamExt};
use librespot_playback::player::PlayerEvent;
use log::{error, info, warn};
use sha1::{Digest, Sha1};
use tokio::sync::mpsc::UnboundedReceiver;
use url::Url;

use librespot::connect::spirc::Spirc;
use librespot::core::authentication::Credentials;
use librespot::core::cache::Cache;
use librespot::core::config::{ConnectConfig, DeviceType, SessionConfig, VolumeCtrl};
use librespot::core::session::Session;
use librespot::core::version;
use librespot::playback::audio_backend::{self, Sink, BACKENDS};
use librespot::playback::config::{
    AudioFormat, Bitrate, NormalisationMethod, NormalisationType, PlayerConfig,
};
use librespot::playback::mixer::{self, Mixer, MixerConfig};
use librespot::playback::player::{NormalisationData, Player};

mod player_event_handler;
use player_event_handler::{emit_sink_event, run_program_on_events};

use std::convert::TryFrom;
use std::path::Path;
use std::process::exit;
use std::str::FromStr;
use std::{env, time::Instant};
use std::{
    io::{stderr, Write},
    pin::Pin,
};

const MILLIS: f32 = 1000.0;
fn device_id(name: &str) -> String { fn device_id(name: &str) -> String {
hex::encode(Sha1::digest(name.as_bytes())) hex::encode(Sha1::digest(name.as_bytes()))
@ -71,6 +75,29 @@ fn list_backends() {
} }
} }
pub fn get_credentials<F: FnOnce(&String) -> Option<String>>(
username: Option<String>,
password: Option<String>,
cached_credentials: Option<Credentials>,
prompt: F,
) -> Option<Credentials> {
if let Some(username) = username {
if let Some(password) = password {
return Some(Credentials::with_password(username, password));
}
match cached_credentials {
Some(credentials) if username == credentials.username => Some(credentials),
_ => {
let password = prompt(&username)?;
Some(Credentials::with_password(username, password))
}
}
} else {
cached_credentials
}
}
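In short, the helper above resolves credentials in this order: explicit username plus password, then cached credentials when the username matches (or no username was given at all), and finally the prompt closure. A hypothetical call, mirroring how get_setup uses it further down (fragment for illustration only):

let credentials = get_credentials(
    matches.opt_str("username"),
    matches.opt_str("password"),
    cache.as_ref().and_then(Cache::credentials),
    |username| {
        eprint!("Password for {}: ", username);
        rpassword::read_password().ok()
    },
);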
fn print_version() { fn print_version() {
println!( println!(
"librespot {semver} {sha} (Built on {build_date}, Build ID: {build_id})", "librespot {semver} {sha} (Built on {build_date}, Build ID: {build_id})",
@ -83,7 +110,8 @@ fn print_version() {
#[derive(Clone)] #[derive(Clone)]
struct Setup { struct Setup {
backend: fn(Option<String>) -> Box<dyn Sink>, format: AudioFormat,
backend: fn(Option<String>, AudioFormat) -> Box<dyn Sink + 'static>,
device: Option<String>, device: Option<String>,
mixer: fn(Option<MixerConfig>) -> Box<dyn Mixer>, mixer: fn(Option<MixerConfig>) -> Box<dyn Mixer>,
@ -100,7 +128,7 @@ struct Setup {
emit_sink_events: bool, emit_sink_events: bool,
} }
fn setup(args: &[String]) -> Setup { fn get_setup(args: &[String]) -> Setup {
let mut opts = getopts::Options::new(); let mut opts = getopts::Options::new();
opts.optopt( opts.optopt(
"c", "c",
@ -147,6 +175,12 @@ fn setup(args: &[String]) -> Setup {
"Audio device to use. Use '?' to list options if using portaudio or alsa", "Audio device to use. Use '?' to list options if using portaudio or alsa",
"DEVICE", "DEVICE",
) )
.optopt(
"",
"format",
"Output format (F32, S32, S24, S24_3 or S16). Defaults to S16",
"FORMAT",
)
.optopt("", "mixer", "Mixer to use (alsa or softvol)", "MIXER") .optopt("", "mixer", "Mixer to use (alsa or softvol)", "MIXER")
.optopt( .optopt(
"m", "m",
@ -188,6 +222,12 @@ fn setup(args: &[String]) -> Setup {
"enable-volume-normalisation", "enable-volume-normalisation",
"Play all tracks at the same volume", "Play all tracks at the same volume",
) )
.optopt(
"",
"normalisation-method",
"Specify the normalisation method to use - [basic, dynamic]. Default is dynamic.",
"NORMALISATION_METHOD",
)
.optopt( .optopt(
"", "",
"normalisation-gain-type", "normalisation-gain-type",
@ -200,6 +240,30 @@ fn setup(args: &[String]) -> Setup {
"Pregain (dB) applied by volume normalisation", "Pregain (dB) applied by volume normalisation",
"PREGAIN", "PREGAIN",
) )
.optopt(
"",
"normalisation-threshold",
"Threshold (dBFS) to prevent clipping. Default is -1.0.",
"THRESHOLD",
)
.optopt(
"",
"normalisation-attack",
"Attack time (ms) in which the dynamic limiter is reducing gain. Default is 5.",
"ATTACK",
)
.optopt(
"",
"normalisation-release",
"Release or decay time (ms) in which the dynamic limiter is restoring gain. Default is 100.",
"RELEASE",
)
.optopt(
"",
"normalisation-knee",
"Knee steepness of the dynamic limiter. Default is 1.0.",
"KNEE",
)
.optopt( .optopt(
"", "",
"volume-ctrl", "volume-ctrl",
@ -225,13 +289,7 @@ fn setup(args: &[String]) -> Setup {
let matches = match opts.parse(&args[1..]) { let matches = match opts.parse(&args[1..]) {
Ok(m) => m, Ok(m) => m,
Err(f) => { Err(f) => {
writeln!( eprintln!("error: {}\n{}", f.to_string(), usage(&args[0], &opts));
stderr(),
"error: {}\n{}",
f.to_string(),
usage(&args[0], &opts)
)
.unwrap();
exit(1); exit(1);
} }
}; };
@ -260,9 +318,15 @@ fn setup(args: &[String]) -> Setup {
let backend = audio_backend::find(backend_name).expect("Invalid backend"); let backend = audio_backend::find(backend_name).expect("Invalid backend");
let format = matches
.opt_str("format")
.as_ref()
.map(|format| AudioFormat::try_from(format).expect("Invalid output format"))
.unwrap_or_default();
let device = matches.opt_str("device"); let device = matches.opt_str("device");
if device == Some("?".into()) { if device == Some("?".into()) {
backend(device); backend(device, format);
exit(0); exit(0);
} }
@ -272,8 +336,10 @@ fn setup(args: &[String]) -> Setup {
let mixer_config = MixerConfig { let mixer_config = MixerConfig {
card: matches card: matches
.opt_str("mixer-card") .opt_str("mixer-card")
.unwrap_or(String::from("default")), .unwrap_or_else(|| String::from("default")),
mixer: matches.opt_str("mixer-name").unwrap_or(String::from("PCM")), mixer: matches
.opt_str("mixer-name")
.unwrap_or_else(|| String::from("PCM")),
index: matches index: matches
.opt_str("mixer-index") .opt_str("mixer-index")
.map(|index| index.parse::<u32>().unwrap()) .map(|index| index.parse::<u32>().unwrap())
@ -297,7 +363,7 @@ fn setup(args: &[String]) -> Setup {
.map(|p| AsRef::<Path>::as_ref(p).join("files")); .map(|p| AsRef::<Path>::as_ref(p).join("files"));
system_dir = matches system_dir = matches
.opt_str("system-cache") .opt_str("system-cache")
.or_else(|| cache_dir) .or(cache_dir)
.map(|p| p.into()); .map(|p| p.into());
} }
@ -327,15 +393,17 @@ fn setup(args: &[String]) -> Setup {
.map(|port| port.parse::<u16>().unwrap()) .map(|port| port.parse::<u16>().unwrap())
.unwrap_or(0); .unwrap_or(0);
let name = matches.opt_str("name").unwrap_or("Librespot".to_string()); let name = matches
.opt_str("name")
.unwrap_or_else(|| "Librespot".to_string());
let credentials = { let credentials = {
let cached_credentials = cache.as_ref().and_then(Cache::credentials); let cached_credentials = cache.as_ref().and_then(Cache::credentials);
let password = |username: &String| -> String { let password = |username: &String| -> Option<String> {
write!(stderr(), "Password for {}: ", username).unwrap(); write!(stderr(), "Password for {}: ", username).ok()?;
stderr().flush().unwrap(); stderr().flush().ok()?;
rpassword::read_password().unwrap() rpassword::read_password().ok()
}; };
get_credentials( get_credentials(
@ -351,8 +419,8 @@ fn setup(args: &[String]) -> Setup {
SessionConfig { SessionConfig {
user_agent: version::VERSION_STRING.to_string(), user_agent: version::VERSION_STRING.to_string(),
device_id: device_id, device_id,
proxy: matches.opt_str("proxy").or(std::env::var("http_proxy").ok()).map( proxy: matches.opt_str("proxy").or_else(|| std::env::var("http_proxy").ok()).map(
|s| { |s| {
match Url::parse(&s) { match Url::parse(&s) {
Ok(url) => { Ok(url) => {
@ -382,23 +450,56 @@ fn setup(args: &[String]) -> Setup {
.opt_str("b") .opt_str("b")
.as_ref() .as_ref()
.map(|bitrate| Bitrate::from_str(bitrate).expect("Invalid bitrate")) .map(|bitrate| Bitrate::from_str(bitrate).expect("Invalid bitrate"))
.unwrap_or(Bitrate::default()); .unwrap_or_default();
let gain_type = matches let gain_type = matches
.opt_str("normalisation-gain-type") .opt_str("normalisation-gain-type")
.as_ref() .as_ref()
.map(|gain_type| { .map(|gain_type| {
NormalisationType::from_str(gain_type).expect("Invalid normalisation type") NormalisationType::from_str(gain_type).expect("Invalid normalisation type")
}) })
.unwrap_or(NormalisationType::default()); .unwrap_or_default();
let normalisation_method = matches
.opt_str("normalisation-method")
.as_ref()
.map(|gain_type| {
NormalisationMethod::from_str(gain_type).expect("Invalid normalisation method")
})
.unwrap_or_default();
PlayerConfig { PlayerConfig {
bitrate: bitrate, bitrate,
gapless: !matches.opt_present("disable-gapless"), gapless: !matches.opt_present("disable-gapless"),
normalisation: matches.opt_present("enable-volume-normalisation"), normalisation: matches.opt_present("enable-volume-normalisation"),
normalisation_method,
normalisation_type: gain_type, normalisation_type: gain_type,
normalisation_pregain: matches normalisation_pregain: matches
.opt_str("normalisation-pregain") .opt_str("normalisation-pregain")
.map(|pregain| pregain.parse::<f32>().expect("Invalid pregain float value")) .map(|pregain| pregain.parse::<f32>().expect("Invalid pregain float value"))
.unwrap_or(PlayerConfig::default().normalisation_pregain), .unwrap_or(PlayerConfig::default().normalisation_pregain),
normalisation_threshold: NormalisationData::db_to_ratio(
matches
.opt_str("normalisation-threshold")
.map(|threshold| {
threshold
.parse::<f32>()
.expect("Invalid threshold float value")
})
.unwrap_or(PlayerConfig::default().normalisation_threshold),
),
normalisation_attack: matches
.opt_str("normalisation-attack")
.map(|attack| attack.parse::<f32>().expect("Invalid attack float value"))
.unwrap_or(PlayerConfig::default().normalisation_attack * MILLIS)
/ MILLIS,
normalisation_release: matches
.opt_str("normalisation-release")
.map(|release| release.parse::<f32>().expect("Invalid release float value"))
.unwrap_or(PlayerConfig::default().normalisation_release * MILLIS)
/ MILLIS,
normalisation_knee: matches
.opt_str("normalisation-knee")
.map(|knee| knee.parse::<f32>().expect("Invalid knee float value"))
.unwrap_or(PlayerConfig::default().normalisation_knee),
            passthrough,
        }
    };
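The attack and release options above are taken in milliseconds on the command line but stored as seconds in PlayerConfig (defaults 0.005 s and 0.1 s), hence the division by MILLIS. A small worked sketch of what the default attack window amounts to (illustrative only; it assumes the SAMPLES_PER_SECOND constant from the player module, i.e. 44_100 * 2 interleaved samples per second):

fn attack_window_samples() -> f32 {
    const MILLIS: f32 = 1000.0;
    const SAMPLES_PER_SECOND: f32 = 44_100.0 * 2.0; // stereo, interleaved
    let attack_ms = 5.0; // --normalisation-attack 5 (the default)
    // 88_200 * 0.005 = 441 samples for the limiter to ramp to full strength.
    SAMPLES_PER_SECOND * (attack_ms / MILLIS)
}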
@ -408,19 +509,19 @@ fn setup(args: &[String]) -> Setup {
.opt_str("device-type") .opt_str("device-type")
.as_ref() .as_ref()
.map(|device_type| DeviceType::from_str(device_type).expect("Invalid device type")) .map(|device_type| DeviceType::from_str(device_type).expect("Invalid device type"))
.unwrap_or(DeviceType::default()); .unwrap_or_default();
let volume_ctrl = matches let volume_ctrl = matches
.opt_str("volume-ctrl") .opt_str("volume-ctrl")
.as_ref() .as_ref()
.map(|volume_ctrl| VolumeCtrl::from_str(volume_ctrl).expect("Invalid volume ctrl type")) .map(|volume_ctrl| VolumeCtrl::from_str(volume_ctrl).expect("Invalid volume ctrl type"))
.unwrap_or(VolumeCtrl::default()); .unwrap_or_default();
ConnectConfig { ConnectConfig {
name: name, name,
device_type: device_type, device_type,
volume: initial_volume, volume: initial_volume,
volume_ctrl: volume_ctrl, volume_ctrl,
autoplay: matches.opt_present("autoplay"), autoplay: matches.opt_present("autoplay"),
} }
}; };
@ -428,251 +529,202 @@ fn setup(args: &[String]) -> Setup {
let enable_discovery = !matches.opt_present("disable-discovery"); let enable_discovery = !matches.opt_present("disable-discovery");
Setup { Setup {
backend: backend, format,
cache: cache, backend,
session_config: session_config, cache,
player_config: player_config, session_config,
connect_config: connect_config, player_config,
credentials: credentials, connect_config,
device: device, credentials,
enable_discovery: enable_discovery, device,
zeroconf_port: zeroconf_port, enable_discovery,
mixer: mixer, zeroconf_port,
mixer_config: mixer_config, mixer,
mixer_config,
player_event_program: matches.opt_str("onevent"), player_event_program: matches.opt_str("onevent"),
emit_sink_events: matches.opt_present("emit-sink-events"), emit_sink_events: matches.opt_present("emit-sink-events"),
} }
} }
struct Main { #[tokio::main(flavor = "current_thread")]
cache: Option<Cache>, async fn main() {
player_config: PlayerConfig,
session_config: SessionConfig,
connect_config: ConnectConfig,
backend: fn(Option<String>) -> Box<dyn Sink>,
device: Option<String>,
mixer: fn(Option<MixerConfig>) -> Box<dyn Mixer>,
mixer_config: MixerConfig,
handle: Handle,
discovery: Option<DiscoveryStream>,
signal: IoStream<()>,
spirc: Option<Spirc>,
spirc_task: Option<SpircTask>,
connect: Box<dyn Future<Item = Session, Error = AuthenticationError>>,
shutdown: bool,
last_credentials: Option<Credentials>,
auto_connect_times: Vec<Instant>,
player_event_channel: Option<UnboundedReceiver<PlayerEvent>>,
player_event_program: Option<String>,
emit_sink_events: bool,
}
impl Main {
fn new(handle: Handle, setup: Setup) -> Main {
let mut task = Main {
handle: handle.clone(),
cache: setup.cache,
session_config: setup.session_config,
player_config: setup.player_config,
connect_config: setup.connect_config,
backend: setup.backend,
device: setup.device,
mixer: setup.mixer,
mixer_config: setup.mixer_config,
connect: Box::new(futures::future::empty()),
discovery: None,
spirc: None,
spirc_task: None,
shutdown: false,
last_credentials: None,
auto_connect_times: Vec::new(),
signal: Box::new(tokio_signal::ctrl_c().flatten_stream()),
player_event_channel: None,
player_event_program: setup.player_event_program,
emit_sink_events: setup.emit_sink_events,
};
if setup.enable_discovery {
let config = task.connect_config.clone();
let device_id = task.session_config.device_id.clone();
task.discovery =
Some(discovery(&handle, config, device_id, setup.zeroconf_port).unwrap());
}
if let Some(credentials) = setup.credentials {
task.credentials(credentials);
}
task
}
fn credentials(&mut self, credentials: Credentials) {
self.last_credentials = Some(credentials.clone());
let config = self.session_config.clone();
let handle = self.handle.clone();
let connection = Session::connect(config, credentials, self.cache.clone(), handle);
self.connect = connection;
self.spirc = None;
let task = mem::replace(&mut self.spirc_task, None);
if let Some(task) = task {
self.handle.spawn(task);
}
}
}
impl Future for Main {
type Item = ();
type Error = ();
fn poll(&mut self) -> Poll<(), ()> {
loop {
let mut progress = false;
if let Some(Async::Ready(Some(creds))) =
self.discovery.as_mut().map(|d| d.poll().unwrap())
{
if let Some(ref spirc) = self.spirc {
spirc.shutdown();
}
self.auto_connect_times.clear();
self.credentials(creds);
progress = true;
}
match self.connect.poll() {
Ok(Async::Ready(session)) => {
self.connect = Box::new(futures::future::empty());
let mixer_config = self.mixer_config.clone();
let mixer = (self.mixer)(Some(mixer_config));
let player_config = self.player_config.clone();
let connect_config = self.connect_config.clone();
let audio_filter = mixer.get_audio_filter();
let backend = self.backend;
let device = self.device.clone();
let (player, event_channel) =
Player::new(player_config, session.clone(), audio_filter, move || {
(backend)(device)
});
if self.emit_sink_events {
if let Some(player_event_program) = &self.player_event_program {
let player_event_program = player_event_program.clone();
player.set_sink_event_callback(Some(Box::new(move |sink_status| {
emit_sink_event(sink_status, &player_event_program)
})));
}
}
let (spirc, spirc_task) = Spirc::new(connect_config, session, player, mixer);
self.spirc = Some(spirc);
self.spirc_task = Some(spirc_task);
self.player_event_channel = Some(event_channel);
progress = true;
}
Ok(Async::NotReady) => (),
Err(error) => {
error!("Could not connect to server: {}", error);
self.connect = Box::new(futures::future::empty());
}
}
if let Async::Ready(Some(())) = self.signal.poll().unwrap() {
trace!("Ctrl-C received");
if !self.shutdown {
if let Some(ref spirc) = self.spirc {
spirc.shutdown();
} else {
return Ok(Async::Ready(()));
}
self.shutdown = true;
} else {
return Ok(Async::Ready(()));
}
progress = true;
}
let mut drop_spirc_and_try_to_reconnect = false;
if let Some(ref mut spirc_task) = self.spirc_task {
if let Async::Ready(()) = spirc_task.poll().unwrap() {
if self.shutdown {
return Ok(Async::Ready(()));
} else {
warn!("Spirc shut down unexpectedly");
drop_spirc_and_try_to_reconnect = true;
}
progress = true;
}
}
if drop_spirc_and_try_to_reconnect {
self.spirc_task = None;
while (!self.auto_connect_times.is_empty())
&& ((Instant::now() - self.auto_connect_times[0]).as_secs() > 600)
{
let _ = self.auto_connect_times.remove(0);
}
if let Some(credentials) = self.last_credentials.clone() {
if self.auto_connect_times.len() >= 5 {
warn!("Spirc shut down too often. Not reconnecting automatically.");
} else {
self.auto_connect_times.push(Instant::now());
self.credentials(credentials);
}
}
}
if let Some(ref mut player_event_channel) = self.player_event_channel {
if let Async::Ready(Some(event)) = player_event_channel.poll().unwrap() {
progress = true;
if let Some(ref program) = self.player_event_program {
if let Some(child) = run_program_on_events(event, program) {
if child.is_ok() {
let child = child
.unwrap()
.map(|status| {
if !status.success() {
error!("child exited with status {:?}", status.code());
}
})
.map_err(|e| error!("failed to wait on child process: {}", e));
self.handle.spawn(child);
} else {
error!("{:?} failed to start", program);
}
}
}
}
}
if !progress {
return Ok(Async::NotReady);
}
}
}
}
fn main() {
if env::var("RUST_BACKTRACE").is_err() { if env::var("RUST_BACKTRACE").is_err() {
env::set_var("RUST_BACKTRACE", "full") env::set_var("RUST_BACKTRACE", "full")
} }
let mut core = Core::new().unwrap();
let handle = core.handle();
let args: Vec<String> = std::env::args().collect(); let args: Vec<String> = std::env::args().collect();
let setup = get_setup(&args);
core.run(Main::new(handle, setup(&args))).unwrap() let mut last_credentials = None;
let mut spirc: Option<Spirc> = None;
let mut spirc_task: Option<Pin<_>> = None;
let mut player_event_channel: Option<UnboundedReceiver<PlayerEvent>> = None;
let mut auto_connect_times: Vec<Instant> = vec![];
let mut discovery = None;
let mut connecting: Pin<Box<dyn future::FusedFuture<Output = _>>> = Box::pin(future::pending());
if setup.enable_discovery {
let config = setup.connect_config.clone();
let device_id = setup.session_config.device_id.clone();
discovery = Some(
librespot_connect::discovery::discovery(config, device_id, setup.zeroconf_port)
.unwrap(),
);
}
if let Some(credentials) = setup.credentials {
last_credentials = Some(credentials.clone());
connecting = Box::pin(
Session::connect(
setup.session_config.clone(),
credentials,
setup.cache.clone(),
)
.fuse(),
);
}
loop {
tokio::select! {
credentials = async { discovery.as_mut().unwrap().next().await }, if discovery.is_some() => {
match credentials {
Some(credentials) => {
last_credentials = Some(credentials.clone());
auto_connect_times.clear();
if let Some(spirc) = spirc.take() {
spirc.shutdown();
}
if let Some(spirc_task) = spirc_task.take() {
// Continue shutdown in its own task
tokio::spawn(spirc_task);
}
connecting = Box::pin(Session::connect(
setup.session_config.clone(),
credentials,
setup.cache.clone(),
).fuse());
},
None => {
warn!("Discovery stopped!");
discovery = None;
}
}
},
session = &mut connecting, if !connecting.is_terminated() => match session {
Ok(session) => {
let mixer_config = setup.mixer_config.clone();
let mixer = (setup.mixer)(Some(mixer_config));
let player_config = setup.player_config.clone();
let connect_config = setup.connect_config.clone();
let audio_filter = mixer.get_audio_filter();
let format = setup.format;
let backend = setup.backend;
let device = setup.device.clone();
let (player, event_channel) =
Player::new(player_config, session.clone(), audio_filter, move || {
(backend)(device, format)
});
if setup.emit_sink_events {
if let Some(player_event_program) = setup.player_event_program.clone() {
player.set_sink_event_callback(Some(Box::new(move |sink_status| {
match emit_sink_event(sink_status, &player_event_program) {
Ok(e) if e.success() => (),
Ok(e) => {
if let Some(code) = e.code() {
warn!("Sink event prog returned exit code {}", code);
} else {
warn!("Sink event prog returned failure");
}
}
Err(e) => {
warn!("Emitting sink event failed: {}", e);
}
}
})));
}
};
let (spirc_, spirc_task_) = Spirc::new(connect_config, session, player, mixer);
spirc = Some(spirc_);
spirc_task = Some(Box::pin(spirc_task_));
player_event_channel = Some(event_channel);
},
Err(e) => {
warn!("Connection failed: {}", e);
}
},
_ = async { spirc_task.as_mut().unwrap().await }, if spirc_task.is_some() => {
spirc_task = None;
warn!("Spirc shut down unexpectedly");
while !auto_connect_times.is_empty()
&& ((Instant::now() - auto_connect_times[0]).as_secs() > 600)
{
let _ = auto_connect_times.remove(0);
}
if let Some(credentials) = last_credentials.clone() {
if auto_connect_times.len() >= 5 {
warn!("Spirc shut down too often. Not reconnecting automatically.");
} else {
auto_connect_times.push(Instant::now());
connecting = Box::pin(Session::connect(
setup.session_config.clone(),
credentials,
setup.cache.clone(),
).fuse());
}
}
},
event = async { player_event_channel.as_mut().unwrap().recv().await }, if player_event_channel.is_some() => match event {
Some(event) => {
if let Some(program) = &setup.player_event_program {
if let Some(child) = run_program_on_events(event, program) {
if child.is_ok() {
let mut child = child.unwrap();
tokio::spawn(async move {
match child.wait().await {
Ok(status) if !status.success() => error!("child exited with status {:?}", status.code()),
Err(e) => error!("failed to wait on child process: {}", e),
_ => {}
}
});
} else {
error!("program failed to start");
}
}
}
},
None => {
player_event_channel = None;
}
},
_ = tokio::signal::ctrl_c() => {
break;
}
}
}
info!("Gracefully shutting down");
// Shutdown spirc if necessary
if let Some(spirc) = spirc {
spirc.shutdown();
if let Some(mut spirc_task) = spirc_task {
tokio::select! {
_ = tokio::signal::ctrl_c() => (),
_ = spirc_task.as_mut() => ()
}
}
}
} }
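The rewritten main above replaces the hand-rolled Future implementation on Main with a single tokio::select! loop that polls a fused, replaceable connection future alongside discovery, the Spirc task, player events, and Ctrl-C. A stripped-down sketch of that pattern, assuming tokio (macros, rt, time, signal features) and the futures crate; the timer branch stands in for discovery, and everything else is illustrative rather than the commit's code:

use std::pin::Pin;
use std::time::Duration;

use futures::future::{self, FusedFuture, FutureExt};

#[tokio::main(flavor = "current_thread")]
async fn main() {
    // Placeholder that never resolves; swapped for a real attempt on demand.
    let mut connecting: Pin<Box<dyn FusedFuture<Output = Result<(), ()>>>> =
        Box::pin(future::pending());

    loop {
        tokio::select! {
            // Only polled while a connection attempt is actually in flight.
            result = &mut connecting, if !connecting.is_terminated() => match result {
                Ok(()) => println!("connected"),
                Err(()) => println!("connection failed"),
            },
            // Stand-in for "discovery handed us new credentials".
            _ = tokio::time::sleep(Duration::from_secs(5)), if connecting.is_terminated() => {
                connecting = Box::pin(async { Ok::<(), ()>(()) }.fuse());
            },
            _ = tokio::signal::ctrl_c() => break,
        }
    }

    println!("Gracefully shutting down");
}

Because every branch carries an if guard, a branch whose state is absent or already finished is simply disabled for that iteration, which is how the real loop combines the Option-wrapped discovery, spirc_task, and player_event_channel with the unwraps inside its async blocks.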
View file
@@ -1,23 +1,13 @@
use librespot::playback::player::PlayerEvent; use librespot::playback::player::PlayerEvent;
use librespot::playback::player::SinkStatus;
use log::info; use log::info;
use tokio::process::{Child as AsyncChild, Command as AsyncCommand};
use std::collections::HashMap; use std::collections::HashMap;
use std::io; use std::io;
use std::process::Command; use std::process::{Command, ExitStatus};
use tokio_process::{Child, CommandExt};
use futures::Future; pub fn run_program_on_events(event: PlayerEvent, onevent: &str) -> Option<io::Result<AsyncChild>> {
use librespot::playback::player::SinkStatus;
fn run_program(program: &str, env_vars: HashMap<&str, String>) -> io::Result<Child> {
let mut v: Vec<&str> = program.split_whitespace().collect();
info!("Running {:?} with environment variables {:?}", v, env_vars);
Command::new(&v.remove(0))
.args(&v)
.envs(env_vars.iter())
.spawn_async()
}
pub fn run_program_on_events(event: PlayerEvent, onevent: &str) -> Option<io::Result<Child>> {
let mut env_vars = HashMap::new(); let mut env_vars = HashMap::new();
match event { match event {
PlayerEvent::Changed { PlayerEvent::Changed {
@@ -68,10 +58,18 @@ pub fn run_program_on_events(event: PlayerEvent, onevent: &str) -> Option<io::Re
} }
_ => return None, _ => return None,
} }
Some(run_program(onevent, env_vars))
let mut v: Vec<&str> = onevent.split_whitespace().collect();
info!("Running {:?} with environment variables {:?}", v, env_vars);
Some(
AsyncCommand::new(&v.remove(0))
.args(&v)
.envs(env_vars.iter())
.spawn(),
)
} }
pub fn emit_sink_event(sink_status: SinkStatus, onevent: &str) { pub fn emit_sink_event(sink_status: SinkStatus, onevent: &str) -> io::Result<ExitStatus> {
let mut env_vars = HashMap::new(); let mut env_vars = HashMap::new();
env_vars.insert("PLAYER_EVENT", "sink".to_string()); env_vars.insert("PLAYER_EVENT", "sink".to_string());
let sink_status = match sink_status { let sink_status = match sink_status {
@@ -80,6 +78,12 @@ pub fn emit_sink_event(sink_status: SinkStatus, onevent: &str) {
SinkStatus::Closed => "closed", SinkStatus::Closed => "closed",
}; };
env_vars.insert("SINK_STATUS", sink_status.to_string()); env_vars.insert("SINK_STATUS", sink_status.to_string());
let mut v: Vec<&str> = onevent.split_whitespace().collect();
info!("Running {:?} with environment variables {:?}", v, env_vars);
let _ = run_program(onevent, env_vars).and_then(|child| child.wait()); Command::new(&v.remove(0))
.args(&v)
.envs(env_vars.iter())
.spawn()?
.wait()
} }
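Both hooks now go through tokio::process instead of tokio_process: run_program_on_events returns the spawned child so the caller can reap it in a background task, while emit_sink_event still waits synchronously and returns the exit status. A self-contained sketch of the non-blocking variant, assuming tokio with the process feature enabled; the spawn_hook helper and the echo command line are illustrative, not part of the commit:

use std::collections::HashMap;
use std::io;

use tokio::process::{Child, Command};

// Spawn a program without blocking the async runtime.
fn spawn_hook(program: &str, env_vars: HashMap<&str, String>) -> io::Result<Child> {
    let mut parts: Vec<&str> = program.split_whitespace().collect();
    Command::new(parts.remove(0))
        .args(&parts)
        .envs(env_vars.iter())
        .spawn()
}

#[tokio::main]
async fn main() -> io::Result<()> {
    let mut env_vars = HashMap::new();
    env_vars.insert("PLAYER_EVENT", "started".to_string());

    let mut child = spawn_hook("echo hook fired", env_vars)?;

    // Reap the child in the background so the event loop is never blocked.
    let reaper = tokio::spawn(async move {
        match child.wait().await {
            Ok(status) if !status.success() => {
                eprintln!("child exited with status {:?}", status.code())
            }
            Err(e) => eprintln!("failed to wait on child process: {}", e),
            _ => {}
        }
    });

    // librespot leaves this task running in the background; here we await it
    // so the example exits cleanly.
    reaper.await.expect("reaper task panicked");
    Ok(())
}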