Compare commits

...

2 Commits

Author  SHA1  Message  Date

Oystein Kristoffer Tveit  66d0e451ee  WIP: add more tests  2024-05-01 22:39:15 +02:00
    Build and test / check (pull_request): Failing after 1m51s
    Build and test / build (pull_request): Successful in 1m57s
    Build and test / test (pull_request): Successful in 4m6s
    Build and test / docs (pull_request): Successful in 2m32s

Oystein Kristoffer Tveit  6ab4bb9d54  add script to create coverage report manually  2024-05-01 22:39:15 +02:00
6 changed files with 238 additions and 53 deletions


@@ -22,7 +22,8 @@ tokio-stream = { version = "0.1.15", features = ["sync"] }
 [dev-dependencies]
 env_logger = "0.10.0"
 test-log = "0.2.15"
-tokio = { version = "1.37.0", features = ["rt-multi-thread", "time"] }
+tokio = { version = "1.37.0", features = ["rt-multi-thread", "time", "process"] }
+uuid = { version = "1.8.0", features = ["v4"] }
 
 [lib]
 doctest = false
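
A note on these dev-dependency changes: they back the new test code further down in this diff. Tokio's "process" feature provides tokio::process::Command, which the new tests/integration.rs uses to spawn a headless mpv, and uuid (with the "v4" feature) is used there to generate unique /tmp socket paths.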


@@ -26,9 +26,10 @@
 devShell = forAllSystems (system: pkgs: toolchain: pkgs.mkShell {
   packages = [
     (toolchain.withComponents [
-      "cargo" "rustc" "rustfmt" "clippy"
+      "cargo" "rustc" "rustfmt" "clippy" "llvm-tools"
     ])
     pkgs.mpv
+    pkgs.grcov
   ];
   RUST_SRC_PATH = "${toolchain.rust-src}/lib/rustlib/src/rust/";
 });
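
The "llvm-tools" rustup component (which ships llvm-profdata and friends) and pkgs.grcov are presumably added for the new coverage workflow: scripts/coverage.sh below compiles the tests with -Cinstrument-coverage and feeds the resulting .profraw files to grcov.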

scripts/coverage.sh (new executable file, 22 lines added)

@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+mkdir -p target/coverage
+
+echo "Running tests"
+RUSTFLAGS="-Cinstrument-coverage" LLVM_PROFILE_FILE="target/coverage/%p-%m.profraw" cargo test --all-features --release --no-fail-fast --lib --bins
+
+echo "Generating coverage report"
+grcov \
+  --source-dir . \
+  --binary-path ./target/release/deps/ \
+  --excl-start 'mod test* \{' \
+  --ignore 'tests/*' \
+  --ignore "*test.rs" \
+  --ignore "*tests.rs" \
+  --ignore "*github.com*" \
+  --ignore "*libcore*" \
+  --ignore "*rustc*" \
+  --ignore "*liballoc*" \
+  --ignore "*cargo*" \
+  -t html \
+  -o ./target/coverage/html \
+  target/coverage/
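
A usage note for the script above (assuming it is run from the repository root): the instrumented cargo test run drops .profraw files into target/coverage/, and grcov writes the HTML report to ./target/coverage/html (the -o argument), so target/coverage/html/index.html should be the place to look afterwards. The --excl-start 'mod test* \{' and --ignore patterns appear to be there to keep test code and external crates out of the coverage numbers.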


@@ -68,7 +68,7 @@ pub(crate) trait IntoRawCommandPart {
 }
 
 /// Generic data type representing all possible data types that mpv can return.
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
 pub enum MpvDataType {
     Array(Vec<MpvDataType>),
     Bool(bool),
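
Deriving PartialEq here is what lets the new unit tests further down compare converted values directly with assert_eq!; a HashMap<String, MpvDataType> or Vec<MpvDataType> is only comparable when MpvDataType itself is. A quick sketch using only variants visible in this hunk:

let a = MpvDataType::Array(vec![MpvDataType::Bool(true)]);
let b = MpvDataType::Array(vec![MpvDataType::Bool(true)]);
assert_eq!(a, b);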


@@ -136,61 +136,27 @@ pub(crate) fn json_map_to_hashmap(
 }
 
 pub(crate) fn json_array_to_vec(array: &[Value]) -> Vec<MpvDataType> {
-    let mut output: Vec<MpvDataType> = Vec::new();
-    if !array.is_empty() {
-        match array[0] {
-            Value::Array(_) => {
-                for entry in array {
-                    if let Value::Array(ref a) = *entry {
-                        output.push(MpvDataType::Array(json_array_to_vec(a)));
-                    }
+    array
+        .iter()
+        .map(|entry| match entry {
+            Value::Array(a) => MpvDataType::Array(json_array_to_vec(a)),
+            Value::Bool(b) => MpvDataType::Bool(*b),
+            Value::Number(n) => {
+                if n.is_u64() {
+                    MpvDataType::Usize(n.as_u64().unwrap() as usize)
+                } else if n.is_f64() {
+                    MpvDataType::Double(n.as_f64().unwrap())
+                } else {
+                    panic!("unimplemented number");
                 }
             }
-            Value::Bool(_) => {
-                for entry in array {
-                    if let Value::Bool(ref b) = *entry {
-                        output.push(MpvDataType::Bool(*b));
-                    }
-                }
-            }
-            Value::Number(_) => {
-                for entry in array {
-                    if let Value::Number(ref n) = *entry {
-                        if n.is_u64() {
-                            output.push(MpvDataType::Usize(n.as_u64().unwrap() as usize));
-                        } else if n.is_f64() {
-                            output.push(MpvDataType::Double(n.as_f64().unwrap()));
-                        } else {
-                            panic!("unimplemented number");
-                        }
-                    }
-                }
-            }
-            Value::Object(_) => {
-                for entry in array {
-                    if let Value::Object(ref map) = *entry {
-                        output.push(MpvDataType::HashMap(json_map_to_hashmap(map)));
-                    }
-                }
-            }
-            Value::String(_) => {
-                for entry in array {
-                    if let Value::String(ref s) = *entry {
-                        output.push(MpvDataType::String(s.to_string()));
-                    }
-                }
-            }
+            Value::Object(o) => MpvDataType::HashMap(json_map_to_hashmap(&o)),
+            Value::String(s) => MpvDataType::String(s.to_owned()),
             Value::Null => {
                 unimplemented!();
             }
-        }
-    }
-    output
+        })
+        .collect()
 }
 
 pub(crate) fn json_array_to_playlist(array: &[Value]) -> Vec<PlaylistEntry> {
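
Worth spelling out what this refactor changes behaviourally: the old version dispatched on the type of array[0] and silently skipped entries of any other type, so mixed-type arrays lost data; the new version converts every entry according to its own type, and still panics on unsupported numbers and on null (unimplemented!()). A minimal sketch of the new behaviour, using serde_json's json! macro the same way the tests below do:

let mixed = json!([1, true, "x"]);
assert_eq!(
    json_array_to_vec(mixed.as_array().unwrap()),
    vec![
        MpvDataType::Usize(1),
        MpvDataType::Bool(true),
        MpvDataType::String("x".to_string()),
    ]
);
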
@@ -217,3 +183,137 @@ pub(crate) fn json_array_to_playlist(array: &[Value]) -> Vec<PlaylistEntry> {
     }
     output
 }
+
+#[cfg(test)]
+mod test {
+    use super::*;
+    use crate::MpvDataType;
+    use serde_json::json;
+    use std::collections::HashMap;
+
+    #[test]
+    fn test_json_map_to_hashmap() {
+        let json = json!({
+            "array": [1, 2, 3],
+            "bool": true,
+            "double": 1.0,
+            "usize": 1,
+            "string": "string",
+            "object": {
+                "key": "value"
+            }
+        });
+
+        let mut expected = HashMap::new();
+        expected.insert(
+            "array".to_string(),
+            MpvDataType::Array(vec![
+                MpvDataType::Usize(1),
+                MpvDataType::Usize(2),
+                MpvDataType::Usize(3),
+            ]),
+        );
+        expected.insert("bool".to_string(), MpvDataType::Bool(true));
+        expected.insert("double".to_string(), MpvDataType::Double(1.0));
+        expected.insert("usize".to_string(), MpvDataType::Usize(1));
+        expected.insert(
+            "string".to_string(),
+            MpvDataType::String("string".to_string()),
+        );
+        expected.insert(
+            "object".to_string(),
+            MpvDataType::HashMap(HashMap::from([(
+                "key".to_string(),
+                MpvDataType::String("value".to_string()),
+            )])),
+        );
+
+        assert_eq!(json_map_to_hashmap(json.as_object().unwrap()), expected);
+    }
+
+    #[test]
+    #[should_panic]
+    fn test_json_map_to_hashmap_fail_on_null() {
+        json_map_to_hashmap(
+            json!({
+                "null": null
+            })
+            .as_object()
+            .unwrap(),
+        );
+    }
+
+    #[test]
+    fn test_json_array_to_vec() {
+        let json = json!([
+            [1, 2, 3],
+            true,
+            1.0,
+            1,
+            "string",
+            {
+                "key": "value"
+            }
+        ]);
+
+        println!("{:?}", json.as_array().unwrap());
+        println!("{:?}", json_array_to_vec(json.as_array().unwrap()));
+
+        let expected = vec![
+            MpvDataType::Array(vec![
+                MpvDataType::Usize(1),
+                MpvDataType::Usize(2),
+                MpvDataType::Usize(3),
+            ]),
+            MpvDataType::Bool(true),
+            MpvDataType::Double(1.0),
+            MpvDataType::Usize(1),
+            MpvDataType::String("string".to_string()),
+            MpvDataType::HashMap(HashMap::from([(
+                "key".to_string(),
+                MpvDataType::String("value".to_string()),
+            )])),
+        ];
+
+        assert_eq!(json_array_to_vec(json.as_array().unwrap()), expected);
+    }
+
+    #[test]
+    #[should_panic]
+    fn test_json_array_to_vec_fail_on_null() {
+        json_array_to_vec(json!([null]).as_array().unwrap().as_slice());
+    }
+
+    #[test]
+    fn test_json_array_to_playlist() {
+        let json = json!([
+            {
+                "filename": "file1",
+                "title": "title1",
+                "current": true
+            },
+            {
+                "filename": "file2",
+                "title": "title2",
+                "current": false
+            }
+        ]);
+
+        let expected = vec![
+            PlaylistEntry {
+                id: 0,
+                filename: "file1".to_string(),
+                title: "title1".to_string(),
+                current: true,
+            },
+            PlaylistEntry {
+                id: 1,
+                filename: "file2".to_string(),
+                title: "title2".to_string(),
+                current: false,
+            },
+        ];
+
+        assert_eq!(json_array_to_playlist(json.as_array().unwrap()), expected);
+    }
+}
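
The two #[should_panic] cases document the current null handling: json_array_to_vec hits the unimplemented!() arm shown above when it encounters a JSON null, and json_map_to_hashmap is likewise expected to panic, so null values remain unsupported by these converters for now.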

tests/integration.rs (new file, 61 lines added)

@@ -0,0 +1,61 @@
+use mpvipc::{Error, Mpv, MpvExt};
+use std::path::Path;
+use tokio::{
+    process::{Child, Command},
+    time::{sleep, timeout, Duration},
+};
+
+#[cfg(target_family = "unix")]
+async fn spawn_headless_mpv() -> Result<(Child, Mpv), Error> {
+    let socket_path_str = format!("/tmp/mpv-ipc-{}", uuid::Uuid::new_v4());
+    let socket_path = Path::new(&socket_path_str);
+
+    let process_handle = Command::new("mpv")
+        .arg("--no-config")
+        .arg("--idle")
+        .arg("--no-video")
+        .arg("--no-audio")
+        .arg(format!(
+            "--input-ipc-server={}",
+            &socket_path.to_str().unwrap()
+        ))
+        .spawn()
+        .expect("Failed to start mpv");
+
+    if timeout(Duration::from_millis(500), async {
+        while !&socket_path.exists() {
+            sleep(Duration::from_millis(10)).await;
+        }
+    })
+    .await
+    .is_err()
+    {
+        panic!("Failed to create mpv socket at {:?}", &socket_path);
+    }
+
+    let mpv = Mpv::connect(socket_path.to_str().unwrap()).await.unwrap();
+    Ok((process_handle, mpv))
+}
+
+#[tokio::test]
+#[cfg(target_family = "unix")]
+async fn test_get_mpv_version() {
+    let (mut proc, mpv) = spawn_headless_mpv().await.unwrap();
+    let version: String = mpv.get_property("mpv-version").await.unwrap();
+    assert!(version.starts_with("mpv"));
+
+    mpv.kill().await.unwrap();
+    proc.kill().await.unwrap();
+}
+
+#[tokio::test]
+#[cfg(target_family = "unix")]
+async fn test_set_property() {
+    let (mut proc, mpv) = spawn_headless_mpv().await.unwrap();
+    mpv.set_property("pause", true).await.unwrap();
+    let paused: bool = mpv.get_property("pause").await.unwrap();
+    assert!(paused);
+
+    mpv.kill().await.unwrap();
+    proc.kill().await.unwrap();
+}