Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 21 additions & 27 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

153 changes: 141 additions & 12 deletions client/src/migration.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,6 @@
use crate::{AsAttributes, Result, dbus::Service, file::UnlockedKeyring};
use std::path::Path;

use crate::{AsAttributes, Result, Secret, dbus::Service, file::UnlockedKeyring};

/// Helper to migrate your secrets from the host Secret Service
/// to the sandboxed file backend.
Expand All @@ -7,17 +9,29 @@ use crate::{AsAttributes, Result, dbus::Service, file::UnlockedKeyring};
/// Secret Service.
pub async fn migrate(attributes: Vec<impl AsAttributes>, replace: bool) -> Result<()> {
    let service = Service::new().await?;
    // Inside the sandbox the file-backend key comes from the Secret portal.
    // If the portal is not present there is no sandboxed keyring to migrate
    // into, so treat that as "nothing to do" and return success rather than
    // surfacing an error to the caller.
    let secret = match Secret::sandboxed().await {
        Ok(secret) => Ok(secret),
        Err(super::file::Error::Portal(ashpd::Error::PortalNotFound(_))) => {
            #[cfg(feature = "tracing")]
            tracing::debug!("Portal not available, no migration to do");
            return Ok(());
        }
        Err(err) => Err(err),
    }?;
    let keyring_path = crate::file::api::Keyring::default_path()?;

    // Delegate to the testable inner function with an explicit service,
    // secret and keyring path.
    migrate_inner(&service, secret, &keyring_path, attributes, replace).await
}

/// Inner migration function for testing.
async fn migrate_inner(
service: &Service,
secret: Secret,
keyring_path: &Path,
attributes: Vec<impl AsAttributes>,
replace: bool,
) -> Result<()> {
let file_backend = UnlockedKeyring::load(keyring_path, secret).await?;

let collection = service.default_collection().await?;
let mut all_items = Vec::default();
Expand Down Expand Up @@ -58,3 +72,118 @@ pub async fn migrate(attributes: Vec<impl AsAttributes>, replace: bool) -> Resul

Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::{Secret, dbus::Service, file::UnlockedKeyring};

    #[tokio::test]
    #[cfg(feature = "tokio")]
    async fn test_migrate_from_dbus_to_file() {
        let tmp = tempfile::tempdir().unwrap();
        let setup = oo7_server::tests::TestServiceSetup::plain_session(true)
            .await
            .unwrap();

        // Talk to the in-process test server through its client connection.
        let service = Service::new_with_connection(&setup.client_conn)
            .await
            .unwrap();

        // Seed the DBus backend with two items sharing the same app attribute.
        let collection = service.default_collection().await.unwrap();
        for (label, user, payload) in [
            ("Migration Test 1", "alice", "secret1"),
            ("Migration Test 2", "bob", "secret2"),
        ] {
            collection
                .create_item(
                    label,
                    &[("app", "test-migration"), ("user", user)],
                    payload,
                    false,
                    None,
                )
                .await
                .unwrap();
        }

        // Both items must be visible on the DBus side before migrating.
        let items_before = collection
            .search_items(&[("app", "test-migration")])
            .await
            .unwrap();
        assert_eq!(items_before.len(), 2);

        // Destination keyring for the file backend, keyed with a fixed
        // 64-byte secret so we can reopen it afterwards.
        let keyring_path = tmp.path().join("migrated.keyring");
        let key = Secret::from([1, 2].into_iter().cycle().take(64).collect::<Vec<_>>());

        // Run the migration through the internal entry point.
        migrate_inner(
            &service,
            key.clone(),
            &keyring_path,
            vec![&[("app", "test-migration")]],
            false,
        )
        .await
        .unwrap();

        // The migrated items are removed from the DBus backend...
        let items_after = collection
            .search_items(&[("app", "test-migration")])
            .await
            .unwrap();
        assert_eq!(items_after.len(), 0);

        // ...and show up in the file backend instead.
        let keyring = UnlockedKeyring::load(&keyring_path, key).await.unwrap();
        let migrated = keyring
            .search_items(&[("app", "test-migration")])
            .await
            .unwrap();
        assert_eq!(migrated.len(), 2);

        // Look an item up by its "user" attribute.
        let by_user = |user: &str| {
            migrated
                .iter()
                .find(|item| item.attributes().get("user").is_some_and(|u| u == user))
        };

        // Label, secret and attributes must have survived the round trip.
        let alice = by_user("alice").expect("Alice's item should exist");
        assert_eq!(alice.label(), "Migration Test 1");
        assert_eq!(alice.secret(), Secret::text("secret1"));
        assert_eq!(alice.attributes().get("app").unwrap(), "test-migration");

        let bob = by_user("bob").expect("Bob's item should exist");
        assert_eq!(bob.label(), "Migration Test 2");
        assert_eq!(bob.secret(), Secret::text("secret2"));
        assert_eq!(bob.attributes().get("app").unwrap(), "test-migration");
    }
}
8 changes: 6 additions & 2 deletions coverage.sh
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,9 @@ grcov coverage-raw/combined.info \
--ignore "**/examples/*" \
--ignore "**/kwallet/*" \
--ignore "**/target/*" \
--ignore "**/error.rs"
--ignore "**/error.rs" \
--ignore "**/main.rs" \
--ignore "**/capability.rs"

# Generate HTML report with grcov
grcov coverage-raw/combined.info \
Expand All @@ -78,7 +80,9 @@ grcov coverage-raw/combined.info \
--ignore "**/examples/*" \
--ignore "**/kwallet/*" \
--ignore "**/target/*" \
--ignore "**/error.rs"
--ignore "**/error.rs" \
--ignore "**/main.rs" \
--ignore "**/capability.rs"

# Extract and display coverage percentage
if [ -f coverage/html/coverage.json ]; then
Expand Down
2 changes: 1 addition & 1 deletion kwallet/parser/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ exclude.workspace = true
blowfish = "0.9"
cbc = "0.1"
ecb = "0.1"
md5 = "0.7"
md-5 = "0.10"
sha1 = "0.10"
sha2 = "0.10"
pbkdf2 = { version = "0.12", default-features = false, features = ["simple"] }
Expand Down
3 changes: 2 additions & 1 deletion kwallet/parser/src/crypto.rs
Original file line number Diff line number Diff line change
Expand Up @@ -222,7 +222,8 @@ fn validate_sha1(data: &[u8], expected_hash: &[u8]) -> Result<()> {

/// Compute the MD5 digest of `data`.
pub fn compute_md5(data: &[u8]) -> [u8; 16] {
    // The RustCrypto `md-5` crate (imported as `md5`) hashes through the
    // `Digest` trait; `digest` returns a GenericArray<u8, U16> that converts
    // into a plain `[u8; 16]`.
    use md5::Digest;
    md5::Md5::digest(data).into()
}

/// Extract wallet data from decrypted payload and validate SHA-1 hash
Expand Down
Loading