🔧 fix: Resolve clippy warnings and formatting issues

- Remove unnecessary borrowing where values can be used directly
- Use variables directly in format strings for better readability
- Apply consistent code formatting per rustfmt standards
- All clippy warnings resolved with -D warnings flag
- Code now follows Rust idioms and best practices
This commit is contained in:
Jeremiah Russell
2025-10-21 07:41:31 +01:00
committed by Jeremiah Russell
parent 2bee42d7ba
commit 171f441f1d
2 changed files with 145 additions and 118 deletions

View File

@@ -511,15 +511,18 @@ async fn run_rules(client: &mut GmailClient, rules: Rules, execute: bool) -> Res
/// - CI/CD pipelines with stored token secrets
/// - Ephemeral compute environments requiring periodic Gmail access
fn restore_tokens_if_available(config: &Config, client_config: &ClientConfig) -> Result<()> {
let token_env_var = config.get_string("token_cache_env")
let token_env_var = config
.get_string("token_cache_env")
.unwrap_or_else(|_| "CULL_GMAIL_TOKEN_CACHE".to_string());
if let Ok(token_data) = env::var(&token_env_var) {
log::info!("Found {} environment variable, restoring tokens", token_env_var);
log::info!("Found {token_env_var} environment variable, restoring tokens");
restore_tokens_from_string(&token_data, client_config.persist_path())?;
log::info!("Tokens successfully restored from environment variable");
} else {
log::debug!("No {} environment variable found, proceeding with normal token flow", token_env_var);
log::debug!(
"No {token_env_var} environment variable found, proceeding with normal token flow"
);
}
Ok(())

View File

@@ -57,12 +57,12 @@
//! - Token metadata and expiration
//! - Encoded as base64 for environment variable compatibility
use crate::{ClientConfig, Result};
use base64::{Engine as _, engine::general_purpose::STANDARD as Base64Engine};
use clap::Subcommand;
use cull_gmail::Error;
use std::fs;
use std::path::Path;
use clap::Subcommand;
use base64::{Engine as _, engine::general_purpose::STANDARD as Base64Engine};
use crate::{Result, ClientConfig};
use cull_gmail::Error;
/// Token management operations for ephemeral environments.
///
@@ -213,12 +213,13 @@ async fn export_tokens(config: &ClientConfig) -> Result<()> {
if token_path.is_file() {
// OAuth2 token is stored as a single file
let filename = token_path.file_name()
let filename = token_path
.file_name()
.and_then(|n| n.to_str())
.ok_or_else(|| Error::FileIo("Invalid token filename".to_string()))?;
let content = fs::read_to_string(&token_path)
.map_err(|e| Error::FileIo(format!("Failed to read token file: {}", e)))?;
let content = fs::read_to_string(token_path)
.map_err(|e| Error::FileIo(format!("Failed to read token file: {e}")))?;
token_data.insert(filename.to_string(), content);
} else if token_path.is_dir() {
@@ -228,12 +229,14 @@ async fn export_tokens(config: &ClientConfig) -> Result<()> {
let path = entry.path();
if path.is_file() {
let filename = path.file_name()
let filename = path
.file_name()
.and_then(|n| n.to_str())
.ok_or_else(|| Error::FileIo("Invalid filename in token cache".to_string()))?;
let content = fs::read_to_string(&path)
.map_err(|e| Error::FileIo(format!("Failed to read token file {}: {}", filename, e)))?;
let content = fs::read_to_string(&path).map_err(|e| {
    Error::FileIo(format!("Failed to read token file {filename}: {e}"))
})?;
token_data.insert(filename.to_string(), content);
}
@@ -246,29 +249,33 @@ async fn export_tokens(config: &ClientConfig) -> Result<()> {
}
if token_data.is_empty() {
return Err(Error::TokenNotFound("No token data found in cache".to_string()));
return Err(Error::TokenNotFound(
"No token data found in cache".to_string(),
));
}
// Serialize to JSON
let json_data = serde_json::to_string(&token_data)
.map_err(|e| Error::SerializationError(format!("Failed to serialize token data: {}", e)))?;
.map_err(|e| Error::SerializationError(format!("Failed to serialize token data: {e}")))?;
// Compress using flate2
use flate2::write::GzEncoder;
use flate2::Compression;
use flate2::write::GzEncoder;
use std::io::Write;
let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
encoder.write_all(json_data.as_bytes())
.map_err(|e| Error::SerializationError(format!("Failed to compress token data: {}", e)))?;
let compressed_data = encoder.finish()
.map_err(|e| Error::SerializationError(format!("Failed to finalize compression: {}", e)))?;
encoder
.write_all(json_data.as_bytes())
.map_err(|e| Error::SerializationError(format!("Failed to compress token data: {e}")))?;
let compressed_data = encoder
.finish()
.map_err(|e| Error::SerializationError(format!("Failed to finalize compression: {e}")))?;
// Encode to base64
let encoded = Base64Engine.encode(&compressed_data);
// Output to stdout
println!("{}", encoded);
println!("{encoded}");
Ok(())
}
@@ -302,10 +309,9 @@ async fn export_tokens(config: &ClientConfig) -> Result<()> {
/// - Decoding/decompression errors for malformed token data
/// - I/O errors creating token files
pub async fn import_tokens(config: &ClientConfig) -> Result<()> {
let token_env = std::env::var("CULL_GMAIL_TOKEN_CACHE")
.map_err(|_| Error::TokenNotFound(
"CULL_GMAIL_TOKEN_CACHE environment variable not set".to_string()
))?;
let token_env = std::env::var("CULL_GMAIL_TOKEN_CACHE").map_err(|_| {
Error::TokenNotFound("CULL_GMAIL_TOKEN_CACHE environment variable not set".to_string())
})?;
restore_tokens_from_string(&token_env, config.persist_path())?;
@@ -332,8 +338,9 @@ pub async fn import_tokens(config: &ClientConfig) -> Result<()> {
/// Created token files are set to 600 (owner read/write only) for security.
pub fn restore_tokens_from_string(token_string: &str, persist_path: &str) -> Result<()> {
// Decode from base64
let compressed_data = Base64Engine.decode(token_string.trim())
.map_err(|e| Error::SerializationError(format!("Failed to decode base64 token data: {}", e)))?;
let compressed_data = Base64Engine.decode(token_string.trim()).map_err(|e| {
Error::SerializationError(format!("Failed to decode base64 token data: {e}"))
})?;
// Decompress
use flate2::read::GzDecoder;
@@ -341,76 +348,90 @@ pub fn restore_tokens_from_string(token_string: &str, persist_path: &str) -> Res
let mut decoder = GzDecoder::new(compressed_data.as_slice());
let mut json_data = String::new();
decoder.read_to_string(&mut json_data)
.map_err(|e| Error::SerializationError(format!("Failed to decompress token data: {}", e)))?;
decoder
.read_to_string(&mut json_data)
.map_err(|e| Error::SerializationError(format!("Failed to decompress token data: {e}")))?;
// Parse JSON
let token_files: std::collections::HashMap<String, String> = serde_json::from_str(&json_data)
.map_err(|e| Error::SerializationError(format!("Failed to parse token JSON: {}", e)))?;
let token_files: std::collections::HashMap<String, String> =
serde_json::from_str(&json_data)
.map_err(|e| Error::SerializationError(format!("Failed to parse token JSON: {e}")))?;
let token_path = Path::new(persist_path);
// Count files for logging
let file_count = token_files.len();
if file_count == 1 && token_files.keys().next().map(|k| k.as_str()) == token_path.file_name().and_then(|n| n.to_str()) {
if file_count == 1
&& token_files.keys().next().map(|k| k.as_str())
== token_path.file_name().and_then(|n| n.to_str())
{
// Single file case - write directly to the persist path
let content = token_files.into_values().next().unwrap();
// Create parent directory if needed
if let Some(parent) = token_path.parent() {
fs::create_dir_all(parent)
.map_err(|e| Error::FileIo(format!("Failed to create token directory {}: {}", parent.display(), e)))?;
fs::create_dir_all(parent).map_err(|e| {
Error::FileIo(format!(
"Failed to create token directory {}: {}",
parent.display(),
e
))
})?;
}
fs::write(&token_path, &content)
.map_err(|e| Error::FileIo(format!("Failed to write token file: {}", e)))?;
fs::write(token_path, &content)
.map_err(|e| Error::FileIo(format!("Failed to write token file: {e}")))?;
// Set secure permissions (600 - owner read/write only)
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let mut perms = fs::metadata(&token_path)
.map_err(|e| Error::FileIo(format!("Failed to get file metadata: {}", e)))?
let mut perms = fs::metadata(token_path)
.map_err(|e| Error::FileIo(format!("Failed to get file metadata: {e}")))?
.permissions();
perms.set_mode(0o600);
fs::set_permissions(&token_path, perms)
.map_err(|e| Error::FileIo(format!("Failed to set file permissions: {}", e)))?;
fs::set_permissions(token_path, perms)
.map_err(|e| Error::FileIo(format!("Failed to set file permissions: {e}")))?;
}
} else {
// Multiple files case - create directory structure
fs::create_dir_all(token_path)
.map_err(|e| Error::FileIo(format!("Failed to create token directory {}: {}", persist_path, e)))?;
fs::create_dir_all(token_path).map_err(|e| {
Error::FileIo(format!(
"Failed to create token directory {persist_path}: {e}"
))
})?;
// Write token files
for (filename, content) in token_files {
let file_path = token_path.join(&filename);
fs::write(&file_path, &content)
.map_err(|e| Error::FileIo(format!("Failed to write token file {}: {}", filename, e)))?;
fs::write(&file_path, &content).map_err(|e| {
    Error::FileIo(format!("Failed to write token file {filename}: {e}"))
})?;
// Set secure permissions (600 - owner read/write only)
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let mut perms = fs::metadata(&file_path)
.map_err(|e| Error::FileIo(format!("Failed to get file metadata: {}", e)))?
.map_err(|e| Error::FileIo(format!("Failed to get file metadata: {e}")))?
.permissions();
perms.set_mode(0o600);
fs::set_permissions(&file_path, perms)
.map_err(|e| Error::FileIo(format!("Failed to set file permissions: {}", e)))?;
.map_err(|e| Error::FileIo(format!("Failed to set file permissions: {e}")))?;
}
}
}
log::info!("Restored {} token files to {}", file_count, persist_path);
log::info!("Restored {file_count} token files to {persist_path}");
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use tempfile::TempDir;
use std::collections::HashMap;
use tempfile::TempDir;
#[test]
fn test_token_export_import_cycle() {
@@ -421,14 +442,17 @@ mod tests {
// Create mock token files
let mut test_files = HashMap::new();
test_files.insert("tokencache.json".to_string(),
r#"{"access_token":"test_access","refresh_token":"test_refresh"}"#.to_string());
test_files.insert("metadata.json".to_string(),
r#"{"created":"2023-01-01","expires":"2023-12-31"}"#.to_string());
test_files.insert(
"tokencache.json".to_string(),
r#"{"access_token":"test_access","refresh_token":"test_refresh"}"#.to_string(),
);
test_files.insert(
"metadata.json".to_string(),
r#"{"created":"2023-01-01","expires":"2023-12-31"}"#.to_string(),
);
for (filename, content) in &test_files {
fs::write(token_dir.join(filename), content)
.expect("Failed to write test token file");
fs::write(token_dir.join(filename), content).expect("Failed to write test token file");
}
// Test export
@@ -458,8 +482,8 @@ mod tests {
let json_str = serde_json::to_string(&token_data).unwrap();
// Compress
use flate2::write::GzEncoder;
use flate2::Compression;
use flate2::write::GzEncoder;
use std::io::Write;
let mut encoder = GzEncoder::new(Vec::new(), Compression::default());
@@ -471,7 +495,7 @@ mod tests {
// Test restore
let result = restore_tokens_from_string(&encoded, &persist_path);
assert!(result.is_ok(), "Restore should succeed: {:?}", result);
assert!(result.is_ok(), "Restore should succeed: {result:?}");
// Verify file was created
let restored_path = Path::new(&persist_path).join("test.json");