diff --git a/src/api/config/mod.rs b/src/api/config/mod.rs
index adc3260..68b21ef 100644
--- a/src/api/config/mod.rs
+++ b/src/api/config/mod.rs
@@ -108,7 +108,7 @@ fn create_llm_model(agent: HashMap) -> Result,
         .find(|el| el.get_name().to_ascii_lowercase() == model_name.to_ascii_lowercase())
         .ok_or(NoLLMError)?;

-    Ok(llm_provider.create(model_config.clone())?)
+    llm_provider.create(model_config.clone())
 }

 fn create_memory_model(agent: HashMap) -> Result, Box> {
@@ -119,7 +119,7 @@ fn create_memory_model(agent: HashMap) -> Result Result> {
@@ -142,7 +142,7 @@ pub fn load_config(config: &str) -> Result<(String, SmartGPT), Box> {
     let mut exit = false;
     for (name, _) in &config.plugins {
         let plugin = plugins.iter().find(|el| el.name.to_ascii_lowercase() == name.to_ascii_lowercase());
-        if let None = plugin {
+        if plugin.is_none() {
             println!("{}: No plugin named \"{}\".", "Error".red(), name);
             exit = true;
         }
diff --git a/src/api/smartgpt.rs b/src/api/smartgpt.rs
index 8c3d5d8..d5f977a 100644
--- a/src/api/smartgpt.rs
+++ b/src/api/smartgpt.rs
@@ -1,9 +1,9 @@
-use std::{sync::{Mutex, Arc}, collections::HashMap, error::Error, vec, fmt::Display};
+use std::{sync::{Mutex, Arc}, error::Error, fmt::Display};

 use serde::Serialize;
-use serde_json::Value;
-use crate::{CommandContext, PluginStore, Agents, AgentInfo, LLMProvider, LLMModel, LLM, ChatGPTProvider, ChatGPTConfig, memory_from_provider, LocalProvider, auto::{run_auto, Action, DisallowedAction, Update}, GoogleData};
+
+use crate::{CommandContext, auto::{run_auto, Action, DisallowedAction, Update}};

 #[derive(Debug, Clone)]
 pub struct NoPluginError(pub String);
diff --git a/src/auto/agents/worker/actor.rs b/src/auto/agents/worker/actor.rs
index 49a0b30..1bc6b8f 100644
--- a/src/auto/agents/worker/actor.rs
+++ b/src/auto/agents/worker/actor.rs
@@ -15,5 +15,5 @@ pub fn use_tool(
     let agent = get_agent(context);
     agent.llm.clear_history();

-    return Ok(out);
+    Ok(out)
 }
\ No newline at end of file
diff --git a/src/auto/agents/worker/adept.rs b/src/auto/agents/worker/adept.rs
index effacd3..08c979e 100644
--- a/src/auto/agents/worker/adept.rs
+++ b/src/auto/agents/worker/adept.rs
@@ -1,8 +1,8 @@
 use std::{error::Error, ops::Deref, fmt::Display};
-use colored::Colorize;
+
 use serde::{Serialize, Deserialize};

-use crate::{CommandContext, AgentInfo, Message, auto::{try_parse_json, agents::{worker::{log_yaml, run_method_agent}, prompt::{CONCISE_PLAN, ConcisePlanInfo, PersonalityInfo, PERSONALITY, THOUGHTS, ThoughtInfo, NewThoughtInfo, NEW_THOUGHTS}}, run::Action, DisallowedAction, DynamicUpdate}, ScriptValue};
+use crate::{CommandContext, AgentInfo, Message, auto::{try_parse_json, agents::{worker::{run_method_agent}, prompt::{CONCISE_PLAN, ConcisePlanInfo, PersonalityInfo, PERSONALITY, THOUGHTS, ThoughtInfo, NewThoughtInfo, NEW_THOUGHTS}}, run::Action, DisallowedAction, DynamicUpdate}, ScriptValue};

 use super::Update;

@@ -74,7 +74,7 @@ pub fn get_response(
     let mut data: Option = None;

-    if assets.len() > 0 {
+    if !assets.is_empty() {
         data = Some(
             assets.iter()
                 .map(|el| format!("## Asset `${el}`:\n{}", context.assets[el]))
@@ -87,7 +87,7 @@
             Ok(out)
         },
         "brainstorm" => {
-            Ok(format!("Successfully brainstormed."))
+            Ok("Successfully brainstormed.".to_string())
         }
         "final_response" => {
             let FinalResponseArgs { response } = thoughts.decision.args.parse()?;
@@ -95,7 +95,7 @@
             Ok(response)
         },
         decision_type => {
-            return Err(Box::new(NoDecisionTypeError(decision_type.to_string())))
+            Err(Box::new(NoDecisionTypeError(decision_type.to_string())))
         }
     }
 }
@@ -136,13 +136,13 @@ pub fn run_brain_agent(
     listen_to_update(&Update::DynamicAgent(DynamicUpdate::Thoughts(thoughts.clone())))?;

-    drop(agent);
+    let _ = agent;

     let mut response = get_response(
         context,
         &|ctx| &mut ctx.agents.static_agent,
         &|ctx| &mut ctx.agents.planner,
         &thoughts,
-        &personality,
+        personality,
         allow_action,
         listen_to_update
     )?;
@@ -153,12 +153,11 @@ pub fn run_brain_agent(
     loop {
         let cloned_assets = context.assets.clone();
-        let asset_list = if cloned_assets.len() == 0 {
-            format!("No assets.")
+        let _asset_list = if cloned_assets.is_empty() {
+            "No assets.".to_string()
         } else {
             cloned_assets
-                .keys()
-                .map(|asset| asset.clone())
+                .keys().cloned()
                 .collect::>()
                 .join(", ")
         };
@@ -182,7 +181,7 @@ pub fn run_brain_agent(
         &|ctx| &mut ctx.agents.static_agent,
         &|ctx| &mut ctx.agents.planner,
         &thoughts,
-        &personality,
+        personality,
         allow_action,
         listen_to_update
     )?;
diff --git a/src/auto/agents/worker/methodical.rs b/src/auto/agents/worker/methodical.rs
index 9d1454c..682a1be 100644
--- a/src/auto/agents/worker/methodical.rs
+++ b/src/auto/agents/worker/methodical.rs
@@ -1,11 +1,11 @@
 use std::{error::Error, collections::{HashSet}};
-use colored::Colorize;
+
 use serde::{Serialize, Deserialize};

 use crate::{CommandContext, AgentInfo, Message, auto::{run::Action, try_parse_json, agents::{worker::create_tool_list, prompt::{SUMMARIZE_MEMORIES, NoData, PERSONALITY, PersonalityInfo, CREATE_PLAN, CreatePlanInfo, NextStepInfo, NEXT_STEP, SAVE_ASSET, SaveAssetInfo}}, DisallowedAction, StaticUpdate, Update, NamedAsset}, Weights, Tool};

-use super::{log_yaml, use_tool};
+use super::{use_tool};

 #[derive(Serialize, Deserialize, Clone, Debug)]
 pub struct MethodicalThoughts {
@@ -102,7 +102,7 @@ pub fn run_method_agent(
     let tools = create_tool_list(&tools);
     let cloned_assets = context.assets.clone();
-    let assets_before: HashSet<&String> = cloned_assets.keys().collect();
+    let _assets_before: HashSet<&String> = cloned_assets.keys().collect();

     get_agent(context).llm.clear_history();

@@ -122,7 +122,7 @@
         },
         30
     )?;
-    let observations = if observations.len() == 0 {
+    let observations = if observations.is_empty() {
         "None found.".to_string()
     } else {
         observations.iter()
@@ -131,7 +131,7 @@
             .map(|el| format!("- {el}"))
             .join("\n")
     };

-    let data = assets.unwrap_or(format!("No assets."));
+    let data = assets.unwrap_or("No assets.".to_string());

     planner.llm.prompt.push(Message::User(
         CREATE_PLAN.fill(CreatePlanInfo {
@@ -151,7 +151,7 @@
     let prompt = planner.llm.prompt.clone();
     let message_history = planner.llm.message_history.clone();

-    drop(planner);
+    let _ = planner;

     let agent = get_agent(context);
     agent.llm.prompt = prompt;
@@ -173,7 +173,7 @@
     agent.llm.message_history.push(Message::Assistant(thoughts.raw));
     let thoughts = thoughts.data;

-    drop(agent);
+    let _ = agent;

     listen_to_update(&Update::StaticAgent(StaticUpdate::Thoughts(thoughts.clone())))?;
     allow_action(&thoughts.action)?;
@@ -220,7 +220,7 @@
     let asset_content = agent.llm.model.get_response_sync(&agent.llm.get_messages(), Some(800), Some(0.3))?;

     agent.llm.message_history.pop();
-    drop(agent);
+    let _ = agent;

     *context.assets
         .entry(asset.name.clone())
@@ -235,8 +235,8 @@
     add_memories(agent, listen_to_update)?;

-    let asset_str = if changed_assets.len() == 0 {
-        format!("No assets changed.")
+    let asset_str = if changed_assets.is_empty() {
+        "No assets changed.".to_string()
     } else {
         changed_assets
             .iter()
             .map(|el| format!("## Asset `{}`\n{}", el.0, el.1))
@@ -247,5 +247,5 @@
     let resp = format!("Assets:\n\n{}", asset_str);

-    return Ok(resp);
+    Ok(resp)
 }
\ No newline at end of file
diff --git a/src/auto/agents/worker/mod.rs b/src/auto/agents/worker/mod.rs
index fc4b679..fb80977 100644
--- a/src/auto/agents/worker/mod.rs
+++ b/src/auto/agents/worker/mod.rs
@@ -1,5 +1,5 @@
 use std::{error::Error};
-use crate::{SmartGPT, AgentInfo, auto::{run::Action, DisallowedAction}};
+use crate::{SmartGPT, auto::{run::Action, DisallowedAction}};
 use serde::Serialize;

 mod adept;
diff --git a/src/auto/agents/worker/tools.rs b/src/auto/agents/worker/tools.rs
index 087934d..41be915 100644
--- a/src/auto/agents/worker/tools.rs
+++ b/src/auto/agents/worker/tools.rs
@@ -13,25 +13,23 @@ pub fn create_filtered_tool_list(header: &str, tools: &[&Tool], tool_type: ToolT
         let Tool { name, purpose, args, .. } = tool;

-        let mut schema = format!("{{ ");
+        let mut schema = "{ ".to_string();

         for arg in args {
             let ToolArgument { name, example } = arg;
             schema.push_str(&format!(r#""{name}": {example}, "#))
         }

         schema = schema.trim_end_matches(", ").to_string();
-        schema.push_str(&format!(" }}"));
+        schema.push_str(" }");

         prompt.push('\n');
         prompt.push_str(&format!("{name} {schema} - {purpose}"));
     }

-    return prompt;
+    prompt
 }

 pub fn create_tool_list(tools: &[&Tool]) -> String {
-    vec![
-        create_filtered_tool_list("Resources", tools, ToolType::Resource),
-        create_filtered_tool_list("Actions", tools, ToolType::Action { needs_permission: false })
-    ].join("\n\n")
+    [create_filtered_tool_list("Resources", tools, ToolType::Resource),
+    create_filtered_tool_list("Actions", tools, ToolType::Action { needs_permission: false })].join("\n\n")
 }
\ No newline at end of file
diff --git a/src/auto/classify.rs b/src/auto/classify.rs
index d4542f6..ce10f5d 100644
--- a/src/auto/classify.rs
+++ b/src/auto/classify.rs
@@ -15,6 +15,7 @@ pub struct Classification {
     classification: String,
 }

+#[allow(dead_code)]
 pub fn is_task(smartgpt: &mut SmartGPT, task: &str) -> Result> {
     let SmartGPT {
         context, ..
@@ -23,23 +24,23 @@ pub fn is_task(smartgpt: &mut SmartGPT, task: &str) -> Result) -> std::fmt::Result {
-        write!(f, "{}", "could not parse.")
+        write!(f, "could not parse.")
     }
 }

@@ -104,9 +105,9 @@ pub fn try_parse_base(llm: &LLM, tries: usize, max_tokens:
     let response = llm.model.get_response_sync(&llm.get_messages(), max_tokens, temperature)?;
     let processed_response = response.trim();
     let processed_response = processed_response.strip_prefix("```")
-        .unwrap_or(&processed_response)
+        .unwrap_or(processed_response)
         .to_string();
-    let processed_response = processed_response.strip_prefix(&format!("{lang}"))
+    let processed_response = processed_response.strip_prefix(&lang.to_string())
         .unwrap_or(&response)
         .to_string();
     let processed_response = processed_response.strip_suffix("```")
diff --git a/src/auto/responses.rs b/src/auto/responses.rs
index bea8d81..7100fea 100644
--- a/src/auto/responses.rs
+++ b/src/auto/responses.rs
@@ -9,11 +9,12 @@ pub struct Response {
     response: String
 }

+#[allow(dead_code)]
 pub fn create_runner_prompt() -> String {
-    format!(
-r#"Now, please write a response back to the user. Tell the user, in detail, everything you did, the outcome, and any permanent changes that were carried out."#)
+    r#"Now, please write a response back to the user. Tell the user, in detail, everything you did, the outcome, and any permanent changes that were carried out."#.to_string()
 }

+#[allow(dead_code)]
 pub fn ask_for_responses(agent: &mut AgentInfo) -> Result> {
     agent.llm.message_history.push(Message::User(create_runner_prompt()));
diff --git a/src/lib.rs b/src/lib.rs
index 69ced88..21ef22d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -10,12 +10,14 @@ mod auto;
 mod log;

 pub use plugin::*;
-pub use plugins::*;
 pub use tools::*;
 pub use chunk::*;
 pub use llms::*;
 pub use api::*;
 pub use runner::*;
 pub use memory::*;
+#[allow(ambiguous_glob_reexports)]
+pub use plugins::*;
+#[allow(ambiguous_glob_reexports)]
 pub use auto::*;
 pub use log::*;
\ No newline at end of file
diff --git a/src/llms/chatgpt.rs b/src/llms/chatgpt.rs
index e9aa24a..dbecfe3 100644
--- a/src/llms/chatgpt.rs
+++ b/src/llms/chatgpt.rs
@@ -4,7 +4,7 @@ use async_openai::{Client, types::{CreateChatCompletionResponse, CreateChatCompl
 use async_trait::async_trait;
 use serde::{Serialize, Deserialize};
 use serde_json::Value;
-use tiktoken_rs::{async_openai::{get_chat_completion_max_tokens, num_tokens_from_messages}, model::get_context_size, cl100k_base, r50k_base};
+use tiktoken_rs::{async_openai::{get_chat_completion_max_tokens, num_tokens_from_messages}, model::get_context_size, r50k_base};

 use crate::{LLMProvider, Message, LLMModel};
diff --git a/src/llms/local.rs b/src/llms/local.rs
index f4bde2f..e352b1f 100644
--- a/src/llms/local.rs
+++ b/src/llms/local.rs
@@ -72,8 +72,8 @@ impl LLMModel for LocalLLM {
         2048
     }

-    fn get_tokens_from_text(&self, text: &str) -> Result, Box> {
-        return Ok(vec![])
+    fn get_tokens_from_text(&self, _text: &str) -> Result, Box> {
+        Ok(vec![])
     }
 }

@@ -104,7 +104,7 @@ impl LLMProvider for LocalLLMProvider {
         mmap
     } = serde_json::from_value(value)?;

     let model = load_dynamic(
-        match model_type.to_ascii_lowercase().replace("-", "").as_str() {
+        match model_type.to_ascii_lowercase().replace('-', "").as_str() {
             "llama" => llm::ModelArchitecture::Llama,
             "bloom" => llm::ModelArchitecture::Bloom,
             "gpt2" => llm::ModelArchitecture::Gpt2,
@@ -114,10 +114,10 @@
                 return Err(Box::new(NoLocalModelError(format!("unknown model: {model_type}"))))
             }
         },
-        &Path::new(&model_path),
+        Path::new(&model_path),
         ModelParameters {
             prefer_mmap: mmap.unwrap_or(true),
-            n_context_tokens: n_context_tokens,
+            n_context_tokens,
             ..Default::default()
         },
         |_| {}
diff --git a/src/llms/mod.rs b/src/llms/mod.rs
index ed5c582..002d84c 100644
--- a/src/llms/mod.rs
+++ b/src/llms/mod.rs
@@ -37,7 +37,7 @@ impl Display for Message {
             Self::User(_) => "USER"
         };

-        write!(f, "-- {header} --\n")?;
+        writeln!(f, "-- {header} --")?;
         write!(f, "{}", self.content())
     }
 }
@@ -193,7 +193,7 @@ pub fn format_prompt(messages: &[Message]) -> String {
             },
             message.content()
         ));
-        out.push_str("\n");
+        out.push('\n');
     }

     out.push_str("ASSISTANT: ");
diff --git a/src/main.rs b/src/main.rs
index 6117192..0be2966 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -8,7 +8,7 @@ pub struct NoThoughtError;

 impl Display for NoThoughtError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", "no thought detected.")
+        write!(f, "no thought detected.")
     }
 }

@@ -49,7 +49,7 @@ fn main() -> Result<(), Box> {
         }
     }

-    let tools = if plugin.tools.len() == 0 {
+    let tools = if plugin.tools.is_empty() {
         vec![ "".white() ]
     } else {
         plugin.tools.iter()
diff --git a/src/memory/qdrant/system.rs b/src/memory/qdrant/system.rs
index 15eefdd..51a9327 100644
--- a/src/memory/qdrant/system.rs
+++ b/src/memory/qdrant/system.rs
@@ -98,7 +98,7 @@ impl MemorySystem for QdrantMemorySystem {
         min_count: usize,
     ) -> Result, Box> {
         let embedding = llm.model.get_base_embed(memory).await?;
-        let latest_point_id_option = self.latest_point_id.lock().await.clone();
+        let latest_point_id_option = *self.latest_point_id.lock().await;
         let latest_point_id = latest_point_id_option.unwrap_or(0);

         let mut points: Vec = vec![];
@@ -151,13 +151,10 @@ impl MemorySystem for QdrantMemorySystem {
         let relevant_memories_result: Result, _> = search_result
             .iter()
-            .map(|point| convert_to_relevant_memory(point))
+            .map(convert_to_relevant_memory)
             .collect();

-        match relevant_memories_result {
-            Ok(relevant_memories) => Ok(relevant_memories),
-            Err(e) => Err(e),
-        }
+        relevant_memories_result
     }

     async fn decay_recency(&mut self, _decay_factor: f32) -> Result<(), Box> {
diff --git a/src/memory/qdrant/utils.rs b/src/memory/qdrant/utils.rs
index afe53db..7e615d9 100644
--- a/src/memory/qdrant/utils.rs
+++ b/src/memory/qdrant/utils.rs
@@ -55,7 +55,7 @@ pub fn create_initial_collection(name: String) -> CreateCollection {
         ..Default::default()
     });

-    return create_collection;
+    create_collection
 }

 pub fn convert_to_relevant_memory(point: &ScoredPoint) -> Result> {
diff --git a/src/memory/redis/system.rs b/src/memory/redis/system.rs
index 8c06977..50f7b5e 100644
--- a/src/memory/redis/system.rs
+++ b/src/memory/redis/system.rs
@@ -43,7 +43,7 @@ impl MemorySystem for RedisMemorySystem {
                 recency: 1.,
                 recall: 1.,
             },
-            embedding: embedding
+            embedding
         };

         let mut latest_point_id = self.latest_point_id.lock().await;
@@ -70,7 +70,7 @@ impl MemorySystem for RedisMemorySystem {
             Bulk(items) => {
                 items
                     .chunks_exact(2)
-                    .filter_map(|chunk| match (chunk.get(0), chunk.get(1)) {
+                    .filter_map(|chunk| match (chunk.first(), chunk.get(1)) {
                         (Some(Data(key)), Some(Data(value))) => {
                             let score: f32 = String::from_utf8_lossy(value)
                                 .parse()
@@ -146,7 +146,7 @@ impl MemoryProvider for RedisProvider {
                 Ok(()) => {Ok(())}
                 Err(err) => {
                     eprintln!("Failed to create vector index: {}", err);
-                    return Err(Box::new(err));
+                    Err(Box::new(err))
                 }
             }
         })?;
diff --git a/src/memory/redis/utils.rs b/src/memory/redis/utils.rs
index 9943076..3000c1b 100644
--- a/src/memory/redis/utils.rs
+++ b/src/memory/redis/utils.rs
@@ -74,8 +74,7 @@ pub async fn search_vector_field(
     let query_blob_str = STANDARD.encode(query_blob);

-    Ok(
-        execute_redis_tool::(
         con,
         "FT.SEARCH",
         &[
@@ -88,8 +87,7 @@ pub async fn search_vector_field(
             "DIALECT", "2",
         ],
-    ).await?
-    )
+    ).await
 }

 pub async fn set_json_record(
@@ -103,7 +101,7 @@
         &[
             point_id,
             "$",
-            &serde_json::to_value(&embedded_memory)?.to_string(),
+            &serde_json::to_value(embedded_memory)?.to_string(),
         ],
     ).await
 }
\ No newline at end of file
diff --git a/src/plugin.rs b/src/plugin.rs
index a6151b1..fd4265a 100644
--- a/src/plugin.rs
+++ b/src/plugin.rs
@@ -1,7 +1,7 @@
 use std::{collections::HashMap, error::Error, fmt::Display, any::Any};

 use async_trait::async_trait;
-use serde::{Serialize, de::DeserializeOwned, Deserialize};
+use serde::{Serialize, de::DeserializeOwned};
 use serde_json::Value;

 #[derive(Debug, Clone)]
@@ -26,7 +26,7 @@ impl<'a> Display for CommandNoArgError<'a> {

 impl<'a> Error for CommandNoArgError<'a> {}

-use crate::{LLM, ScriptValue, MemorySystem, AutoType};
+use crate::{LLM, ScriptValue, MemorySystem};

 #[async_trait]
 pub trait PluginData: Any + Send + Sync {
@@ -35,6 +35,12 @@ pub trait PluginData: Any + Send + Sync {

 pub struct PluginStore(pub HashMap>);

+impl Default for PluginStore {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 impl PluginStore {
     pub fn new() -> Self {
         Self(HashMap::new())
diff --git a/src/plugins/brainstorm/mod.rs b/src/plugins/brainstorm/mod.rs
index 9d8569e..cf8a9a7 100644
--- a/src/plugins/brainstorm/mod.rs
+++ b/src/plugins/brainstorm/mod.rs
@@ -18,7 +18,7 @@ pub struct BrainstormImpl;

 #[async_trait]
 impl CommandImpl for BrainstormImpl {
-    async fn invoke(&self, ctx: &mut CommandContext, args: ScriptValue) -> Result> {
+    async fn invoke(&self, _ctx: &mut CommandContext, _args: ScriptValue) -> Result> {
         Ok(CommandResult::ScriptValue(brainstorm().await?))
     }
diff --git a/src/plugins/browse/mod.rs b/src/plugins/browse/mod.rs
index dbf69a7..04aed7c 100644
--- a/src/plugins/browse/mod.rs
+++ b/src/plugins/browse/mod.rs
@@ -3,7 +3,7 @@ use async_trait::async_trait;
 use colored::Colorize;
 use readability::extractor;
 use reqwest::{Client, header::{USER_AGENT, HeaderMap}};
-use textwrap::wrap;
+

 mod extract;

@@ -51,7 +51,7 @@ pub struct BrowseNoArgError;

 impl Display for BrowseNoArgError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", "'browse-article' tool did not receive one of its arguments.")
+        write!(f, "'browse-article' tool did not receive one of its arguments.")
     }
 }

@@ -63,7 +63,7 @@ pub struct NoContentError {
     help: String
 }

-fn chunk_text(llm: &LLM, text: &str, chunk_size: usize) -> Result, Box> {
+fn chunk_text(llm: &LLM, text: &str, _chunk_size: usize) -> Result, Box> {
     let tokens = llm.get_tokens_from_text(text)?;

     Ok(
diff --git a/src/plugins/filesystem/mod.rs b/src/plugins/filesystem/mod.rs
index 4a8c682..90fe087 100644
--- a/src/plugins/filesystem/mod.rs
+++ b/src/plugins/filesystem/mod.rs
@@ -53,9 +53,7 @@ pub async fn file_write(_ctx: &mut CommandContext, args: ScriptValue, append: bo
 pub async fn file_list(_ctx: &mut CommandContext, _args: ScriptValue) -> Result> {
     let files = fs::read_dir("./files/")?;
     let files = files
-        .map(|el| el.map(|el| el.path().display().to_string()))
-        .filter(|el| el.is_ok())
-        .map(|el| el.unwrap())
+        .flat_map(|el| el.map(|el| el.path().display().to_string()))
         .collect::>();

     Ok(ScriptValue::List(files.iter().map(|el| el.clone().into()).collect()))
@@ -131,12 +129,10 @@ impl PluginCycle for FileCycle {
     async fn create_context(&self, _context: &mut CommandContext, _previous_prompt: Option<&str>) -> Result, Box> {
         let files = fs::read_dir("files")?;
         let files = files
-            .map(|el| el.map(|el| el.path().display().to_string()))
-            .filter(|el| el.is_ok())
-            .map(|el| el.unwrap())
+            .flat_map(|el| el.map(|el| el.path().display().to_string()))
             .collect::>();

-        Ok(Some(if files.len() == 0 {
+        Ok(Some(if files.is_empty() {
             "Files: No saved files.".to_string()
         } else {
             format!("Files: {} (Consider reading these.)", files.join(", "))
diff --git a/src/plugins/google/mod.rs b/src/plugins/google/mod.rs
index 0370b69..498f1d9 100644
--- a/src/plugins/google/mod.rs
+++ b/src/plugins/google/mod.rs
@@ -14,7 +14,7 @@ pub struct GoogleNoQueryError;

 impl Display for GoogleNoQueryError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", "'google' tool did not receive a query.")
+        write!(f, "'google' tool did not receive a query.")
     }
 }
diff --git a/src/plugins/news/mod.rs b/src/plugins/news/mod.rs
index d94ec7c..0f1dc53 100644
--- a/src/plugins/news/mod.rs
+++ b/src/plugins/news/mod.rs
@@ -20,7 +20,7 @@ pub struct NewsNoQueryError;

 impl Display for NewsNoQueryError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", "one of the 'news' tools did not receive enough info.")
+        write!(f, "one of the 'news' tools did not receive enough info.")
     }
 }
diff --git a/src/plugins/wolfram/mod.rs b/src/plugins/wolfram/mod.rs
index ea586ab..fe209a6 100644
--- a/src/plugins/wolfram/mod.rs
+++ b/src/plugins/wolfram/mod.rs
@@ -12,7 +12,7 @@ pub struct WolframNoQueryError;

 impl Display for WolframNoQueryError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", "one of the 'wolfram' tools did not receive enough info.")
+        write!(f, "one of the 'wolfram' tools did not receive enough info.")
     }
 }

@@ -64,7 +64,7 @@ pub async fn wolfram(ctx: &mut CommandContext, args: ScriptValue) -> Result 0 {
+    let response = if !response.trim().is_empty() {
         response
     } else {
         "Sorry, but Wolfram Alpha did not understand your query. Please try using another tool.".to_string()
diff --git a/src/runner/mod.rs b/src/runner/mod.rs
index 41608af..e518a01 100644
--- a/src/runner/mod.rs
+++ b/src/runner/mod.rs
@@ -4,4 +4,3 @@ mod convert;

 pub use parse::*;
 pub use scriptvalue::*;
-pub use convert::*;
\ No newline at end of file
diff --git a/src/runner/parse.rs b/src/runner/parse.rs
index 6a673e1..e4c8989 100644
--- a/src/runner/parse.rs
+++ b/src/runner/parse.rs
@@ -32,7 +32,7 @@ impl Debug for Primitive {
                     .map(|el| el.to_string())
                     .collect::>()
                     .join("");
-                text.push_str(&r#"...""#);
+                text.push_str(r#"...""#);
                 write!(f, "{}", text)
             } else {
                 write!(f, "{:?}", string)
@@ -85,7 +85,7 @@ impl Debug for Expression {
             }
             Expression::Dict(dict) => {
                 write!(f, "{{")?;
-                if dict.len() > 0 {
+                if !dict.is_empty() {
                     write!(f, " ")?;
                 }
                 for (ind, (key, value)) in dict.iter().enumerate() {
@@ -205,7 +205,7 @@ pub fn to_expr(node: ExprKind) -> Result {
             }

             let hash_map: HashMap = parsed_keys.into_iter()
-                .zip(parsed_values.into_iter())
+                .zip(parsed_values)
                 .collect();

             Ok(Expression::Dict(hash_map))
@@ -226,7 +226,7 @@ pub enum Statement {
 pub fn to_statement(statement: StmtKind) -> Result {
     match statement {
         StmtKind::Expr { value } => {
-            to_expr(value.node).map(|el| Statement::Expression(el))
+            to_expr(value.node).map(Statement::Expression)
         }
         StmtKind::Assign { targets, value, .. } => {
             let target = to_expr(targets[0].node.clone())?;
diff --git a/src/tools.rs b/src/tools.rs
index 4ca5775..1981217 100644
--- a/src/tools.rs
+++ b/src/tools.rs
@@ -11,7 +11,7 @@ pub async fn generate_context(context: &mut CommandContext, plugins: &[Plugin],
         }
     }

-    Ok(if out.len() > 0 {
+    Ok(if !out.is_empty() {
         out.join("\n\n") + "\n\n"
     } else {
         "".to_string()