diff --git a/config.toml b/config.toml
index d3affcf..b0c8b68 100644
--- a/config.toml
+++ b/config.toml
@@ -2,6 +2,10 @@
 api-key = ""
 base-url = "api.openai.com"
+chroot-dir = "/home/bot/root"
+
+# If using chroot (recommended) then this will be relative.
+command-path = "/cmds"
 
 # If you don't already know the model name you can generally find a listing
 # on the models API pages.
 
diff --git a/src/command.rs b/src/command.rs
deleted file mode 100644
index e69de29..0000000
diff --git a/src/main.rs b/src/main.rs
index 270c344..b639706 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -3,6 +3,7 @@ use color_eyre::{
     eyre::WrapErr,
 };
 use human_panic::setup_panic;
+use std::os::unix::fs;
 use tracing::{
     Level,
     info,
@@ -10,12 +11,12 @@ use tracing::{
 use tracing_subscriber::FmtSubscriber;
 
 mod chat;
-mod command;
+mod commands;
 mod qna;
 mod setup;
 
-const DEFAULT_INSTRUCT: &'static str =
-"You are a shady, yet helpful IRC bot. You try to give responses that can
+const DEFAULT_INSTRUCT: &str =
+    "You are a shady, yet helpful IRC bot. You try to give responses that can
 be sent in a single IRC response according to the specification.";
 
 #[tokio::main]
@@ -36,6 +37,13 @@ async fn main() -> Result<()> {
     let settings = setup::init().await.wrap_err("Failed to initialize.")?;
     let config = settings.config;
 
+    // chroot if applicable.
+    if let Ok(chroot_path) = config.get_string("chroot-dir") {
+        fs::chroot(&chroot_path)
+            .wrap_err_with(|| format!("Failed setting chroot '{}'", chroot_path.to_string()))?;
+        std::env::set_current_dir("/").wrap_err("Couldn't change directory after chroot.")?;
+    }
+
     let handle = qna::new(
         config.get_string("api-key").wrap_err("API missing.")?,
         config
@@ -44,7 +52,9 @@ async fn main() -> Result<()> {
         config
             .get_string("model")
             .wrap_err("model string missing.")?,
-        config.get_string("instruct").unwrap_or_else(|_| DEFAULT_INSTRUCT.to_string()),
+        config
+            .get_string("instruct")
+            .unwrap_or_else(|_| DEFAULT_INSTRUCT.to_string()),
     )
     .wrap_err("Couldn't initialize LLM handle.")?;
     let mut c = chat::new(&config, &handle).await?;
diff --git a/src/setup.rs b/src/setup.rs
index 7453afa..5db02d9 100644
--- a/src/setup.rs
+++ b/src/setup.rs
@@ -1,15 +1,9 @@
 use clap::Parser;
-use color_eyre::{
-    Result,
-    eyre::WrapErr,
-};
+use color_eyre::{Result, eyre::WrapErr};
 use config::Config;
 use directories::ProjectDirs;
 use std::path::PathBuf;
-use tracing::{
-    info,
-    instrument,
-};
+use tracing::{info, instrument};
 
 // TODO: use [clap(long, short, help_heading = Some(section))]
 #[derive(Clone, Debug, Parser)]
@@ -23,6 +17,14 @@ pub(crate) struct Args {
     /// Base URL for the LLM API to use.
     pub(crate) base_url: Option<String>,
 
+    /// Directory to use for chroot (recommended).
+    #[arg(long)]
+    pub(crate) chroot_dir: Option<String>,
+
+    /// Root directory for file based command structure.
+    #[arg(long)]
+    pub(crate) command_dir: Option<String>,
+
     #[arg(long)]
     /// Instructions to the model on how to behave.
     pub(crate) intruct: Option<String>,
@@ -30,7 +32,7 @@
     #[arg(long)]
     pub(crate) model: Option<String>,
 
-    #[arg(long)]
+    #[arg(long = "channel")]
     /// List of IRC channels to join.
     pub(crate) channels: Option<Vec<String>>,
 
@@ -93,6 +95,8 @@ pub async fn init() -> Result {
         // but a derive macro could do this a bit better if this becomes too large.
         .set_override_option("api-key", args.api_key.clone())?
         .set_override_option("base-url", args.base_url.clone())?
+        .set_override_option("chroot-dir", args.chroot_dir.clone())?
+        .set_override_option("command-path", args.command_dir.clone())?
         .set_override_option("model", args.model.clone())?
         .set_override_option("instruct", args.model.clone())?
         .set_override_option("channels", args.channels.clone())?
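A rough sketch of the startup ordering these hunks implement: chroot first (when chroot-dir is set), then resolve the configured command-path inside the new root. This is illustrative only, not code from the PR; the standalone main, the hard-coded paths, and the directory listing are assumptions, and running it requires root (or CAP_SYS_CHROOT).

use std::os::unix::fs;

// Minimal reproduction of the chroot-then-relative-path behaviour added in
// src/main.rs, using the default paths from config.toml.
fn main() -> std::io::Result<()> {
    // Stand-in for config.get_string("chroot-dir") succeeding.
    let chroot_dir = Some(String::from("/home/bot/root"));

    if let Some(dir) = chroot_dir {
        fs::chroot(&dir)?;
        // Without this the process keeps a working directory outside the
        // new root, which defeats the point of the chroot.
        std::env::set_current_dir("/")?;
    }

    // "/cmds" now resolves to /home/bot/root/cmds on the host, which is why
    // config.toml notes that command-path is relative when chroot is used.
    for entry in std::fs::read_dir("/cmds")? {
        println!("command file: {}", entry?.path().display());
    }
    Ok(())
}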