Initial commit.

This commit is contained in:
Micheal Smith
2025-08-05 01:47:11 -05:00
commit 16af21b54e
10 changed files with 3688 additions and 0 deletions

89
src/chat.rs Normal file
View File

@@ -0,0 +1,89 @@
use color_eyre::{
Result,
eyre::{
OptionExt,
WrapErr,
},
};
// Lots of namespace confusion potential
use crate::qna::LLMHandle;
use config::Config as MainConfig;
use futures::StreamExt;
use irc::client::prelude::{
Client as IRCClient,
Command,
Config as IRCConfig,
};
use tracing::{
Level,
event,
instrument,
};
/// IRC chat frontend: pairs a connected IRC client with the LLM handle
/// used to answer `!gem`-prefixed channel messages (see `run`).
#[derive(Debug)]
pub struct Chat {
    /// Connected IRC client, created in [`new`] from the main config.
    client: IRCClient,
    llm_handle: LLMHandle, // FIXME: This needs to be thread safe, and shared, etc.
}
// Need: owners, channels, username, nick, server, password
/// Builds a [`Chat`] from the application settings and an LLM handle.
///
/// `port` and `channels` are required; the remaining connection fields
/// fall back to the irc crate's defaults when absent.
///
/// # Errors
/// Fails when required settings are missing/invalid or the IRC
/// connection cannot be established.
#[instrument]
pub async fn new(settings: &MainConfig, handle: &LLMHandle) -> Result<Chat> {
    // Going to just assign and let the irc library handle errors for now, and
    // add my own checking if necessary.
    let port: u16 = settings.get("port").wrap_err("Missing or invalid port.")?;
    let channels: Vec<String> = settings.get("channels")
        .wrap_err("No channels provided.")?;
    event!(Level::INFO, "Channels = {:?}", channels);
    let config = IRCConfig {
        server: settings.get_string("server").ok(),
        nickname: settings.get_string("nickname").ok(),
        port: Some(port),
        username: settings.get_string("username").ok(),
        // BUG FIX: nick_password was accepted by the CLI/config but never
        // forwarded to the IRC client; pass it through when configured.
        nick_password: settings.get_string("nick_password").ok(),
        use_tls: settings.get_bool("use_tls").ok(),
        channels,
        ..IRCConfig::default()
    };
    event!(Level::INFO, "IRC connection starting...");
    Ok(Chat {
        client: IRCClient::from_config(config).await?,
        llm_handle: handle.clone(),
    })
}
impl Chat {
    /// Runs the IRC event loop: identifies with the server, spawns a task
    /// to drive outgoing traffic, and answers `!gem`-prefixed PRIVMSGs
    /// with LLM completions.
    ///
    /// # Errors
    /// Returns an error if identification fails, the incoming stream
    /// yields an error, an LLM request fails, or a reply cannot be sent.
    pub async fn run(&mut self) -> Result<()> {
        let client = &mut self.client;
        client.identify()?;
        // The outgoing future must be polled for queued messages to
        // actually be written to the server.
        let outgoing = client
            .outgoing()
            .ok_or_eyre("Couldn't get outgoing irc sink.")?;
        let mut stream = client.stream()?;
        // Drive the outgoing sink on its own task so the receive loop
        // below is never blocked by sends.
        tokio::spawn(async move {
            if let Err(e) = outgoing.await {
                event!(Level::ERROR, "Failed to drive output: {}", e);
            }
        });
        // transpose() turns Option<Result<_>> into Result<Option<_>> so
        // `?` surfaces stream errors while `None` ends the loop cleanly.
        while let Some(message) = stream.next().await.transpose()? {
            if let Command::PRIVMSG(channel, message) = message.command {
                if message.starts_with("!gem") {
                    // NOTE(review): the whole message, including the
                    // "!gem" prefix, is forwarded to the LLM — confirm
                    // that is intended rather than stripping the prefix.
                    let msg = self.llm_handle.send_request(message).await?;
                    event!(Level::INFO, "Message received.");
                    client
                        .send_privmsg(channel, msg)
                        .wrap_err("Couldn't send response to channel.")?;
                }
            }
        }
        Ok(())
    }
}

0
src/command.rs Normal file
View File

53
src/main.rs Normal file
View File

@@ -0,0 +1,53 @@
use color_eyre::{
Result,
eyre::WrapErr,
};
use human_panic::setup_panic;
use tracing::{
Level,
info,
};
use tracing_subscriber::FmtSubscriber;
mod chat;
mod command;
mod qna;
mod setup;
/// Entry point: installs panic handlers and tracing, loads settings,
/// creates the LLM handle, and runs the IRC chat loop until it ends.
#[tokio::main]
async fn main() -> Result<()> {
    // Some error sprucing.
    better_panic::install();
    setup_panic!();
    let subscriber = FmtSubscriber::builder()
        .with_max_level(Level::TRACE)
        .finish();
    tracing::subscriber::set_global_default(subscriber)
        .wrap_err("Failed to setup trace logging.")?;
    info!("Starting");
    let settings = setup::init().await.wrap_err("Failed to initialize.")?;
    let config = settings.config;
    let handle = qna::new(
        config.get_string("api-key").wrap_err("API missing.")?,
        config
            .get_string("base-url")
            .wrap_err("base-url missing.")?,
        config
            .get_string("model")
            .wrap_err("model string missing.")?,
        "You are a shady, yet helpful IRC bot. You try to give responses that can
be sent in a single IRC response according to the specification."
            .to_string(),
    )
    .wrap_err("Couldn't initialize LLM handle.")?;
    let mut c = chat::new(&config, &handle).await?;
    // BUG FIX: was `.unwrap()`, which panicked on any chat-loop failure
    // even though main returns Result; propagate with context instead.
    c.run().await.wrap_err("Chat loop failed.")?;
    Ok(())
}

74
src/qna.rs Normal file
View File

@@ -0,0 +1,74 @@
use color_eyre::Result;
use futures::StreamExt;
use genai::{
Client,
ModelIden,
chat::{
ChatMessage,
ChatRequest,
ChatStreamEvent,
StreamChunk,
},
resolver::{
AuthData,
AuthResolver,
},
};
use tracing::info;
// Represents an LLM completion source.
// FIXME: Clone is probably temporary.
#[derive(Clone, Debug)]
pub struct LLMHandle {
    /// Base request holding the system prompt; cloned for each send so
    /// prior user messages are not accumulated.
    chat_request: ChatRequest,
    /// genai client used to execute streaming chat requests.
    client: Client,
    /// Model identifier passed to `exec_chat_stream`.
    model: String,
}
/// Creates an [`LLMHandle`] for the given model, wiring the API key into
/// the client via an auth resolver and seeding the base request with the
/// system prompt.
///
/// `_base_url` is currently unused (see the leading underscore).
pub fn new(
    api_key: String,
    _base_url: impl AsRef<str>,
    model: impl Into<String>,
    system_role: String,
) -> Result<LLMHandle> {
    // Seed the default request with the system prompt up front.
    let chat_request = ChatRequest::default().with_system(system_role);
    // The resolver closure takes ownership of the key and supplies it
    // whenever the client asks for credentials.
    let resolver = AuthResolver::from_resolver_fn(
        |_model_iden: ModelIden| -> Result<Option<AuthData>, genai::resolver::Error> {
            Ok(Some(AuthData::from_single(api_key)))
        },
    );
    let client = Client::builder().with_auth_resolver(resolver).build();
    info!("New LLMHandle created.");
    Ok(LLMHandle {
        chat_request,
        client,
        model: model.into(),
    })
}
impl LLMHandle {
    /// Sends `message` as a user chat message and returns the streamed
    /// completion concatenated into one string.
    ///
    /// # Errors
    /// Fails if the request cannot be started or the stream yields an
    /// error event.
    pub async fn send_request(&mut self, message: impl Into<String>) -> Result<String> {
        // Clone the base request so the stored system prompt is reused
        // without accumulating previous user messages across calls.
        let req = self
            .chat_request
            .clone()
            .append_message(ChatMessage::user(message.into()));
        // Redundant `self.client.clone()` and `req.clone()` removed; the
        // request is consumed here and the client is used by reference.
        let response = self
            .client
            .exec_chat_stream(&self.model, req, None)
            .await?;
        let mut stream = response.stream;
        let mut text = String::new();
        // BUG FIX: `while let Some(Ok(..))` silently dropped the first
        // stream error and ended the loop; propagate it with `?` instead.
        while let Some(event) = stream.next().await {
            if let ChatStreamEvent::Chunk(StreamChunk { content }) = event? {
                text.push_str(&content);
            }
        }
        Ok(text)
    }
}

108
src/setup.rs Normal file
View File

@@ -0,0 +1,108 @@
use clap::Parser;
use color_eyre::{
Result,
eyre::WrapErr,
};
use config::Config;
use directories::ProjectDirs;
use std::path::PathBuf;
use tracing::{
info,
instrument,
};
// TODO: use [clap(long, short, help_heading = Some(section))]
/// Command-line arguments. Every option is optional here; values are
/// merged over the config file and environment variables in `init`, with
/// CLI flags taking precedence.
#[derive(Clone, Debug, Parser)]
#[command(about, version)]
pub(crate) struct Args {
    #[arg(short, long)]
    /// API Key for the LLM in use.
    pub(crate) api_key: Option<String>,
    #[arg(short, long, default_value = "https://api.openai.com")]
    /// Base URL for the LLM API to use.
    pub(crate) base_url: Option<String>,
    #[arg(long)]
    /// Instructions to the model on how to behave.
    // NOTE(review): field name "intruct" looks like a typo for
    // "instruct"; renaming would change the generated `--intruct` CLI
    // flag, so it is left as-is here.
    pub(crate) intruct: Option<String>,
    #[arg(long)]
    pub(crate) model: Option<String>,
    #[arg(long)]
    /// List of IRC channels to join.
    pub(crate) channels: Option<Vec<String>>,
    #[arg(short, long)]
    /// Custom configuration file location if need be.
    pub(crate) config_file: Option<PathBuf>,
    #[arg(short, long, default_value = "irc.libera.chat")]
    /// IRC server.
    pub(crate) server: Option<String>,
    #[arg(short, long, default_value = "6697")]
    /// Port of the IRC server.
    // Kept as a String; the config layer parses it to u16 downstream.
    pub(crate) port: Option<String>,
    #[arg(long)]
    /// IRC Nickname.
    pub(crate) nickname: Option<String>,
    #[arg(long)]
    /// IRC Nick Password
    pub(crate) nick_password: Option<String>,
    #[arg(long)]
    /// IRC Username
    pub(crate) username: Option<String>,
    #[arg(long)]
    /// Whether or not to use TLS when connecting to the IRC server.
    pub(crate) use_tls: Option<bool>,
}
/// Aggregated application settings produced by [`init`].
pub(crate) struct Setup {
    /// Merged configuration: config file, then environment, then CLI
    /// overrides (highest precedence).
    pub(crate) config: Config,
}
#[instrument]
pub async fn init() -> Result<Setup> {
// Get arguments. These overrule configuration file, and environment
// variables if applicable.
let args = Args::parse();
// Use default config location unless specified.
let config_location: PathBuf = if let Some(ref path) = args.config_file {
path.to_owned()
} else {
ProjectDirs::from("", "", env!("CARGO_PKG_NAME"))
.unwrap()
.config_dir()
.to_owned()
.join(r"config.toml")
};
info!("Starting.");
let settings = Config::builder()
.add_source(config::File::with_name(&config_location.to_string_lossy()).required(false))
.add_source(config::Environment::with_prefix("BOT"))
// Doing all of these overrides provides a unified access point for options,
// but a derive macro could do this a bit better if this becomes too large.
.set_override_option("api-key", args.api_key.clone())?
.set_override_option("base-url", args.base_url.clone())?
.set_override_option("model", args.model.clone())?
.set_override_option("instruct", args.model.clone())?
.set_override_option("channels", args.channels.clone())?
.set_override_option("server", args.server.clone())?
.set_override_option("port", args.port.clone())? // FIXME: Make this a default here not in clap.
.set_override_option("nickname", args.nickname.clone())?
.set_override_option("username", args.username.clone())?
.set_override_option("use_tls", args.use_tls)?
.build()
.wrap_err("Couldn't read configuration settings.")?;
Ok(Setup { config: settings })
}