Added README and LICENSE.

This commit is contained in:
Micheal Smith
2025-08-11 23:37:18 -05:00
parent b86e46fe00
commit 5f30fdbf77
9 changed files with 182 additions and 63 deletions

View File

@@ -6,7 +6,10 @@ use color_eyre::{
},
};
// Lots of namespace confusion potential
use crate::qna::LLMHandle;
use crate::{
commands,
qna::LLMHandle,
};
use config::Config as MainConfig;
use futures::StreamExt;
use irc::client::prelude::{
@@ -28,12 +31,14 @@ pub struct Chat {
// Need: owners, channels, username, nick, server, password
#[instrument]
pub async fn new(settings: &MainConfig, handle: &LLMHandle) -> Result<Chat> {
pub async fn new(
settings: &MainConfig,
handle: &LLMHandle,
) -> Result<Chat> {
// Going to just assign and let the irc library handle errors for now, and
// add my own checking if necessary.
let port: u16 = settings.get("port")?;
let channels: Vec<String> = settings.get("channels")
.wrap_err("No channels provided.")?;
let channels: Vec<String> = settings.get("channels").wrap_err("No channels provided.")?;
event!(Level::INFO, "Channels = {:?}", channels);
@@ -73,14 +78,14 @@ impl Chat {
});
while let Some(message) = stream.next().await.transpose()? {
if let Command::PRIVMSG(channel, message) = message.command {
if message.starts_with("!gem") {
let msg = self.llm_handle.send_request(message).await?;
event!(Level::INFO, "Message received.");
client
.send_privmsg(channel, msg)
.wrap_err("Couldn't send response to channel.")?;
}
if let Command::PRIVMSG(channel, message) = message.command
&& message.starts_with("!gem")
{
let msg = self.llm_handle.send_request(message).await?;
event!(Level::INFO, "Message received.");
client
.send_privmsg(channel, msg)
.wrap_err("Couldn't send response to channel.")?;
}
}

21
src/commands.rs Normal file
View File

@@ -0,0 +1,21 @@
use color_eyre::Result;
use std::{
path::{Path, PathBuf},
};
/// Filesystem root under which bot commands are resolved and executed.
#[derive(Clone, Debug)]
pub struct Root {
    // Base directory for command lookups; stored owned so `Root` has no
    // borrowed lifetime and can live inside long-running handles.
    path: PathBuf,
}

impl Root {
    /// Builds a `Root` anchored at `path`.
    ///
    /// Accepts anything path-like (`&str`, `String`, `&Path`, `PathBuf`, ...)
    /// and stores an owned copy.
    pub fn new(path: impl AsRef<Path>) -> Self {
        Root {
            path: path.as_ref().to_owned(),
        }
    }

    /// Borrows the root directory this instance was created with.
    /// (Gives the otherwise-private `path` field a read path for callers.)
    pub fn path(&self) -> &Path {
        &self.path
    }

    /// Executes `cmd_string` relative to this root.
    ///
    /// # Errors
    /// Will report command-execution failures once implemented.
    ///
    /// NOTE(review): still a stub (`todo!`); the parameter is underscored so
    /// the build stays free of unused-variable warnings until the
    /// implementation lands. Underscoring does not change the call interface.
    pub fn run_command(_cmd_string: impl AsRef<str>) -> Result<()> {
        todo!();
    }
}

View File

@@ -39,16 +39,26 @@ async fn main() -> Result<()> {
// chroot if applicable.
if let Ok(chroot_path) = config.get_string("chroot-dir") {
info!("Attempting to chroot to {}", chroot_path);
fs::chroot(&chroot_path)
.wrap_err_with(|| format!("Failed setting chroot '{}'", chroot_path.to_string()))?;
.wrap_err_with(|| format!("Failed setting chroot '{}'", chroot_path))?;
std::env::set_current_dir("/").wrap_err("Couldn't change directory after chroot.")?;
}
let handle = qna::new(
// Setup root path for commands.
let cmd_root = if let Ok(command_path) = config.get_string("command-path") {
Some(commands::Root::new(command_path))
} else {
None
};
let handle = qna::LLMHandle::new(
config.get_string("api-key").wrap_err("API missing.")?,
config
.get_string("base-url")
.wrap_err("base-url missing.")?,
cmd_root,
config
.get_string("model")
.wrap_err("model string missing.")?,

View File

@@ -1,3 +1,4 @@
use crate::commands;
use color_eyre::Result;
use futures::StreamExt;
use genai::{
@@ -22,36 +23,39 @@ use tracing::info;
pub struct LLMHandle {
chat_request: ChatRequest,
client: Client,
cmd_root: Option<commands::Root>,
model: String,
}
pub fn new(
api_key: String,
_base_url: impl AsRef<str>,
model: impl Into<String>,
system_role: String,
) -> Result<LLMHandle> {
let auth_resolver = AuthResolver::from_resolver_fn(
|_model_iden: ModelIden| -> Result<Option<AuthData>, genai::resolver::Error> {
// let ModelIden { adapter_kind, model_name } = model_iden;
Ok(Some(AuthData::from_single(api_key)))
},
);
let client = Client::builder().with_auth_resolver(auth_resolver).build();
let chat_request = ChatRequest::default().with_system(system_role);
info!("New LLMHandle created.");
Ok(LLMHandle {
client,
chat_request,
model: model.into(),
})
}
impl LLMHandle {
pub fn new(
api_key: String,
_base_url: impl AsRef<str>,
cmd_root: Option<commands::Root>,
model: impl Into<String>,
system_role: String,
) -> Result<LLMHandle> {
let auth_resolver = AuthResolver::from_resolver_fn(
|_model_iden: ModelIden| -> Result<Option<AuthData>, genai::resolver::Error> {
// let ModelIden { adapter_kind, model_name } = model_iden;
Ok(Some(AuthData::from_single(api_key)))
},
);
let client = Client::builder().with_auth_resolver(auth_resolver).build();
let chat_request = ChatRequest::default().with_system(system_role);
info!("New LLMHandle created.");
Ok(LLMHandle {
client,
chat_request,
cmd_root,
model: model.into(),
})
}
pub async fn send_request(&mut self, message: impl Into<String>) -> Result<String> {
let mut req = self.chat_request.clone();
let client = self.client.clone();
@@ -66,6 +70,11 @@ impl LLMHandle {
while let Some(Ok(stream_event)) = stream.next().await {
if let ChatStreamEvent::Chunk(StreamChunk { content }) = stream_event {
text.push_str(&content);
} else if let ChatStreamEvent::End(end) = stream_event {
let texts = end.captured_texts().unwrap();
for text in texts.into_iter() {
info!("An answer: {}", text);
}
}
}