Initial commit.
.gitignore (vendored, new file)
@@ -0,0 +1,2 @@
/target
.env
Cargo.lock (generated, new file, 3319 lines)
File diff suppressed because it is too large.
Cargo.toml (new file)
@@ -0,0 +1,20 @@
[package]
name = "robotnik"
version = "0.1.0"
edition = "2024"

[dependencies]
# TODO: make this a dev and/or debug dependency later.
better-panic = "0.3.0"
clap = { version = "4.5", features = [ "derive" ] }
color-eyre = "0.6.3"
config = { version = "0.15", features = [ "toml" ] }
directories = "6.0"
dotenvy_macro = "0.15"
futures = "0.3"
human-panic = "2.0"
genai = "0.4.0-alpha.9"
irc = "1.1"
tokio = { version = "1", features = [ "full" ] }
tracing = "0.1"
tracing-subscriber = "0.3"
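A hedged sketch of the TODO above: Cargo cannot key dependencies off the build profile directly, so one common route is an optional dependency behind a feature (the feature name here is illustrative):

[features]
# Enable with `cargo build --features debug-panics` during development.
debug-panics = ["dep:better-panic"]

[dependencies]
better-panic = { version = "0.3.0", optional = true }

main.rs would then gate the call with `#[cfg(feature = "debug-panics")] better_panic::install();`.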
config.toml (new file)
@@ -0,0 +1,16 @@
# Example configuration file.

api-key = "<YOUR-KEY>"
base-url = "api.openai.com"

# If you don't already know the model name, you can generally find a listing
# on the provider's models API pages.
model = "gpt-4o"

# IRC settings
server = "irc.libera.chat"
port = 6697
channels = ["#somechan"]
username = "bot"
nickname = "Roboto"
use_tls = true
rustfmt.toml (new file)
@@ -0,0 +1,7 @@
edition = "2024"
style_edition = "2024"
comment_width = 100
format_code_in_doc_comments = true
imports_granularity = "Crate"
imports_layout = "Vertical"
wrap_comments = true
src/chat.rs (new file)
@@ -0,0 +1,89 @@
use color_eyre::{
    Result,
    eyre::{
        OptionExt,
        WrapErr,
    },
};
// Lots of namespace confusion potential
use crate::qna::LLMHandle;
use config::Config as MainConfig;
use futures::StreamExt;
use irc::client::prelude::{
    Client as IRCClient,
    Command,
    Config as IRCConfig,
};
use tracing::{
    Level,
    event,
    instrument,
};

#[derive(Debug)]
pub struct Chat {
    client: IRCClient,
    llm_handle: LLMHandle, // FIXME: This needs to be thread safe, shared, etc.
}

// Need: owners, channels, username, nick, server, password
#[instrument]
pub async fn new(settings: &MainConfig, handle: &LLMHandle) -> Result<Chat> {
    // Assign and let the irc library handle errors for now; add our own
    // checking if necessary.
    let port: u16 = settings.get("port")?;
    let channels: Vec<String> = settings.get("channels")
        .wrap_err("No channels provided.")?;

    event!(Level::INFO, "Channels = {:?}", channels);

    let config = IRCConfig {
        server: settings.get_string("server").ok(),
        nickname: settings.get_string("nickname").ok(),
        port: Some(port),
        username: settings.get_string("username").ok(),
        use_tls: settings.get_bool("use_tls").ok(),
        channels,
        ..IRCConfig::default()
    };

    event!(Level::INFO, "IRC connection starting...");

    Ok(Chat {
        client: IRCClient::from_config(config).await?,
        llm_handle: handle.clone(),
    })
}

impl Chat {
    pub async fn run(&mut self) -> Result<()> {
        let client = &mut self.client;

        client.identify()?;

        let outgoing = client
            .outgoing()
            .ok_or_eyre("Couldn't get outgoing irc sink.")?;
        let mut stream = client.stream()?;

        // Drive the outgoing half of the connection on its own task.
        tokio::spawn(async move {
            if let Err(e) = outgoing.await {
                event!(Level::ERROR, "Failed to drive output: {}", e);
            }
        });

        while let Some(message) = stream.next().await.transpose()? {
            if let Command::PRIVMSG(channel, message) = message.command {
                if message.starts_with("!gem") {
                    let msg = self.llm_handle.send_request(message).await?;
                    event!(Level::INFO, "Message received.");
                    client
                        .send_privmsg(channel, msg)
                        .wrap_err("Couldn't send response to channel.")?;
                }
            }
        }

        Ok(())
    }
}
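The FIXME on llm_handle asks for a thread-safe, shared handle. A minimal sketch of one direction, assuming the design moves to tokio's async Mutex (this is not what the commit does yet):

use std::sync::Arc;
use tokio::sync::Mutex;

// Hypothetical shared form of the field above.
pub struct Chat {
    client: IRCClient,
    llm_handle: Arc<Mutex<LLMHandle>>,
}

// At a call site, hold the lock only for the duration of one request:
// let msg = self.llm_handle.lock().await.send_request(message).await?;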
src/command.rs (new file, empty)
src/main.rs (new file)
@@ -0,0 +1,53 @@
use color_eyre::{
    Result,
    eyre::WrapErr,
};
use human_panic::setup_panic;
use tracing::{
    Level,
    info,
};
use tracing_subscriber::FmtSubscriber;

mod chat;
mod command;
mod qna;
mod setup;

#[tokio::main]
async fn main() -> Result<()> {
    // Some error sprucing.
    better_panic::install();
    setup_panic!();

    let subscriber = FmtSubscriber::builder()
        .with_max_level(Level::TRACE)
        .finish();

    tracing::subscriber::set_global_default(subscriber)
        .wrap_err("Failed to set up trace logging.")?;

    info!("Starting");

    let settings = setup::init().await.wrap_err("Failed to initialize.")?;
    let config = settings.config;

    let handle = qna::new(
        config.get_string("api-key").wrap_err("API key missing.")?,
        config
            .get_string("base-url")
            .wrap_err("base-url missing.")?,
        config
            .get_string("model")
            .wrap_err("model string missing.")?,
        "You are a shady, yet helpful IRC bot. You try to give responses that can \
         be sent in a single IRC response according to the specification."
            .to_string(),
    )
    .wrap_err("Couldn't initialize LLM handle.")?;
    let mut c = chat::new(&config, &handle).await?;

    c.run().await?;

    Ok(())
}
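The subscriber above hard-codes Level::TRACE. A hedged sketch of making verbosity runtime-configurable via RUST_LOG, assuming tracing-subscriber's env-filter feature is enabled in Cargo.toml:

use tracing_subscriber::{
    EnvFilter,
    FmtSubscriber,
};

// Falls back to "info" when RUST_LOG is unset or unparsable.
let subscriber = FmtSubscriber::builder()
    .with_env_filter(EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info")))
    .finish();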
src/qna.rs (new file)
@@ -0,0 +1,74 @@
use color_eyre::Result;
use futures::StreamExt;
use genai::{
    Client,
    ModelIden,
    chat::{
        ChatMessage,
        ChatRequest,
        ChatStreamEvent,
        StreamChunk,
    },
    resolver::{
        AuthData,
        AuthResolver,
    },
};
use tracing::info;

// Represents an LLM completion source.
// FIXME: Clone is probably temporary.
#[derive(Clone, Debug)]
pub struct LLMHandle {
    chat_request: ChatRequest,
    client: Client,
    model: String,
}

pub fn new(
    api_key: String,
    _base_url: impl AsRef<str>,
    model: impl Into<String>,
    system_role: String,
) -> Result<LLMHandle> {
    let auth_resolver = AuthResolver::from_resolver_fn(
        |_model_iden: ModelIden| -> Result<Option<AuthData>, genai::resolver::Error> {
            // let ModelIden { adapter_kind, model_name } = model_iden;

            Ok(Some(AuthData::from_single(api_key)))
        },
    );

    let client = Client::builder().with_auth_resolver(auth_resolver).build();
    let chat_request = ChatRequest::default().with_system(system_role);

    info!("New LLMHandle created.");

    Ok(LLMHandle {
        client,
        chat_request,
        model: model.into(),
    })
}

impl LLMHandle {
    pub async fn send_request(&mut self, message: impl Into<String>) -> Result<String> {
        let mut req = self.chat_request.clone();
        let client = self.client.clone();

        req = req.append_message(ChatMessage::user(message.into()));
        let response = client
            .exec_chat_stream(&self.model, req.clone(), None)
            .await?;
        let mut stream = response.stream;
        let mut text = String::new();

        // Accumulate the streamed chunks into a single reply string.
        while let Some(Ok(stream_event)) = stream.next().await {
            if let ChatStreamEvent::Chunk(StreamChunk { content }) = stream_event {
                text.push_str(&content);
            }
        }

        Ok(text)
    }
}
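send_request clones the stored request and then discards the appended message, so every !gem prompt starts from a blank history. A hedged sketch of one way to keep context, assuming the eventual design wants it (the method name is illustrative):

impl LLMHandle {
    // Hypothetical variant that persists the exchange back into the handle,
    // so later requests see earlier turns.
    pub async fn send_request_remembered(&mut self, message: impl Into<String>) -> Result<String> {
        self.chat_request = self.chat_request.clone().append_message(ChatMessage::user(message.into()));

        let response = self
            .client
            .exec_chat_stream(&self.model, self.chat_request.clone(), None)
            .await?;
        let mut stream = response.stream;
        let mut text = String::new();

        while let Some(Ok(event)) = stream.next().await {
            if let ChatStreamEvent::Chunk(StreamChunk { content }) = event {
                text.push_str(&content);
            }
        }

        // Record the assistant's reply as part of the history as well.
        self.chat_request = self.chat_request.clone().append_message(ChatMessage::assistant(text.clone()));
        Ok(text)
    }
}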
src/setup.rs (new file)
@@ -0,0 +1,108 @@
use clap::Parser;
use color_eyre::{
    Result,
    eyre::WrapErr,
};
use config::Config;
use directories::ProjectDirs;
use std::path::PathBuf;
use tracing::{
    info,
    instrument,
};

// TODO: use [clap(long, short, help_heading = Some(section))]
#[derive(Clone, Debug, Parser)]
#[command(about, version)]
pub(crate) struct Args {
    #[arg(short, long)]
    /// API key for the LLM in use.
    pub(crate) api_key: Option<String>,

    #[arg(short, long, default_value = "https://api.openai.com")]
    /// Base URL for the LLM API to use.
    pub(crate) base_url: Option<String>,

    #[arg(long)]
    /// Instructions to the model on how to behave.
    pub(crate) instruct: Option<String>,

    #[arg(long)]
    pub(crate) model: Option<String>,

    #[arg(long)]
    /// List of IRC channels to join.
    pub(crate) channels: Option<Vec<String>>,

    #[arg(short, long)]
    /// Custom configuration file location if need be.
    pub(crate) config_file: Option<PathBuf>,

    #[arg(short, long, default_value = "irc.libera.chat")]
    /// IRC server.
    pub(crate) server: Option<String>,

    #[arg(short, long, default_value = "6697")]
    /// Port of the IRC server.
    pub(crate) port: Option<String>,

    #[arg(long)]
    /// IRC nickname.
    pub(crate) nickname: Option<String>,

    #[arg(long)]
    /// IRC nick password.
    pub(crate) nick_password: Option<String>,

    #[arg(long)]
    /// IRC username.
    pub(crate) username: Option<String>,

    #[arg(long)]
    /// Whether or not to use TLS when connecting to the IRC server.
    pub(crate) use_tls: Option<bool>,
}

pub(crate) struct Setup {
    pub(crate) config: Config,
}

#[instrument]
pub async fn init() -> Result<Setup> {
    // Get arguments. These overrule the configuration file and environment
    // variables where applicable.
    let args = Args::parse();

    // Use the default config location unless one is specified.
    let config_location: PathBuf = if let Some(ref path) = args.config_file {
        path.to_owned()
    } else {
        ProjectDirs::from("", "", env!("CARGO_PKG_NAME"))
            .unwrap()
            .config_dir()
            .to_owned()
            .join("config.toml")
    };

    info!("Starting.");

    let settings = Config::builder()
        .add_source(config::File::with_name(&config_location.to_string_lossy()).required(false))
        .add_source(config::Environment::with_prefix("BOT"))
        // Doing all of these overrides provides a unified access point for options,
        // but a derive macro could do this a bit better if this becomes too large.
        .set_override_option("api-key", args.api_key.clone())?
        .set_override_option("base-url", args.base_url.clone())?
        .set_override_option("model", args.model.clone())?
        .set_override_option("instruct", args.instruct.clone())?
        .set_override_option("channels", args.channels.clone())?
        .set_override_option("server", args.server.clone())?
        .set_override_option("port", args.port.clone())? // FIXME: Make this a default here, not in clap.
        .set_override_option("nickname", args.nickname.clone())?
        .set_override_option("username", args.username.clone())?
        .set_override_option("use_tls", args.use_tls)?
        .build()
        .wrap_err("Couldn't read configuration settings.")?;

    Ok(Setup { config: settings })
}
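The comment about a derive macro points at the typed alternative: merge the sources once, then deserialize into a struct instead of overriding keys one by one. A minimal sketch, assuming serde is added as a dependency and showing only a few of the keys:

use serde::Deserialize;

// Hypothetical typed view of the merged configuration.
#[derive(Debug, Deserialize)]
struct Settings {
    #[serde(rename = "api-key")]
    api_key: Option<String>,
    server: Option<String>,
    port: Option<u16>,
    channels: Option<Vec<String>>,
}

// After `.build()?` on the Config::builder() chain above:
// let typed: Settings = settings.try_deserialize().wrap_err("Bad configuration.")?;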