langchain-rust
v4.6.0
⚡ Building applications with LLMs through composability, in Rust! ⚡
This is the Rust implementation of LangChain.
LLMs
Embeddings
Vector Stores
Chains
Agents
Tools
Semantic Routing
Document Loaders
use futures_util :: StreamExt ;
async fn main ( ) {
let path = "./src/document_loaders/test_data/sample.pdf" ;
let loader = PdfExtractLoader :: from_path ( path ) . expect ( "Failed to create PdfExtractLoader" ) ;
// let loader = LoPdfLoader::from_path(path).expect("Failed to create LoPdfLoader");
let docs = loader
. load ( )
. await
. unwrap ( )
. map ( |d| d . unwrap ( ) )
. collect :: < Vec < _ > > ( )
. await ;
} 潘多克
use futures_util :: StreamExt ;
async fn main ( ) {
let path = "./src/document_loaders/test_data/sample.docx" ;
let loader = PandocLoader :: from_path ( InputFormat :: Docx . to_string ( ) , path )
. await
. expect ( "Failed to create PandocLoader" ) ;
let docs = loader
. load ( )
. await
. unwrap ( )
. map ( |d| d . unwrap ( ) )
. collect :: < Vec < _ > > ( )
. await ;
} html
use futures_util :: StreamExt ;
use url :: Url ;
async fn main ( ) {
let path = "./src/document_loaders/test_data/example.html" ;
let html_loader = HtmlLoader :: from_path ( path , Url :: parse ( "https://example.com/" ) . unwrap ( ) )
. expect ( "Failed to create html loader" ) ;
let documents = html_loader
. load ( )
. await
. unwrap ( )
. map ( |x| x . unwrap ( ) )
. collect :: < Vec < _ > > ( )
. await ;
} html到降价
use futures_util :: StreamExt ;
use url :: Url ;
async fn main ( ) {
let path = "./src/document_loaders/test_data/example.html" ;
let html_to_markdown_loader = HtmlToMarkdownLoader :: from_path ( path , Url :: parse ( "https://example.com/" ) . unwrap ( ) , HtmlToMarkdownOptions :: default ( ) . with_skip_tags ( vec ! [ "figure" .to_string ( ) ] ) )
. expect ( "Failed to create html to markdown loader" ) ;
let documents = html_to_markdown_loader
. load ( )
. await
. unwrap ( )
. map ( |x| x . unwrap ( ) )
. collect :: < Vec < _ > > ( )
. await ;
} CSV
use futures_util :: StreamExt ;
async fn main ( ) {
let path = "./src/document_loaders/test_data/test.csv" ;
let columns = vec ! [
"name" .to_string ( ) ,
"age" .to_string ( ) ,
"city" .to_string ( ) ,
"country" .to_string ( ) ,
] ;
let csv_loader = CsvLoader :: from_path ( path , columns ) . expect ( "Failed to create csv loader" ) ;
let documents = csv_loader
. load ( )
. await
. unwrap ( )
. map ( |x| x . unwrap ( ) )
. collect :: < Vec < _ > > ( )
. await ;
} git提交
use futures_util :: StreamExt ;
async fn main ( ) {
let path = "/path/to/git/repo" ;
let git_commit_loader = GitCommitLoader :: from_path ( path ) . expect ( "Failed to create git commit loader" ) ;
let documents = csv_loader
. load ( )
. await
. unwrap ( )
. map ( |x| x . unwrap ( ) )
. collect :: < Vec < _ > > ( )
. await ;
} 源代码
let loader_with_dir =
SourceCodeLoader :: from_path ( "./src/document_loaders/test_data" . to_string ( ) )
. with_dir_loader_options ( DirLoaderOptions {
glob : None ,
suffixes : Some ( vec ! [ "rs" .to_string ( ) ] ) ,
exclude : None ,
} ) ;
let stream = loader_with_dir . load ( ) . await . unwrap ( ) ;
let documents = stream . map ( |x| x . unwrap ( ) ) . collect :: < Vec < _ > > ( ) . await ; 该库在很大程度上依赖serde_json操作。
First, make sure serde_json is added to your Rust project:
cargo add serde_json
Then, add langchain-rust itself:
cargo add langchain-rust
To use sqlite-vss, download the additional sqlite_vss library from https://github.com/asg017/sqlite-vss and enable the feature:
cargo add langchain-rust --features sqlite-vss
To use sqlite-vec, download the additional sqlite_vec library from https://github.com/asg017/sqlite-vec and enable the feature:
cargo add langchain-rust --features sqlite-vec
cargo add langchain-rust --features postgres
cargo add langchain-rust --features surrealdb
cargo add langchain-rust --features qdrant
Remember to substitute the feature flag — sqlite, postgres, or surrealdb — according to your specific use case.
This will add both serde_json and langchain-rust as dependencies in your Cargo.toml file. Now, when you build your project, both dependencies will be fetched and compiled, and will be available for use in your project.
Keep in mind that serde_json is a required dependency, while sqlite, postgres, and surrealdb are optional features that can be added according to your project's needs.
use langchain_rust :: {
chain :: { Chain , LLMChainBuilder } ,
fmt_message , fmt_placeholder , fmt_template ,
language_models :: llm :: LLM ,
llm :: openai :: { OpenAI , OpenAIModel } ,
message_formatter ,
prompt :: HumanMessagePromptTemplate ,
prompt_args ,
schemas :: messages :: Message ,
template_fstring ,
} ;
#[tokio::main]
async fn main() {
    // Initialize the model. To pass the API key directly instead of reading it
    // from an environment variable, configure it explicitly:
    // let open_ai = OpenAI::default()
    //     .with_config(OpenAIConfig::default().with_api_key("<your_key>"))
    //     .with_model(OpenAIModel::Gpt4oMini.to_string());
    let open_ai = OpenAI::default().with_model(OpenAIModel::Gpt4oMini.to_string());

    // Ask the raw model a question directly.
    let response = open_ai.invoke("What is rust").await.unwrap();
    println!("{}", response);

    // A prompt template converts raw user input into a better input for the LLM.
    let prompt = message_formatter![
        fmt_message!(Message::new_system_message(
            "You are world class technical documentation writer."
        )),
        fmt_template!(HumanMessagePromptTemplate::new(template_fstring!(
            "{input}", "input"
        )))
    ];

    // Combine the model and the prompt into a simple LLM chain.
    let chain = LLMChainBuilder::new()
        .prompt(prompt)
        .llm(open_ai.clone())
        .build()
        .unwrap();

    // Invoke the chain; the system prompt steers the tone of the answer.
    let outcome = chain
        .invoke(prompt_args! {
            "input" => "Quien es el escritor de 20000 millas de viaje submarino",
        })
        .await;
    match outcome {
        Ok(result) => println!("Result: {:?}", result),
        Err(e) => panic!("Error invoking LLMChain: {:?}", e),
    }

    // Use the `fmt_placeholder!` macro to inject a list of prior messages
    // into the prompt.
    let prompt = message_formatter![
        fmt_message!(Message::new_system_message(
            "You are world class technical documentation writer."
        )),
        fmt_placeholder!("history"),
        fmt_template!(HumanMessagePromptTemplate::new(template_fstring!(
            "{input}", "input"
        ))),
    ];

    let chain = LLMChainBuilder::new()
        .prompt(prompt)
        .llm(open_ai)
        .build()
        .unwrap();

    let outcome = chain
        .invoke(prompt_args! {
            "input" => "Who is the writer of 20,000 Leagues Under the Sea, and what is my name?",
            "history" => vec![
                Message::new_human_message("My name is: luis"),
                Message::new_ai_message("Hi luis"),
            ],
        })
        .await;
    match outcome {
        Ok(result) => println!("Result: {:?}", result),
        Err(e) => panic!("Error invoking LLMChain: {:?}", e),
    }
}