Commit d9333f2

add a set of canonical urls for renamed docs/blogs (#1280)

1 parent 77a181e, commit d9333f2

24 files changed: +362 additions, -596 deletions

pgml-dashboard/build.rs

Lines changed: 2 additions & 6 deletions

@@ -4,10 +4,7 @@ use std::process::Command;
 fn main() {
     println!("cargo:rerun-if-changed=migrations");

-    let output = Command::new("git")
-        .args(["rev-parse", "HEAD"])
-        .output()
-        .unwrap();
+    let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap();
     let git_hash = String::from_utf8(output.stdout).unwrap();
     println!("cargo:rustc-env=GIT_SHA={}", git_hash);

@@ -28,8 +25,7 @@ fn main() {
         }
     }

-    let css_version =
-        read_to_string("static/css/.pgml-bundle").expect("failed to read .pgml-bundle");
+    let css_version = read_to_string("static/css/.pgml-bundle").expect("failed to read .pgml-bundle");
     let css_version = css_version.trim();

     let js_version = read_to_string("static/js/.pgml-bundle").expect("failed to read .pgml-bundle");
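
For orientation (not part of this diff): the cargo:rustc-env=GIT_SHA=... line that build.rs prints makes the commit hash available at compile time, so crate code can read it with the env! macro. A minimal sketch under that assumption; build_info is a hypothetical helper, not code from pgml-dashboard:

// Hedged sketch: consuming the GIT_SHA value exported by build.rs through
// cargo:rustc-env. env! is resolved at compile time, so a missing variable
// is a build error rather than a runtime panic.
fn build_info() -> String {
    // `git rev-parse HEAD` output ends with a newline, so trim it for display.
    format!("dashboard built from commit {}", env!("GIT_SHA").trim())
}

fn main() {
    println!("{}", build_info());
}
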

pgml-dashboard/rustfmt.toml

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+max_width = 120
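
The new rustfmt.toml raises this crate's line-width limit from rustfmt's default of 100 to 120. Because rustfmt's chain- and call-wrapping thresholds scale with max_width, builder chains that previously had to wrap can now stay on one line, which is the reformatting visible in the hunks shown here. An illustrative sketch (not code from the repository) of the effect once cargo fmt picks up this config:

use std::process::Command;

fn main() {
    // Under the old width rustfmt broke this chain one call per line (see the
    // build.rs hunk above); with max_width = 120 it keeps the chain on one line.
    let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap();
    println!("{}", String::from_utf8_lossy(&output.stdout).trim());
}
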

pgml-dashboard/src/api/chatbot.rs

Lines changed: 68 additions & 74 deletions

@@ -46,9 +46,9 @@ impl ChatRole {
         match self {
             ChatRole::User => "user",
             ChatRole::Bot => match brain {
-                ChatbotBrain::OpenAIGPT4
-                | ChatbotBrain::TekniumOpenHermes25Mistral7B
-                | ChatbotBrain::Starling7b => "assistant",
+                ChatbotBrain::OpenAIGPT4 | ChatbotBrain::TekniumOpenHermes25Mistral7B | ChatbotBrain::Starling7b => {
+                    "assistant"
+                }
                 ChatbotBrain::GrypheMythoMaxL213b => "model",
             },
             ChatRole::System => "system",
@@ -69,11 +69,7 @@ impl ChatbotBrain {
         !matches!(self, Self::OpenAIGPT4)
     }

-    fn get_system_message(
-        &self,
-        knowledge_base: &KnowledgeBase,
-        context: &str,
-    ) -> anyhow::Result<serde_json::Value> {
+    fn get_system_message(&self, knowledge_base: &KnowledgeBase, context: &str) -> anyhow::Result<serde_json::Value> {
         match self {
             Self::OpenAIGPT4 => {
                 let system_prompt = std::env::var("CHATBOT_CHATGPT_SYSTEM_PROMPT")?;
@@ -242,10 +238,7 @@ impl Document {
             .take(32)
             .map(char::from)
             .collect();
-        let timestamp = SystemTime::now()
-            .duration_since(UNIX_EPOCH)
-            .unwrap()
-            .as_millis();
+        let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis();
         Document {
             id,
             text: text.to_string(),
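
The collapsed chain above is the standard-library idiom for a millisecond Unix timestamp; a standalone sketch of just that expression, for reference:

use std::time::{SystemTime, UNIX_EPOCH};

fn main() {
    // Milliseconds since the Unix epoch as a u128; unwrap only fails if the
    // system clock reports a time before 1970.
    let timestamp = SystemTime::now().duration_since(UNIX_EPOCH).unwrap().as_millis();
    println!("{timestamp}");
}
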
@@ -275,9 +268,7 @@ async fn get_openai_chatgpt_answer<M: Serialize>(messages: M) -> anyhow::Result<
         .json::<serde_json::Value>()
         .await?;

-    let response = response["choices"]
-        .as_array()
-        .context("No data returned from OpenAI")?[0]["message"]["content"]
+    let response = response["choices"].as_array().context("No data returned from OpenAI")?[0]["message"]["content"]
         .as_str()
         .context("The reponse content from OpenAI was not a string")?
         .to_string();
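
The logic in this hunk is unchanged: index into the choices array of an OpenAI chat-completions response and attach readable errors with anyhow::Context. A self-contained sketch of that parsing pattern, with a hypothetical extract_answer helper standing in for the real function (assumes the anyhow and serde_json crates):

use anyhow::Context;
use serde_json::json;

// Hypothetical helper mirroring the pattern in get_openai_chatgpt_answer:
// pull choices[0].message.content out of a chat-completions response body.
fn extract_answer(response: &serde_json::Value) -> anyhow::Result<String> {
    let answer = response["choices"]
        .as_array()
        .context("No data returned from OpenAI")?[0]["message"]["content"]
        .as_str()
        .context("The response content from OpenAI was not a string")?
        .to_string();
    Ok(answer)
}

fn main() -> anyhow::Result<()> {
    let body = json!({ "choices": [{ "message": { "content": "hello" } }] });
    println!("{}", extract_answer(&body)?);
    Ok(())
}
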
@@ -449,12 +440,11 @@ async fn do_chatbot_get_history(user: &User, limit: usize) -> anyhow::Result<Vec
             .as_str()
             .context("Error parsing text")?
             .to_string();
-        let model: ChatbotBrain = serde_json::from_value(m["document"]["model"].to_owned())
-            .context("Error parsing model")?;
+        let model: ChatbotBrain =
+            serde_json::from_value(m["document"]["model"].to_owned()).context("Error parsing model")?;
         let model: &str = model.into();
-        let knowledge_base: KnowledgeBase =
-            serde_json::from_value(m["document"]["knowledge_base"].to_owned())
-                .context("Error parsing knowledge_base")?;
+        let knowledge_base: KnowledgeBase = serde_json::from_value(m["document"]["knowledge_base"].to_owned())
+            .context("Error parsing knowledge_base")?;
         let knowledge_base: &str = knowledge_base.into();
         Ok(HistoryMessage {
             side,
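
Both rewrapped statements above follow the same pattern: pull a field out of a stored document and deserialize it into a typed enum with serde_json::from_value, converting any failure into a labelled anyhow error. A stripped-down sketch with a hypothetical Brain enum standing in for ChatbotBrain (assumes serde with the derive feature, plus anyhow and serde_json):

use anyhow::Context;
use serde::Deserialize;
use serde_json::json;

// Hypothetical stand-in for ChatbotBrain / KnowledgeBase.
#[derive(Deserialize, Debug)]
#[serde(rename_all = "snake_case")]
enum Brain {
    OpenaiGpt4,
    Starling7b,
}

fn main() -> anyhow::Result<()> {
    let document = json!({ "model": "starling7b" });
    let model: Brain =
        serde_json::from_value(document["model"].to_owned()).context("Error parsing model")?;
    println!("{model:?}");
    Ok(())
}
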
@@ -538,17 +528,24 @@ async fn process_message(
         Some(std::env::var("CHATBOT_DATABASE_URL").expect("CHATBOT_DATABASE_URL not set")),
     );
     let context = collection
-        .query()
-        .vector_recall(&data.question, &pipeline, Some(json!({
-            "instruction": "Represent the Wikipedia question for retrieving supporting documents: "
-        }).into()))
-        .limit(5)
-        .fetch_all()
-        .await?
-        .into_iter()
-        .map(|(_, context, metadata)| format!("\n\n#### Document {}:\n{}\n\n", metadata["id"], context))
-        .collect::<Vec<String>>()
-        .join("\n");
+        .query()
+        .vector_recall(
+            &data.question,
+            &pipeline,
+            Some(
+                json!({
+                    "instruction": "Represent the Wikipedia question for retrieving supporting documents: "
+                })
+                .into(),
+            ),
+        )
+        .limit(5)
+        .fetch_all()
+        .await?
+        .into_iter()
+        .map(|(_, context, metadata)| format!("\n\n#### Document {}:\n{}\n\n", metadata["id"], context))
+        .collect::<Vec<String>>()
+        .join("\n");

     let history_collection = Collection::new(
         "ChatHistory",
@@ -590,49 +587,47 @@ async fn process_message(
         .await?;
     messages.reverse();

-    let (mut history, _) =
-        messages
-            .into_iter()
-            .fold((Vec::new(), None), |(mut new_history, role), value| {
-                let current_role: ChatRole =
-                    serde_json::from_value(value["document"]["role"].to_owned())
-                        .expect("Error parsing chat role");
-                if let Some(role) = role {
-                    if role == current_role {
-                        match role {
-                            ChatRole::User => new_history.push(
-                                serde_json::json!({
-                                    "role": ChatRole::Bot.to_model_specific_role(&brain),
-                                    "content": "*no response due to error*"
-                                })
-                                .into(),
-                            ),
-                            ChatRole::Bot => new_history.push(
-                                serde_json::json!({
-                                    "role": ChatRole::User.to_model_specific_role(&brain),
-                                    "content": "*no response due to error*"
-                                })
-                                .into(),
-                            ),
-                            _ => panic!("Too many system messages"),
-                        }
+    let (mut history, _) = messages
+        .into_iter()
+        .fold((Vec::new(), None), |(mut new_history, role), value| {
+            let current_role: ChatRole =
+                serde_json::from_value(value["document"]["role"].to_owned()).expect("Error parsing chat role");
+            if let Some(role) = role {
+                if role == current_role {
+                    match role {
+                        ChatRole::User => new_history.push(
+                            serde_json::json!({
+                                "role": ChatRole::Bot.to_model_specific_role(&brain),
+                                "content": "*no response due to error*"
+                            })
+                            .into(),
+                        ),
+                        ChatRole::Bot => new_history.push(
+                            serde_json::json!({
+                                "role": ChatRole::User.to_model_specific_role(&brain),
+                                "content": "*no response due to error*"
+                            })
+                            .into(),
+                        ),
+                        _ => panic!("Too many system messages"),
                     }
-                    let new_message: pgml::types::Json = serde_json::json!({
-                        "role": current_role.to_model_specific_role(&brain),
-                        "content": value["document"]["text"]
-                    })
-                    .into();
-                    new_history.push(new_message);
-                } else if matches!(current_role, ChatRole::User) {
-                    let new_message: pgml::types::Json = serde_json::json!({
-                        "role": current_role.to_model_specific_role(&brain),
-                        "content": value["document"]["text"]
-                    })
-                    .into();
-                    new_history.push(new_message);
                 }
-                (new_history, Some(current_role))
-            });
+                let new_message: pgml::types::Json = serde_json::json!({
+                    "role": current_role.to_model_specific_role(&brain),
+                    "content": value["document"]["text"]
+                })
+                .into();
+                new_history.push(new_message);
+            } else if matches!(current_role, ChatRole::User) {
+                let new_message: pgml::types::Json = serde_json::json!({
+                    "role": current_role.to_model_specific_role(&brain),
+                    "content": value["document"]["text"]
+                })
+                .into();
+                new_history.push(new_message);
+            }
+            (new_history, Some(current_role))
+        });

     let system_message = brain.get_system_message(&knowledge_base, &context)?;
     history.insert(0, system_message.into());
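
The large rewrite above only reflows the fold under the new 120-column limit; the logic is unchanged. It walks the stored chat history in order, inserts a placeholder turn whenever two consecutive messages come from the same side, and skips the very first message unless it comes from the user, so the history alternates user/bot before the system prompt is prepended. A simplified, self-contained sketch of that alternation-repair idea, using a hypothetical two-variant Role enum and a build_history helper in place of the real ChatRole and pgml::types::Json machinery:

use serde_json::{json, Value};

#[derive(Clone, Copy, PartialEq)]
enum Role {
    User,
    Bot,
}

impl Role {
    fn as_str(self) -> &'static str {
        match self {
            Role::User => "user",
            Role::Bot => "assistant",
        }
    }

    fn other(self) -> Role {
        match self {
            Role::User => Role::Bot,
            Role::Bot => Role::User,
        }
    }
}

// Simplified version of the fold in process_message: enforce user/bot
// alternation, patching any gap with a placeholder message.
fn build_history(messages: &[(Role, &str)]) -> Vec<Value> {
    let (history, _) = messages
        .iter()
        .fold((Vec::new(), None), |(mut history, previous), &(role, text)| {
            if let Some(previous) = previous {
                if previous == role {
                    // Two turns in a row from the same side: fill the gap.
                    history.push(json!({
                        "role": role.other().as_str(),
                        "content": "*no response due to error*"
                    }));
                }
                history.push(json!({ "role": role.as_str(), "content": text }));
            } else if role == Role::User {
                // Ignore anything before the first user message.
                history.push(json!({ "role": role.as_str(), "content": text }));
            }
            (history, Some(role))
        });
    history
}

fn main() {
    let turns = [(Role::User, "hi"), (Role::User, "still there?"), (Role::Bot, "yes")];
    for message in build_history(&turns) {
        println!("{message}");
    }
}
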
@@ -657,8 +652,7 @@ async fn process_message(
         .into(),
     );

-    let update_history =
-        UpdateHistory::new(history_collection, user_document, brain, knowledge_base);
+    let update_history = UpdateHistory::new(history_collection, user_document, brain, knowledge_base);

     if brain.is_open_source() {
         let op = OpenSourceAI::new(Some(
0 commit comments
