Commit

feat: support fetching redis official blogs and use chatgpt to summarize the content
wangjiahan committed May 16, 2024
1 parent 2df3015 commit 04af756
Showing 5 changed files with 132 additions and 60 deletions.
9 changes: 8 additions & 1 deletion config.template.yml
@@ -1,3 +1,4 @@
+openai_api_key: "xxx"
 redis:
   username: "user"
   password: "password123"
@@ -8,4 +9,10 @@ go_weekly:
     - "http://example.com/webhook1"
     - "http://example.com/webhook2"
   cron_expression: "0 30 10 * * *"
-  once_post_limit: 5
+  once_post_limit: 5
+redis_official_blog:
+  webhooks:
+    - "http://example.com/webhook1"
+    - "http://example.com/webhook2"
+  cron_expression: "0 30 10 * * *"
+  once_post_limit: 1
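
For context, this is how the template might be loaded into the typed config — a minimal sketch, not part of this commit; the `serde_yaml` dependency and the loader function are assumptions, as the repository's actual loading code is not shown in this diff:

```rust
use std::fs;

// Hypothetical loader: deserializes config.yml into the Conf struct
// defined in src/conf.rs. Assumes serde_yaml as a dependency.
pub fn load_conf(path: &str) -> anyhow::Result<crate::conf::Conf> {
    let raw = fs::read_to_string(path)?;
    let conf: crate::conf::Conf = serde_yaml::from_str(&raw)?;
    Ok(conf)
}
```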
49 changes: 31 additions & 18 deletions src/chatgpt.rs
@@ -1,4 +1,6 @@
+use anyhow::anyhow;
 use serde::{Deserialize, Serialize};
+use tracing::error;
 
 #[derive(Debug, Serialize)]
 pub struct Req {
@@ -7,6 +9,7 @@ pub struct Req {
 }
 
 #[derive(Debug, Deserialize, Default)]
+#[serde(default)]
 pub struct Resp {
     pub id: String,
     pub object: String,
@@ -39,29 +42,39 @@ impl Req {
     }
 }
 
-pub async fn send_request(req: &Req, key: impl Into<String>) -> Result<Resp, anyhow::Error> {
+pub async fn send_request(req: Req, key: impl Into<String>) -> Result<Resp, anyhow::Error> {
     let client = reqwest::Client::new();
-    let resp: Resp = client
+    let resp = client
         .post("https://api.openai.com/v1/chat/completions")
        .header("Content-Type", "application/json")
        .header("Authorization", format!("Bearer {}", key.into()))
-        .json(req)
+        .json(&req)
        .send()
-        .await?
-        .json()
        .await?;
-    Ok(resp)
-}
 
-// #[cfg(test)]
-// mod tests {
-
-//     use super::*;
+    if resp.status().is_success() {
+        let resp: Resp = resp.json().await?;
+        Ok(resp)
+    } else {
+        let status = resp.status();
+        let error_text = resp
+            .text()
+            .await
+            .unwrap_or_else(|_| "Failed to read response body".to_string());
+        error!(
+            "Request failed with status: {} and body: {}",
+            status, error_text
+        );
+        Err(anyhow!("{}: {}", status, error_text))
+    }
+}
 
-//     #[tokio::test]
-//     async fn test_send_request() -> anyhow::Result<()> {
-//         let resp = send_request(&Req::new("gpt-3.5-turbo", "什么是Rust?"), "xxx").await?;
-//         println!("{:?}", resp.choices[0].message.content);
-//         Ok(())
-//     }
-// }
+pub fn build_req_content(content: &str) -> String {
+    let mut res = String::with_capacity(content.len() + 128);
+    res.push_str("这是一篇文章的详细内容:\n");
+    res.push_str(content);
+    res.push('\n');
+    res.push_str("请你使用中文对文章进行总结概括,不要超过150个字。\n");
+    res.push_str("如果文中有列出参考链接的话,也请你整理并放置在回复的最下面。");
+    res
+}
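
The prompt assembled by `build_req_content` wraps the article body in Chinese instructions: summarize the article in Chinese in at most 150 characters, and collect any reference links at the bottom of the reply. A usage sketch tying the two helpers together — a hypothetical caller, not part of this commit, mirroring how src/redis_blog.rs invokes them:

```rust
// Hypothetical caller. Resp derives Default with #[serde(default)], so a
// structurally empty body still deserializes and choices may be empty.
async fn summarize(article_body: &str, openai_api_key: &str) -> anyhow::Result<String> {
    let req = Req::new("gpt-4o", build_req_content(article_body));
    let resp = send_request(req, openai_api_key).await?;
    Ok(resp
        .choices
        .first()
        .map(|choice| choice.message.content.clone())
        .unwrap_or_default())
}
```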
11 changes: 11 additions & 0 deletions src/conf.rs
@@ -7,8 +7,10 @@ use serde::Deserialize;

 #[derive(Debug, Deserialize, PartialEq)]
 pub struct Conf {
+    pub openai_api_key: String,
     pub redis: RedisConf,
     pub go_weekly: ArticleSourceConfig,
+    pub redis_official_blog: ArticleSourceConfig,
 }

 #[derive(Debug, Clone, Deserialize, PartialEq)]
@@ -52,6 +54,7 @@ mod tests {
         assert_eq!(
             conf,
             Conf {
+                openai_api_key: "xxx".to_string(),
                 redis: RedisConf {
                     username: "user".to_string(),
                     password: "password123".to_string(),
@@ -65,6 +68,14 @@
                         "http://example.com/webhook1".to_string(),
                         "http://example.com/webhook2".to_string()
                     ],
                     once_post_limit: 5,
                 },
+                redis_official_blog: ArticleSourceConfig {
+                    cron_expression: "0 30 10 * * *".to_string(),
+                    webhooks: vec![
+                        "http://example.com/webhook1".to_string(),
+                        "http://example.com/webhook2".to_string()
+                    ],
+                    once_post_limit: 1,
+                }
             }
         )
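For reference, the shape of `ArticleSourceConfig` implied by the test above — its definition sits in a collapsed part of src/conf.rs, so the exact field order and attributes here are inferred, not quoted:

```rust
#[derive(Debug, Clone, Deserialize, PartialEq)]
pub struct ArticleSourceConfig {
    pub webhooks: Vec<String>,
    pub cron_expression: String,
    pub once_post_limit: u8, // u8 matches the send_feishu_msg signature
}
```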
16 changes: 15 additions & 1 deletion src/cron_task.rs
@@ -3,7 +3,7 @@ use std::time::Duration;
 use job_scheduler::{Job, JobScheduler};
 use tokio::runtime::Runtime;
 
-use crate::{conf::Conf, go_weekly, redis_base::Redis};
+use crate::{conf::Conf, go_weekly, redis_base::Redis, redis_blog};
 
 pub fn run_every_10_30pm(redis: &Redis, conf: &Conf) {
     let mut sched = JobScheduler::new();
@@ -20,6 +20,20 @@ pub fn run_every_10_30pm(redis: &Redis, conf: &Conf) {
         },
     ));

+    let redis_official_blog_conf = &conf.redis_official_blog;
+    sched.add(Job::new(
+        redis_official_blog_conf.cron_expression.parse().unwrap(),
+        || {
+            let rt = Runtime::new().unwrap();
+            let _ = rt.block_on(redis_blog::send_feishu_msg(
+                redis,
+                redis_official_blog_conf.webhooks.clone(),
+                redis_official_blog_conf.once_post_limit,
+                Some(&conf.openai_api_key),
+            ));
+        },
+    ));
+
     loop {
         sched.tick();
         std::thread::sleep(Duration::from_millis(500)); // short sleep to reduce CPU usage
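The scheduling pattern above is plain `job_scheduler`: parse a six-field cron expression (seconds first), register a closure, and tick in a loop. A minimal self-contained sketch of the same pattern — the job body here is illustrative only:

```rust
use std::time::Duration;

use job_scheduler::{Job, JobScheduler};

fn main() {
    let mut sched = JobScheduler::new();
    // "0 30 10 * * *" = sec 0, min 30, hour 10, every day:
    // the job fires daily at 10:30:00.
    sched.add(Job::new("0 30 10 * * *".parse().unwrap(), || {
        println!("daily 10:30 job fired");
    }));
    loop {
        sched.tick();
        std::thread::sleep(Duration::from_millis(500));
    }
}
```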
107 changes: 67 additions & 40 deletions src/redis_blog.rs
@@ -1,7 +1,10 @@
+use std::{thread, time::Duration};
+
 use serde_json::json;
 use tracing::{error, info};
 
 use crate::{
+    chatgpt::{self, build_req_content, Req},
     feishu_bot,
     redis_base::{self, Redis},
     rss::{resolve_xml_data, send_request, Rss, DEFAULT_ONCE_POST_LIMIT},
@@ -11,7 +14,7 @@ use crate::{
 const REDIS_BLOG_RSS_URL: &str = "https://redis.io/blog/feed/";
 
 #[derive(Debug)]
-struct Article {
+pub struct Article {
     pub url: String,
     pub title: String,
     pub description: String,
@@ -25,50 +28,48 @@ pub async fn send_feishu_msg(
     redis: &redis_base::Redis,
     webhooks: Vec<String>,
     once_post_limit: u8,
+    openai_api_key: Option<&str>,
 ) -> anyhow::Result<()> {
     info!("start fetching redis official blogs");
     let (_, articles) = get_rss_articles(Some(redis), once_post_limit).await?;
     let client = reqwest::Client::new();
     for article in articles {
-        for webhook in &webhooks {
-            let res: feishu_bot::SendMessageResp = client
-                .post(webhook)
-                .json(&json!({
-                    "msg_type": "interactive",
-                    "card": {
-                        "elements": [
-                            {
-                                "tag": "markdown",
-                                "content": format!("{}\n\npublish date: {}", article.description, article.date)
-                            },
-                            {
-                                "actions": [{
-                                    "tag": "button",
-                                    "text": {
-                                        "content": "origin link",
-                                        "tag": "lark_md"
-                                    },
-                                    "url": format!("{}", article.url),
-                                    "type": "default",
-                                    "value": {}
-                                }],
-                                "tag": "action"
-                            }
-                        ],
-                        "header": {
-                            "title": {
-                                "content": format!("{} ({})", article.title, article.author),
-                                "tag": "plain_text"
-                            },
-                            "template": "red",
-                        }
-                    }
-                }))
-                .send()
-                .await?
-                .json()
-                .await?;
-
+        thread::sleep(Duration::from_secs(3));
+        let content = build_feishu_content(openai_api_key, &article).await;
+        let req = &json!({
+            "msg_type": "interactive",
+            "card": {
+                "elements": [
+                    {
+                        "tag": "markdown",
+                        "content": content,
+                    },
+                    {
+                        "actions": [{
+                            "tag": "button",
+                            "text": {
+                                "content": "origin link",
+                                "tag": "lark_md"
+                            },
+                            "url": format!("{}", article.url),
+                            "type": "default",
+                            "value": {}
+                        }],
+                        "tag": "action"
+                    }
+                ],
+                "header": {
+                    "title": {
+                        "content": format!("{} ({}) \n -- {}", article.title, article.author, article.date),
+                        "tag": "plain_text"
+                    },
+                    "template": "red",
+                }
+            }
+        });
+        for webhook in &webhooks {
+            let res: feishu_bot::SendMessageResp =
+                client.post(webhook).json(req).send().await?.json().await?;
             if res.code != 0 {
                 error!(
                     "send redis official blogs to feishu failed, code: {}, msg: {}",
@@ -81,7 +82,7 @@
     Ok(())
 }
 
-async fn get_rss_articles(
+pub async fn get_rss_articles(
     redis: Option<&redis_base::Redis>,
     mut once_post_limit: u8,
 ) -> anyhow::Result<(Rss, Vec<Article>)> {
@@ -123,6 +124,32 @@ async fn get_rss_articles(
     Ok((rss, articles))
 }
 
+async fn build_feishu_content(openai_api_key: Option<&str>, article: &Article) -> String {
+    if openai_api_key.is_none() {
+        return article.description.to_string();
+    }
+
+    let openai_api_key = openai_api_key.unwrap();
+    let mut res = String::with_capacity(4096);
+    res.push_str(&article.description);
+    res.push_str("\n---\n");
+    res.push_str("\n**以下内容为 OpenAI 生成,仅供参考:**\n\n");
+    let req = Req::new("gpt-4o", build_req_content(&article.content));
+    let resp = chatgpt::send_request(req, openai_api_key).await;
+    match resp {
+        Err(e) => res.push_str(e.to_string().as_str()),
+        Ok(v) => {
+            if v.choices.is_empty() {
+                res.push_str(format!("{:#?}", v).as_str())
+            } else {
+                res.push_str(&v.choices[0].message.content);
+            }
+        }
+    }
+    res.push_str("\n---\n");
+    res.to_string()
+}
+
 #[cfg(test)]
 mod tests {

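The card content built by `build_feishu_content` is the RSS description, a divider, a bolded Chinese notice (roughly: "the following was generated by OpenAI, for reference only"), and then the model's summary — or the error text if the request fails. A one-shot call-site sketch outside the scheduler, hypothetical and assuming an existing `Redis` handle:

```rust
// Hypothetical one-shot invocation, mirroring src/cron_task.rs.
async fn post_latest(redis: &redis_base::Redis, key: &str) -> anyhow::Result<()> {
    redis_blog::send_feishu_msg(
        redis,
        vec!["http://example.com/webhook1".to_string()],
        1,         // once_post_limit: at most one article per run
        Some(key), // None would post the raw description without a summary
    )
    .await
}
```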