chore: 修改项目路径结构,使用 workspace 组织包 (#118)

This commit is contained in:
ᴀᴍᴛᴏᴀᴇʀ
2024-06-07 10:56:53 -07:00
committed by GitHub
parent c4db12b154
commit 1744f8647b
44 changed files with 181 additions and 4753 deletions

View File

@@ -0,0 +1,317 @@
use anyhow::{anyhow, bail, Result};
use serde::{Deserialize, Serialize};
use crate::bilibili::error::BiliError;
/// Analyzes the JSON returned by Bilibili's playurl API and selects playable streams.
pub struct PageAnalyzer {
    // Raw playurl response JSON (fields `durl`, `format`, `dash`, `is_html5` are read);
    // mutated in place by `streams`, which `take`s the dash arrays out of it.
    info: serde_json::Value,
}
/// Video quality tiers. Discriminants are the numeric `id` values reported by the
/// Bilibili API (decoded via `strum::FromRepr`). The derived `PartialOrd` follows
/// declaration order, lowest to highest, and is used for min/max range filtering.
#[derive(Debug, strum::FromRepr, PartialEq, PartialOrd, Serialize, Deserialize)]
pub enum VideoQuality {
    Quality360p = 16,
    Quality480p = 32,
    Quality720p = 64,
    Quality1080p = 80,
    Quality1080pPLUS = 112,
    Quality1080p60 = 116,
    Quality4k = 120,
    QualityHdr = 125,
    QualityDolby = 126,
    Quality8k = 127,
}
/// Audio quality tiers. Discriminants are the numeric `id` values reported by the
/// Bilibili API; the derived `PartialOrd` (declaration order) drives range filtering.
/// Note that Dolby/Hi-RES sit between 132k and 192k in this ordering.
#[derive(Debug, strum::FromRepr, PartialEq, PartialOrd, Serialize, Deserialize)]
pub enum AudioQuality {
    Quality64k = 30216,
    Quality132k = 30232,
    QualityDolby = 30250,
    QualityHiRES = 30251,
    Quality192k = 30280,
}
/// Supported video codecs. The strum serialization strings ("hev"/"avc"/"av01")
/// are matched as substrings of the API's `codecs` field (see `streams`).
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, strum::EnumString, strum::Display, PartialEq, PartialOrd, Serialize, Deserialize)]
pub enum VideoCodecs {
    #[strum(serialize = "hev")]
    HEV,
    #[strum(serialize = "avc")]
    AVC,
    #[strum(serialize = "av01")]
    AV1,
}
// Filtering preferences for video/audio streams.
#[derive(Serialize, Deserialize)]
pub struct FilterOption {
    pub video_max_quality: VideoQuality,
    pub video_min_quality: VideoQuality,
    pub audio_max_quality: AudioQuality,
    pub audio_min_quality: AudioQuality,
    // Accepted codecs, in order of preference (earlier in the list = preferred).
    pub codecs: Vec<VideoCodecs>,
    // Exclusion switches for the special tiers; `true` disables that tier entirely.
    pub no_dolby_video: bool,
    pub no_dolby_audio: bool,
    pub no_hdr: bool,
    pub no_hires: bool,
}
impl Default for FilterOption {
    /// Most permissive defaults: full quality range, every codec allowed
    /// (AV1 preferred, then HEVC, then AVC), and no tier excluded.
    fn default() -> Self {
        Self {
            video_max_quality: VideoQuality::Quality8k,
            video_min_quality: VideoQuality::Quality360p,
            audio_max_quality: AudioQuality::QualityHiRES,
            audio_min_quality: AudioQuality::Quality64k,
            codecs: vec![VideoCodecs::AV1, VideoCodecs::HEV, VideoCodecs::AVC],
            no_dolby_video: false,
            no_dolby_audio: false,
            no_hdr: false,
            no_hires: false,
        }
    }
}
// The five stream types of the upstream project; in practice only Flv,
// DashVideo and DashAudio are expected to actually occur.
#[derive(Debug, PartialEq, PartialOrd)]
pub enum Stream {
    Flv(String),
    Html5Mp4(String),
    // NOTE(review): "Eposite" looks like a typo for "Episode"; the variant name is
    // public API here, so it is kept as-is.
    EpositeTryMp4(String),
    DashVideo {
        url: String,
        quality: VideoQuality,
        codecs: VideoCodecs,
    },
    DashAudio {
        url: String,
        quality: AudioQuality,
    },
}
// Generic accessor for the stream URL, consumed by the Downloader.
impl Stream {
    /// The playable URL of this stream, regardless of variant.
    pub fn url(&self) -> &str {
        match self {
            // All tuple variants carry the URL as their only field...
            Self::Flv(url) | Self::Html5Mp4(url) | Self::EpositeTryMp4(url) => url,
            // ...and both DASH variants carry it as a named field.
            Self::DashVideo { url, .. } | Self::DashAudio { url, .. } => url,
        }
    }
}
/// The best selection result for a video's streams. Two shapes are possible:
/// 1. a single mixed stream, returned as `Mixed`;
/// 2. separate video and audio, returned as `VideoAudio`, where the audio may be
///    absent (silent videos such as BV1J7411H7KQ).
#[derive(Debug)]
pub enum BestStream {
    VideoAudio { video: Stream, audio: Option<Stream> },
    Mixed(Stream),
}
impl PageAnalyzer {
    /// Wrap a playurl API response (JSON) for stream analysis.
    pub fn new(info: serde_json::Value) -> Self {
        Self { info }
    }

    // FLV mixed stream: the response has `durl` and a `format` string starting with "flv".
    fn is_flv_stream(&self) -> bool {
        self.info.get("durl").is_some()
            && self.info["format"].is_string()
            && self.info["format"].as_str().unwrap().starts_with("flv")
    }

    // HTML5 MP4 mixed stream: `durl` present, `format` starts with "mp4", `is_html5` is true.
    fn is_html5_mp4_stream(&self) -> bool {
        self.info.get("durl").is_some()
            && self.info["format"].is_string()
            && self.info["format"].as_str().unwrap().starts_with("mp4")
            && self.info["is_html5"].is_boolean()
            && self.info["is_html5"].as_bool().unwrap()
    }

    // "Episode try" MP4 mixed stream: like the HTML5 case but `is_html5` absent or false.
    fn is_episode_try_mp4_stream(&self) -> bool {
        self.info.get("durl").is_some()
            && self.info["format"].is_string()
            && self.info["format"].as_str().unwrap().starts_with("mp4")
            && !(self.info["is_html5"].is_boolean() && self.info["is_html5"].as_bool().unwrap())
    }

    /// Collect every stream that passes `filter_option`.
    ///
    /// Mixed formats (FLV / MP4) short-circuit into a single-element vector.
    /// For DASH responses the video/audio/flac/dolby arrays are `take`n out of
    /// `self.info`, so the DASH branch is effectively single-shot per analyzer.
    fn streams(&mut self, filter_option: &FilterOption) -> Result<Vec<Stream>> {
        if self.is_flv_stream() {
            return Ok(vec![Stream::Flv(
                self.info["durl"][0]["url"]
                    .as_str()
                    .ok_or(anyhow!("invalid flv stream"))?
                    .to_string(),
            )]);
        }
        if self.is_html5_mp4_stream() {
            return Ok(vec![Stream::Html5Mp4(
                self.info["durl"][0]["url"]
                    .as_str()
                    .ok_or(anyhow!("invalid html5 mp4 stream"))?
                    .to_string(),
            )]);
        }
        if self.is_episode_try_mp4_stream() {
            return Ok(vec![Stream::EpositeTryMp4(
                self.info["durl"][0]["url"]
                    .as_str()
                    .ok_or(anyhow!("invalid episode try mp4 stream"))?
                    .to_string(),
            )]);
        }
        let mut streams: Vec<Stream> = Vec::new();
        let videos_data = self.info["dash"]["video"].take();
        let audios_data = self.info["dash"]["audio"].take();
        let flac_data = self.info["dash"]["flac"].take();
        let dolby_data = self.info["dash"]["dolby"].take();
        // A missing video array is treated as risk control having been triggered.
        for video_data in videos_data.as_array().ok_or(BiliError::RiskControlOccurred)?.iter() {
            let video_stream_url = video_data["baseUrl"].as_str().unwrap().to_string();
            let video_stream_quality = VideoQuality::from_repr(video_data["id"].as_u64().unwrap() as usize)
                .ok_or(anyhow!("invalid video stream quality"))?;
            if (video_stream_quality == VideoQuality::QualityHdr && filter_option.no_hdr)
                || (video_stream_quality == VideoQuality::QualityDolby && filter_option.no_dolby_video)
                || (video_stream_quality != VideoQuality::QualityDolby
                    && video_stream_quality != VideoQuality::QualityHdr
                    && (video_stream_quality < filter_option.video_min_quality
                        || video_stream_quality > filter_option.video_max_quality))
            // This filter covers three cases:
            // 1. an HDR stream while HDR is disabled
            // 2. a Dolby Vision stream while Dolby Vision is disabled
            // 3. an ordinary stream whose quality falls outside the configured range
            {
                continue;
            }
            let video_codecs = video_data["codecs"].as_str().unwrap();
            // Derive the codec from the stream's `codecs` field. This is a containment
            // check rather than an exact match — e.g. a `codecs` value like
            // "av01.42c01e" should resolve to the AV1 variant ("av01").
            let video_codecs = vec![VideoCodecs::HEV, VideoCodecs::AVC, VideoCodecs::AV1]
                .into_iter()
                .find(|c| video_codecs.contains(c.to_string().as_str()));
            let Some(video_codecs) = video_codecs else {
                continue;
            };
            if !filter_option.codecs.contains(&video_codecs) {
                continue;
            }
            streams.push(Stream::DashVideo {
                url: video_stream_url,
                quality: video_stream_quality,
                codecs: video_codecs,
            });
        }
        if audios_data.is_array() {
            for audio_data in audios_data.as_array().unwrap().iter() {
                let audio_stream_url = audio_data["baseUrl"].as_str().unwrap().to_string();
                // Unknown quality ids are skipped rather than treated as errors.
                let audio_stream_quality = AudioQuality::from_repr(audio_data["id"].as_u64().unwrap() as usize);
                let Some(audio_stream_quality) = audio_stream_quality else {
                    continue;
                };
                if audio_stream_quality > filter_option.audio_max_quality
                    || audio_stream_quality < filter_option.audio_min_quality
                {
                    continue;
                }
                streams.push(Stream::DashAudio {
                    url: audio_stream_url,
                    quality: audio_stream_quality,
                });
            }
        }
        if !(filter_option.no_hires || flac_data["audio"].is_null()) {
            // Only reached when hi-res is allowed AND a FLAC stream exists.
            let flac_stream_url = flac_data["audio"]["baseUrl"].as_str().unwrap().to_string();
            let flac_stream_quality =
                AudioQuality::from_repr(flac_data["audio"]["id"].as_u64().unwrap() as usize).unwrap();
            streams.push(Stream::DashAudio {
                url: flac_stream_url,
                quality: flac_stream_quality,
            });
        }
        if !(filter_option.no_dolby_audio || dolby_data["audio"].is_null()) {
            // Likewise: only when Dolby audio is allowed AND such a stream exists.
            let dolby_stream_data = dolby_data["audio"].as_array().and_then(|v| v.first());
            if dolby_stream_data.is_some() {
                let dolby_stream_data = dolby_stream_data.unwrap();
                let dolby_stream_url = dolby_stream_data["baseUrl"].as_str().unwrap().to_string();
                let dolby_stream_quality =
                    AudioQuality::from_repr(dolby_stream_data["id"].as_u64().unwrap() as usize).unwrap();
                streams.push(Stream::DashAudio {
                    url: dolby_stream_url,
                    quality: dolby_stream_quality,
                });
            }
        }
        Ok(streams)
    }

    /// Pick the best stream(s) allowed by `filter_option`.
    ///
    /// FLV / MP4 responses return a single `BestStream::Mixed`; DASH responses
    /// return `BestStream::VideoAudio`, whose audio may be `None` (silent videos
    /// have no audio stream at all).
    pub fn best_stream(&mut self, filter_option: &FilterOption) -> Result<BestStream> {
        let streams = self.streams(filter_option)?;
        if self.is_flv_stream() || self.is_html5_mp4_stream() || self.is_episode_try_mp4_stream() {
            // Per the assumption in `streams`, these formats produce exactly one stream.
            return Ok(BestStream::Mixed(streams.into_iter().next().unwrap()));
        }
        // Split video and audio streams apart and sort each ascending by preference.
        let (mut video_streams, mut audio_streams): (Vec<_>, Vec<_>) =
            streams.into_iter().partition(|s| matches!(s, Stream::DashVideo { .. }));
        // The ordering depends on the filter options, so it cannot be a `PartialOrd`
        // impl on `Stream`; it has to be written as a closure here.
        video_streams.sort_by(|a, b| match (a, b) {
            (
                Stream::DashVideo {
                    quality: a_quality,
                    codecs: a_codecs,
                    ..
                },
                Stream::DashVideo {
                    quality: b_quality,
                    codecs: b_codecs,
                    ..
                },
            ) => {
                // Dolby Vision (when allowed) always ranks highest, then HDR,
                // then plain quality ordering.
                if a_quality == &VideoQuality::QualityDolby && !filter_option.no_dolby_video {
                    return std::cmp::Ordering::Greater;
                }
                if b_quality == &VideoQuality::QualityDolby && !filter_option.no_dolby_video {
                    return std::cmp::Ordering::Less;
                }
                if a_quality == &VideoQuality::QualityHdr && !filter_option.no_hdr {
                    return std::cmp::Ordering::Greater;
                }
                if b_quality == &VideoQuality::QualityHdr && !filter_option.no_hdr {
                    return std::cmp::Ordering::Less;
                }
                if a_quality != b_quality {
                    return a_quality.partial_cmp(b_quality).unwrap();
                }
                // Equal quality: prefer the codec appearing earlier in the configured
                // preference list (an earlier position sorts later, and the best
                // element is taken from the back of the vector).
                filter_option
                    .codecs
                    .iter()
                    .position(|c| c == b_codecs)
                    .cmp(&filter_option.codecs.iter().position(|c| c == a_codecs))
            }
            _ => unreachable!(),
        });
        audio_streams.sort_by(|a, b| match (a, b) {
            (Stream::DashAudio { quality: a_quality, .. }, Stream::DashAudio { quality: b_quality, .. }) => {
                // Dolby audio (when allowed) outranks everything, then plain ordering.
                if a_quality == &AudioQuality::QualityDolby && !filter_option.no_dolby_audio {
                    return std::cmp::Ordering::Greater;
                }
                if b_quality == &AudioQuality::QualityDolby && !filter_option.no_dolby_audio {
                    return std::cmp::Ordering::Less;
                }
                a_quality.partial_cmp(b_quality).unwrap()
            }
            _ => unreachable!(),
        });
        if video_streams.is_empty() {
            bail!("no video stream found");
        }
        Ok(BestStream::VideoAudio {
            video: video_streams.remove(video_streams.len() - 1),
            // The audio may legitimately be absent, so `pop` returning Option fits.
            audio: audio_streams.pop(),
        })
    }
}

View File

@@ -0,0 +1,97 @@
use std::sync::Arc;
use anyhow::{bail, Result};
use reqwest::{header, Method};
use crate::bilibili::Credential;
use crate::config::CONFIG;
// A thin wrapper around reqwest::Client, used for Bilibili requests.
#[derive(Clone)]
pub struct Client(reqwest::Client);
impl Client {
    /// Build a reqwest client preconfigured with the headers Bilibili's API expects.
    pub fn new() -> Self {
        // Headers required for normal API access, attached to every request by default.
        let mut default_headers = header::HeaderMap::new();
        default_headers.insert(
            header::USER_AGENT,
            header::HeaderValue::from_static(
                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/123.0.0.0 Safari/537.36",
            ),
        );
        default_headers.insert(
            header::REFERER,
            header::HeaderValue::from_static("https://www.bilibili.com"),
        );
        let inner = reqwest::Client::builder()
            .default_headers(default_headers)
            .gzip(true)
            .connect_timeout(std::time::Duration::from_secs(10))
            .read_timeout(std::time::Duration::from_secs(10))
            .build()
            .unwrap();
        Self(inner)
    }

    // A wrapper over reqwest::Client::request that attaches the credential to the
    // request: each credential field is converted into a cookie header.
    pub fn request(&self, method: Method, url: &str, credential: Option<&Credential>) -> reqwest::RequestBuilder {
        let mut builder = self.0.request(method, url);
        if let Some(credential) = credential {
            let cookies = [
                format!("SESSDATA={}", credential.sessdata),
                format!("bili_jct={}", credential.bili_jct),
                format!("buvid3={}", credential.buvid3),
                format!("DedeUserID={}", credential.dedeuserid),
                format!("ac_time_value={}", credential.ac_time_value),
            ];
            for cookie in cookies {
                builder = builder.header(header::COOKIE, cookie);
            }
        }
        builder
    }
}
// Clippy suggests implementing the Default trait for types with a no-arg `new()`.
impl Default for Client {
    fn default() -> Self {
        Self::new()
    }
}
/// High-level Bilibili client that pairs the HTTP `Client` with the globally
/// configured credential (loaded from `CONFIG` per request).
pub struct BiliClient {
    pub client: Client,
}
impl BiliClient {
    /// Construct a BiliClient backed by a freshly built `Client`.
    pub fn new() -> Self {
        Self {
            client: Client::new(),
        }
    }

    /// Issue a request with the globally configured credential attached, if any.
    pub fn request(&self, method: Method, url: &str) -> reqwest::RequestBuilder {
        let guard = CONFIG.credential.load();
        self.client.request(method, url, guard.as_deref())
    }

    /// Refresh the stored credential when the remote side reports it is stale.
    /// Missing credentials and fresh credentials are both no-ops.
    pub async fn check_refresh(&self) -> Result<()> {
        let guard = CONFIG.credential.load();
        let credential = match guard.as_deref() {
            Some(credential) => credential,
            None => return Ok(()),
        };
        if credential.need_refresh(&self.client).await? {
            let refreshed = credential.refresh(&self.client).await?;
            CONFIG.credential.store(Some(Arc::new(refreshed)));
            CONFIG.save()
        } else {
            Ok(())
        }
    }

    /// Check that a credential is set and still valid.
    pub async fn is_login(&self) -> Result<()> {
        match CONFIG.credential.load().as_deref() {
            Some(credential) => credential.is_login(&self.client).await,
            None => bail!("no credential found"),
        }
    }
}

View File

@@ -0,0 +1,202 @@
use std::collections::HashSet;
use anyhow::{anyhow, bail, Result};
use cookie::Cookie;
use regex::Regex;
use reqwest::{header, Method};
use rsa::pkcs8::DecodePublicKey;
use rsa::sha2::Sha256;
use rsa::{Oaep, RsaPublicKey};
use serde::{Deserialize, Serialize};
use crate::bilibili::{Client, Validate};
/// The cookie set that authenticates requests to Bilibili.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub struct Credential {
    pub sessdata: String,
    pub bili_jct: String,
    pub buvid3: String,
    pub dedeuserid: String,
    // Refresh token consumed by the cookie-refresh flow.
    pub ac_time_value: String,
}
impl Credential {
    /// Ask the passport API whether these cookies need to be refreshed.
    pub async fn need_refresh(&self, client: &Client) -> Result<bool> {
        let res = client
            .request(
                Method::GET,
                "https://passport.bilibili.com/x/passport-login/web/cookie/info",
                Some(self),
            )
            .send()
            .await?
            .error_for_status()?
            .json::<serde_json::Value>()
            .await?
            .validate()?;
        res["data"]["refresh"].as_bool().ok_or(anyhow!("check refresh failed"))
    }

    /// Login checks require an authenticated endpoint; the user stat endpoint is
    /// used here because its response is tiny, making the request cheap.
    pub async fn is_login(&self, client: &Client) -> Result<()> {
        client
            .request(
                Method::GET,
                "https://api.bilibili.com/x/web-interface/nav/stat",
                Some(self),
            )
            .send()
            .await?
            .error_for_status()?
            .json::<serde_json::Value>()
            .await?
            .validate()?;
        Ok(())
    }

    /// Run the full cookie-refresh flow: derive the correspond path, scrape the
    /// refresh CSRF from the correspond page, obtain new cookies, and finally
    /// confirm the refresh using the new credential.
    pub async fn refresh(&self, client: &Client) -> Result<Self> {
        let correspond_path = Self::get_correspond_path();
        let csrf = self.get_refresh_csrf(client, correspond_path).await?;
        let new_credential = self.get_new_credential(client, &csrf).await?;
        self.confirm_refresh(client, &new_credential).await?;
        Ok(new_credential)
    }

    /// RSA-OAEP-encrypt `refresh_{timestamp_ms}` with Bilibili's published public
    /// key and hex-encode it, producing the "correspond path" URL segment.
    fn get_correspond_path() -> String {
        // Called very rarely, so constructing the key inside the function is fine.
        let key = RsaPublicKey::from_public_key_pem(
            "-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDLgd2OAkcGVtoE3ThUREbio0Eg
Uc/prcajMKXvkCKFCWhJYJcLkcM2DKKcSeFpD/j6Boy538YXnR6VhcuUJOhH2x71
nzPjfdTcqMz7djHum0qSZA0AyCBDABUqCrfNgCiJ00Ra7GmRj+YCK1NJEuewlb40
JNrRuoEUXpabUzGB8QIDAQAB
-----END PUBLIC KEY-----",
        )
        .unwrap();
        let ts = chrono::Local::now().timestamp_millis();
        let data = format!("refresh_{}", ts).into_bytes();
        let mut rng = rand::rngs::OsRng;
        let encrypted = key.encrypt(&mut rng, Oaep::new::<Sha256>(), &data).unwrap();
        hex::encode(encrypted)
    }

    /// Fetch the correspond page and extract the refresh CSRF token embedded in
    /// its `<div id="1-name">` element.
    async fn get_refresh_csrf(&self, client: &Client, correspond_path: String) -> Result<String> {
        let res = client
            .request(
                Method::GET,
                format!("https://www.bilibili.com/correspond/1/{}", correspond_path).as_str(),
                Some(self),
            )
            .header(header::COOKIE, "Domain=.bilibili.com")
            .send()
            .await?
            .error_for_status()?;
        regex_find(r#"<div id="1-name">(.+?)</div>"#, res.text().await?.as_str())
    }

    /// POST the refresh request and build a new `Credential` from the Set-Cookie
    /// headers plus the `refresh_token` in the response body.
    async fn get_new_credential(&self, client: &Client, csrf: &str) -> Result<Credential> {
        let mut res = client
            .request(
                Method::POST,
                "https://passport.bilibili.com/x/passport-login/web/cookie/refresh",
                Some(self),
            )
            .header(header::COOKIE, "Domain=.bilibili.com")
            .form(&[
                // Note: this endpoint takes form data, not JSON.
                ("csrf", self.bili_jct.as_str()),
                ("refresh_csrf", csrf),
                ("refresh_token", self.ac_time_value.as_str()),
                ("source", "main_web"),
            ])
            .send()
            .await?
            .error_for_status()?;
        // The headers must be taken out before `.json()`, which consumes `res`.
        let headers = std::mem::take(res.headers_mut());
        let res = res.json::<serde_json::Value>().await?.validate()?;
        let set_cookies = headers.get_all(header::SET_COOKIE);
        // buvid3 is not re-issued by this endpoint, so carry the old one over.
        let mut credential = Self {
            buvid3: self.buvid3.clone(),
            ..Self::default()
        };
        let required_cookies = HashSet::from(["SESSDATA", "bili_jct", "DedeUserID"]);
        let cookies: Vec<Cookie> = set_cookies
            .iter()
            .filter_map(|x| x.to_str().ok())
            .filter_map(|x| Cookie::parse(x).ok())
            .filter(|x| required_cookies.contains(x.name()))
            .collect();
        if cookies.len() != required_cookies.len() {
            bail!("not all required cookies found");
        }
        for cookie in cookies {
            match cookie.name() {
                "SESSDATA" => credential.sessdata = cookie.value().to_string(),
                "bili_jct" => credential.bili_jct = cookie.value().to_string(),
                "DedeUserID" => credential.dedeuserid = cookie.value().to_string(),
                // Everything else was filtered out above.
                _ => unreachable!(),
            }
        }
        if !res["data"]["refresh_token"].is_string() {
            bail!("refresh_token not found");
        }
        credential.ac_time_value = res["data"]["refresh_token"].as_str().unwrap().to_string();
        Ok(credential)
    }

    /// Acknowledge the refresh so the server invalidates the old cookie set.
    async fn confirm_refresh(&self, client: &Client, new_credential: &Credential) -> Result<()> {
        client
            .request(
                Method::POST,
                "https://passport.bilibili.com/x/passport-login/web/confirm/refresh",
                // Note: the *new* credential signs this request.
                Some(new_credential),
            )
            .form(&[
                ("csrf", new_credential.bili_jct.as_str()),
                ("refresh_token", self.ac_time_value.as_str()),
            ])
            .send()
            .await?
            .error_for_status()?
            .json::<serde_json::Value>()
            .await?
            .validate()?;
        Ok(())
    }
}
/// Search `doc` with the regular expression `pattern` and return the text of the
/// first match's capture group 1.
///
/// # Errors
/// Fails when the pattern is invalid, when nothing matches, or when the pattern
/// has no participating capture group 1. (The original code `unwrap()`ed the
/// group and would panic on a group-less pattern; this now surfaces as an error.)
fn regex_find(pattern: &str, doc: &str) -> Result<String> {
    let re = Regex::new(pattern)?;
    let captures = re.captures(doc).ok_or(anyhow!("pattern not match"))?;
    let group = captures
        .get(1)
        .ok_or(anyhow!("pattern has no capture group"))?;
    Ok(group.as_str().to_string())
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `regex_find` should pull the refresh CSRF out of the correspond page HTML.
    #[test]
    fn test_parse_and_find() {
        let doc = r#"
<html lang="zh-Hans">
<body>
<div id="1-name">b0cc8411ded2f9db2cff2edb3123acac</div>
</body>
</html>
"#;
        assert_eq!(
            regex_find(r#"<div id="1-name">(.+?)</div>"#, doc).unwrap(),
            "b0cc8411ded2f9db2cff2edb3123acac",
        );
    }
}

View File

@@ -0,0 +1,235 @@
use std::borrow::Cow;
use std::fmt;
use std::pin::Pin;
use anyhow::Result;
use tokio::io::{AsyncWrite, AsyncWriteExt, BufWriter};
use crate::bilibili::danmaku::canvas::CanvasConfig;
use crate::bilibili::danmaku::{DanmakuOption, DrawEffect, Drawable};
/// A timestamp in seconds, rendered as `H:MM:SS.CC` for ASS dialogue lines.
struct TimePoint {
    t: f64,
}

impl fmt::Display for TimePoint {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let whole_secs = self.t.floor() as u32;
        let hours = whole_secs / 3600;
        let mins = whole_secs / 60 % 60;
        // Seconds (including the fractional part) remaining after full hours/minutes.
        let secs = self.t - f64::from(hours * 3600 + mins * 60);
        write!(f, "{hours}:{mins:02}:{secs:05.2}")
    }
}
/// Wrapper that renders a `DrawEffect` as an ASS override tag.
struct AssEffect {
    effect: DrawEffect,
}

impl fmt::Display for AssEffect {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // `Move` is the only effect variant, so destructure it irrefutably.
        let DrawEffect::Move {
            start: (x0, y0),
            end: (x1, y1),
        } = self.effect;
        write!(f, "\\move({x0}, {y0}, {x1}, {y1})")
    }
}
impl DanmakuOption {
    /// Render the three ASS `Style:` lines (Float, Bottom, Top) for the [V4+ Styles]
    /// section.
    ///
    /// All three styles share every attribute except the style name, so the line is
    /// generated once per name instead of being written out three near-identical
    /// times (the previous triplication invited the copies drifting apart).
    pub fn ass_styles(&self) -> Vec<String> {
        // Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, \
        // Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, \
        // Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding
        ["Float", "Bottom", "Top"]
            .iter()
            .map(|name| {
                format!(
                    "Style: {name},{font},{font_size},&H{a:02x}FFFFFF,&H00FFFFFF,&H{a:02x}000000,&H00000000,\
                     {bold}, 0, 0, 0, 100, 100, 0.00, 0.00, 1, \
                     {outline}, 0, 7, 0, 0, 0, 1",
                    name = name,
                    // `opacity` feeds the ASS alpha channel of the primary/outline colours.
                    a = self.opacity,
                    font = self.font,
                    font_size = self.font_size,
                    bold = self.bold as u8,
                    outline = self.outline,
                )
            })
            .collect()
    }
}
/// Newtype that renders a list of ASS style lines, one per output line.
struct CanvasStyles(Vec<String>);

impl fmt::Display for CanvasStyles {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Each style is terminated with a newline, including the last one.
        self.0.iter().try_for_each(|style| writeln!(f, "{style}"))
    }
}
/// Streams an ASS subtitle file to `W`, one dialogue line per danmaku.
pub struct AssWriter<W: AsyncWrite> {
    // Buffered sink; Box::pin so writing works for any `W: AsyncWrite`,
    // including non-`Unpin` writers.
    f: Pin<Box<BufWriter<W>>>,
    // Video title, written into the [Script Info] header.
    title: String,
    // Canvas geometry and danmaku style options used to build the header.
    canvas_config: CanvasConfig,
}
impl<W: AsyncWrite> AssWriter<W> {
    /// Wrap `f` in a large write buffer. Call `init` (or use `construct`) to emit
    /// the ASS header before writing dialogue lines.
    pub fn new(f: W, title: String, canvas_config: CanvasConfig) -> Self {
        AssWriter {
            // On HDDs or inside docker, disk IO is the dominant bottleneck —
            // use a large (10 MiB) buffer.
            f: Box::pin(BufWriter::with_capacity(10 << 20, f)),
            title,
            canvas_config,
        }
    }

    /// Convenience constructor: build the writer and immediately write the header.
    pub async fn construct(f: W, title: String, canvas_config: CanvasConfig) -> Result<Self> {
        let mut res = Self::new(f, title, canvas_config);
        res.init().await?;
        Ok(res)
    }

    /// Write the [Script Info], [V4+ Styles] and [Events] header sections.
    pub async fn init(&mut self) -> Result<()> {
        self.f
            .write_all(
                format!(
                    "\
                    [Script Info]\n\
                    ; Script generated by danmu2ass\n\
                    Title: {title}\n\
                    Script Updated By: danmu2ass (https://github.com/gwy15/danmu2ass)\n\
                    ScriptType: v4.00+\n\
                    PlayResX: {width}\n\
                    PlayResY: {height}\n\
                    Aspect Ratio: {width}:{height}\n\
                    Collisions: Normal\n\
                    WrapStyle: 2\n\
                    ScaledBorderAndShadow: yes\n\
                    YCbCr Matrix: TV.601\n\
                    \n\
                    \n\
                    [V4+ Styles]\n\
                    Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, \
                    Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, \
                    Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding\n\
                    {styles}\
                    \n\
                    [Events]\n\
                    Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text\n\
                    ",
                    title = self.title,
                    width = self.canvas_config.width,
                    height = self.canvas_config.height,
                    styles = CanvasStyles(self.canvas_config.danmaku_option.ass_styles()),
                )
                .into_bytes()
                .as_slice(),
            )
            .await?;
        Ok(())
    }

    /// Append one Dialogue line for `drawable`; colour is emitted as ASS BGR.
    pub async fn write(&mut self, drawable: Drawable) -> Result<()> {
        self.f
            .write_all(
                format!(
                    // Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
                    "Dialogue: 2,{start},{end},{style},,0,0,0,,{{{effect}\\c&H{b:02x}{g:02x}{r:02x}&}}{text}\n",
                    start = TimePoint {
                        t: drawable.danmu.timeline_s
                    },
                    end = TimePoint {
                        t: drawable.danmu.timeline_s + drawable.duration
                    },
                    style = drawable.style_name,
                    effect = AssEffect {
                        effect: drawable.effect
                    },
                    // ASS colours are &HBBGGRR, hence the reversed channel order.
                    b = drawable.danmu.rgb.2,
                    g = drawable.danmu.rgb.1,
                    r = drawable.danmu.rgb.0,
                    text = escape_text(&drawable.danmu.content),
                )
                .into_bytes()
                .as_slice(),
            )
            .await?;
        Ok(())
    }

    /// Flush the buffer. Must be called before dropping the writer: tokio's
    /// BufWriter does not flush asynchronously on drop.
    pub async fn flush(&mut self) -> Result<()> {
        Ok(self.f.flush().await?)
    }
}
/// Prepare danmaku text for an ASS `Dialogue:` line.
///
/// Trims surrounding whitespace and converts embedded newlines into the ASS
/// line-break escape `\N`. Returns a borrowed slice in the common single-line
/// case so no allocation happens.
///
/// Uses the idiomatic `str::contains` instead of the previous direct
/// `memchr::memchr` byte scan — behavior is identical (std's char search is
/// memchr-backed for single-byte chars) and the third-party call goes away.
fn escape_text(text: &str) -> Cow<str> {
    let text = text.trim();
    if text.contains('\n') {
        // Allocation only for multi-line danmaku, which are rare.
        Cow::from(text.replace('\n', "\\N"))
    } else {
        Cow::from(text)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `TimePoint` must render as `H:MM:SS.CC` with zero-padded minutes and seconds.
    #[test]
    fn time_point_fmt() {
        assert_eq!(format!("{}", TimePoint { t: 0.0 }), "0:00:00.00");
        assert_eq!(format!("{}", TimePoint { t: 1.0 }), "0:00:01.00");
        assert_eq!(format!("{}", TimePoint { t: 60.0 }), "0:01:00.00");
        assert_eq!(format!("{}", TimePoint { t: 3600.0 }), "1:00:00.00");
        assert_eq!(format!("{}", TimePoint { t: 3600.0 + 60.0 }), "1:01:00.00");
        assert_eq!(format!("{}", TimePoint { t: 3600.0 + 60.0 + 1.0 }), "1:01:01.00");
        assert_eq!(
            format!(
                "{}",
                TimePoint {
                    t: 3600.0 + 60.0 + 1.0 + 0.5
                }
            ),
            "1:01:01.50"
        );
        assert_eq!(
            format!(
                "{}",
                TimePoint {
                    t: 3600.0 + 1.0 + 0.01234
                }
            ),
            "1:00:01.01"
        );
    }

    /// Embedded newlines must become the ASS line-break escape `\N`.
    ///
    /// BUG FIX: the input previously consisted of nine bare newlines only (the
    /// characters between them had been lost), so `escape_text` trimmed it down
    /// to an empty string and the assertion could never pass. Restored the
    /// newline-separated text matching the expected output.
    #[test]
    fn test_escape_text() {
        assert_eq!(
            escape_text("呵\n呵\n比\n你\n们\n更\n喜\n欢\n晚\n晚").as_ref(),
            r"呵\N呵\N比\N你\N们\N更\N喜\N欢\N晚\N晚"
        );
    }
}

View File

@@ -0,0 +1,88 @@
use crate::bilibili::danmaku::canvas::CanvasConfig;
use crate::bilibili::danmaku::Danmu;
/// Outcome of testing whether a lane can take another danmaku.
pub enum Collision {
    // The two danmaku drift further and further apart — always safe.
    Separate,
    // The follower could catch up in principle, but the on-screen time runs out first.
    NotEnoughTime,
    // They would collide; `time_needed` extra seconds of delay avoids it.
    Collide { time_needed: f64 },
}
/// One horizontal danmaku slot (lane) on the canvas.
#[derive(Debug, Clone)]
pub struct Lane {
    // Launch time (seconds on the video timeline) of the danmaku last drawn here.
    last_shoot_time: f64,
    // Rendered length of that danmaku, in canvas pixels.
    last_length: f64,
}
impl Lane {
    /// Record that `danmu` has just been drawn into this lane.
    pub fn draw(danmu: &Danmu, config: &CanvasConfig) -> Self {
        Lane {
            last_shoot_time: danmu.timeline_s,
            last_length: danmu.length(config),
        }
    }

    /// Can this lane fire another danmaku? Returns the collision situation.
    ///
    /// Model: each danmaku travels its own length plus the screen width `W`
    /// within the display duration `T`, so longer danmaku move faster.
    pub fn available_for(&self, other: &Danmu, config: &CanvasConfig) -> Collision {
        #[allow(non_snake_case)]
        let T = config.danmaku_option.duration;
        #[allow(non_snake_case)]
        let W = config.width as f64;
        let gap = config.danmaku_option.horizontal_gap;
        // First compute both speeds.
        let t1 = self.last_shoot_time;
        let t2 = other.timeline_s;
        let l1 = self.last_length;
        let l2 = other.length(config);
        let v1 = (W + l1) / T;
        let v2 = (W + l2) / T;
        let delta_t = t2 - t1;
        // Distance from the right edge of the first danmaku to the screen's right edge.
        let delta_x = v1 * delta_t - l1;
        // Not enough space yet: a collision at launch time is certain.
        if delta_x < gap {
            if l2 <= l1 {
                // l2 is no longer than l1 and therefore no faster;
                // simply delaying l2 until l1 has cleared the gap avoids the collision.
                Collision::Collide {
                    time_needed: (gap - delta_x) / v1,
                }
            } else {
                // Extra delay is needed so that when the first danmaku disappears,
                // the second still keeps enough distance.
                // The first disappears at (t1 + T); at that moment the second's left
                // edge should be at W - gap from the start, i.e. it has already
                // traveled for (W - gap) / v2 and must launch at t1 + T - (W - gap) / v2.
                // The extra time needed is that minus t2:
                // let time_needed = (t1 + T - (W - gap) / v2) - t2;
                let time_needed = (T - (W - gap) / v2) - delta_t;
                Collision::Collide { time_needed }
            }
        } else {
            // The first danmaku has fully cleared the right edge.
            if l2 <= l1 {
                // If l2 <= l1 it can never catch up — safe to fire immediately.
                Collision::Separate
            } else {
                // Pursuit problem: l2 is faster and may catch l1.
                // l1 disappears at t1 + T; compute l2's position at exactly that moment:
                // its head is then at v2 * (t1 + T - t2) from the start.
                let pos = v2 * (T - delta_t);
                if pos < (W - gap) {
                    Collision::NotEnoughTime
                } else {
                    // It does catch up — extra delay is required.
                    Collision::Collide {
                        time_needed: (pos - (W - gap)) / v2,
                    }
                }
            }
        }
    }
}

View File

@@ -0,0 +1,172 @@
//! 决定绘画策略
mod lane;
use anyhow::Result;
use float_ord::FloatOrd;
use lane::Lane;
use crate::bilibili::danmaku::canvas::lane::Collision;
use crate::bilibili::danmaku::danmu::DanmuType;
use crate::bilibili::danmaku::{Danmu, DrawEffect, Drawable};
use crate::bilibili::PageInfo;
/// Rendering options for danmaku layout and ASS styling.
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct DanmakuOption {
    /// How long each danmaku stays on screen, in seconds.
    pub duration: f64,
    /// Font family name used in the ASS styles.
    pub font: String,
    /// Font size in ASS units.
    pub font_size: u32,
    /// Scaling factor applied to the estimated danmaku width.
    pub width_ratio: f64,
    /// Minimum horizontal distance kept between two danmaku in the same lane.
    pub horizontal_gap: f64,
    /// Height of one lane, in canvas pixels.
    pub lane_size: u32,
    /// Maximum fraction of the canvas height usable by scrolling danmaku.
    pub float_percentage: f64,
    /// Maximum fraction of the canvas height usable by bottom danmaku.
    pub bottom_percentage: f64,
    /// Opacity, 0-255 (used as the ASS alpha channel in the styles).
    pub opacity: u8,
    /// Whether the text is bold (rendered as 1/0 in the ASS style).
    pub bold: bool,
    /// Outline thickness.
    pub outline: f64,
    /// Offset added to every danmaku's timeline position, in seconds.
    pub time_offset: f64,
}
impl Default for DanmakuOption {
    fn default() -> Self {
        Self {
            duration: 15.0,
            // "黑体" (SimHei), a common CJK font.
            font: "黑体".to_string(),
            font_size: 25,
            width_ratio: 1.2,
            horizontal_gap: 20.0,
            lane_size: 32,
            float_percentage: 0.5,
            bottom_percentage: 0.3,
            // 30% of the 0-255 alpha range.
            opacity: (0.3 * 255.0) as u8,
            bold: true,
            outline: 0.8,
            time_offset: 0.0,
        }
    }
}
/// Canvas geometry plus a reference to the global danmaku options.
#[derive(Clone)]
pub struct CanvasConfig {
    pub width: u64,
    pub height: u64,
    // 'static: the options live in the global CONFIG for the whole program run.
    pub danmaku_option: &'static DanmakuOption,
}
impl CanvasConfig {
    /// Build a canvas config for `page`, deriving the canvas size from the page's
    /// reported dimensions.
    pub fn new(danmaku_option: &'static DanmakuOption, page: &PageInfo) -> Self {
        let (width, height) = Self::dimension(page);
        Self {
            width,
            height,
            danmaku_option,
        }
    }

    /// Compute the canvas width/height for a page.
    fn dimension(page: &PageInfo) -> (u64, u64) {
        let (width, height) = page.dimension.as_ref().map_or((1280, 720), |d| {
            if d.rotate == 0 {
                (d.width, d.height)
            } else {
                // Rotated video: swap the reported axes.
                (d.height, d.width)
            }
        });
        // For a fixed font size, the canvas size still affects the rendered glyph
        // size; the font appears to scale with the height, so normalize the
        // canvas height to 720 and scale the width to keep the aspect ratio.
        ((720.0 / height as f64 * width as f64) as u64, 720)
    }

    /// Consume the config and produce a canvas with empty floating lanes.
    pub fn canvas(self) -> Canvas {
        let lane_height = self.danmaku_option.lane_size as f64;
        let usable_height = self.danmaku_option.float_percentage * self.height as f64;
        let float_lanes_cnt = (usable_height / lane_height) as usize;
        Canvas {
            config: self,
            float_lanes: vec![None; float_lanes_cnt],
        }
    }
}
/// Layout state for one video: the config plus the occupancy of each floating lane.
pub struct Canvas {
    pub config: CanvasConfig,
    // One entry per lane; `None` means the lane has never been used.
    pub float_lanes: Vec<Option<Lane>>,
}
impl Canvas {
    /// Try to place a danmaku on the canvas. `Ok(None)` means it was dropped:
    /// either it starts before 0 after applying the time offset, or no lane
    /// frees up within the tolerated delay.
    pub fn draw(&mut self, mut danmu: Danmu) -> Result<Option<Drawable>> {
        danmu.timeline_s += self.config.danmaku_option.time_offset;
        if danmu.timeline_s < 0.0 {
            return Ok(None);
        }
        match danmu.r#type {
            DanmuType::Float => Ok(self.draw_float(danmu)),
            DanmuType::Bottom | DanmuType::Top | DanmuType::Reverse => {
                // Fixed-position danmaku are disliked: force everything to Float.
                // This is a feature, not a bug.
                danmu.r#type = DanmuType::Float;
                Ok(self.draw_float(danmu))
            }
        }
    }

    // Find a lane for a floating danmaku: an unused lane first, then any lane it
    // won't collide in, otherwise the lane requiring the smallest delay (< 1 s).
    fn draw_float(&mut self, mut danmu: Danmu) -> Option<Drawable> {
        let mut collisions = Vec::with_capacity(self.float_lanes.len());
        for (idx, lane) in self.float_lanes.iter_mut().enumerate() {
            match lane {
                // Prefer a lane that has never been used.
                None => {
                    return Some(self.draw_float_in_lane(danmu, idx));
                }
                Some(l) => {
                    let col = l.available_for(&danmu, &self.config);
                    match col {
                        Collision::Separate | Collision::NotEnoughTime => {
                            return Some(self.draw_float_in_lane(danmu, idx));
                        }
                        Collision::Collide { time_needed } => {
                            collisions.push((FloatOrd(time_needed), idx));
                        }
                    }
                }
            }
        }
        // Allow some danmaku to be slightly delayed and then placed.
        if !collisions.is_empty() {
            collisions.sort_unstable();
            let (FloatOrd(time_need), lane_idx) = collisions[0];
            if time_need < 1.0 {
                debug!("延迟弹幕 {} 秒", time_need);
                // At most 1 s of artificial delay is tolerated.
                danmu.timeline_s += time_need + 0.01; // keep a small extra margin
                return Some(self.draw_float_in_lane(danmu, lane_idx));
            }
        }
        debug!("skipping danmu: {}", danmu.content);
        None
    }

    // Commit the danmaku to the chosen lane and emit its drawable: a move effect
    // from the right edge of the canvas to fully past the left edge.
    fn draw_float_in_lane(&mut self, danmu: Danmu, lane_idx: usize) -> Drawable {
        self.float_lanes[lane_idx] = Some(Lane::draw(&danmu, &self.config));
        let y = lane_idx as i32 * self.config.danmaku_option.lane_size as i32;
        let l = danmu.length(&self.config);
        Drawable::new(
            danmu,
            self.config.danmaku_option.duration,
            "Float",
            DrawEffect::Move {
                start: (self.config.width as i32, y),
                end: (-(l as i32), y),
            },
        )
    }
}

View File

@@ -0,0 +1,54 @@
//! 一个弹幕实例,但是没有位置信息
use anyhow::{bail, Result};
use crate::bilibili::danmaku::canvas::CanvasConfig;
/// Position/behavior class of a danmaku. `Float` (scrolling) is the default and
/// is also what unsupported modes collapse to via `unwrap_or_default`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum DanmuType {
    #[default]
    Float,
    Top,
    Bottom,
    Reverse,
}
impl DanmuType {
    /// Map the numeric danmaku mode from the Bilibili protobuf onto a `DanmuType`.
    ///
    /// Advanced/code danmaku and other unknown modes are unsupported: an error is
    /// returned here and callers treat them as `Float` via `unwrap_or_default`.
    pub fn from_num(num: i32) -> Result<Self> {
        let parsed = match num {
            1 => Self::Float,
            4 => Self::Bottom,
            5 => Self::Top,
            6 => Self::Reverse,
            _ => bail!("UnSupported danmu type"),
        };
        Ok(parsed)
    }
}
/// One danmaku instance, without any layout/position information yet.
#[derive(Debug, Clone, PartialEq, Default)]
pub struct Danmu {
    // Appearance time on the video timeline, in seconds.
    pub timeline_s: f64,
    pub content: String,
    pub r#type: DanmuType,
    // The per-danmaku fontsize is carried along but rendering actually uses the
    // canvas config's font size, so glyphs don't change with resolution.
    pub fontsize: u32,
    pub rgb: (u8, u8, u8),
}
impl Danmu {
    /// Estimate the danmaku's rendered "pixel length", scaled by `width_ratio`.
    ///
    /// CJK characters count as one full width, ASCII as 2/3 of a width
    /// (tallied in thirds, then divided back down).
    pub fn length(&self, config: &CanvasConfig) -> f64 {
        let thirds: u32 = self
            .content
            .chars()
            .map(|ch| if ch.is_ascii() { 2 } else { 3 })
            .sum();
        let pts = config.danmaku_option.font_size * thirds / 3;
        f64::from(pts) * config.danmaku_option.width_ratio
    }
}

View File

@@ -0,0 +1,28 @@
//! 可以绘制的实体
use crate::bilibili::danmaku::Danmu;
/// 弹幕开始绘制的时间就是 danmu 的时间
pub struct Drawable {
pub danmu: Danmu,
/// 弹幕一共绘制的时间
pub duration: f64,
/// 弹幕的绘制 style
pub style_name: &'static str,
/// 绘制的“特效”
pub effect: DrawEffect,
}
impl Drawable {
    /// Bundle a danmaku with its display duration, ASS style name and effect.
    pub fn new(danmu: Danmu, duration: f64, style_name: &'static str, effect: DrawEffect) -> Self {
        Drawable {
            danmu,
            duration,
            style_name,
            effect,
        }
    }
}
/// Rendering effect for a drawable; `Move` scrolls from `start` to `end`
/// (canvas coordinates, rendered as an ASS `\move` tag).
pub enum DrawEffect {
    Move { start: (i32, i32), end: (i32, i32) },
}

View File

@@ -0,0 +1,13 @@
// Danmaku pipeline: protobuf model -> canvas layout -> ASS output.
mod ass_writer;
mod canvas;
mod danmu;
mod drawable;
mod model;
mod writer;
// Re-export the types consumed by the rest of the crate.
pub use ass_writer::AssWriter;
pub use canvas::DanmakuOption;
pub use danmu::Danmu;
pub use drawable::{DrawEffect, Drawable};
pub use model::{DanmakuElem, DmSegMobileReply};
pub use writer::DanmakuWriter;

View File

@@ -0,0 +1,84 @@
//! 出于减少编译引入考虑,直接翻译了一下 pb不引入 prost-build
//!
//! 可以看旁边的 dm.proto
use prost::Message;
use crate::bilibili::danmaku::danmu::{Danmu, DanmuType};
/// Danmaku protobuf definition (hand-translated to avoid a prost-build dependency).
#[derive(Clone, Message)]
pub struct DanmakuElem {
    /// danmaku id (dmid)
    #[prost(int64, tag = "1")]
    pub id: i64,
    /// appearance position on the video timeline (ms)
    #[prost(int32, tag = "2")]
    pub progress: i32,
    /// danmaku type/mode
    #[prost(int32, tag = "3")]
    pub mode: i32,
    /// font size
    #[prost(int32, tag = "4")]
    pub fontsize: i32,
    /// colour, packed as 0xRRGGBB
    #[prost(uint32, tag = "5")]
    pub color: u32,
    /// sender mid hash
    #[prost(string, tag = "6")]
    pub mid_hash: String,
    /// danmaku text
    #[prost(string, tag = "7")]
    pub content: String,
    /// send time
    #[prost(int64, tag = "8")]
    pub ctime: i64,
    /// danmaku weight
    #[prost(int32, tag = "9")]
    pub weight: i32,
    /// action (semantics unclear upstream)
    #[prost(string, tag = "10")]
    pub action: String,
    /// danmaku pool
    #[prost(int32, tag = "11")]
    pub pool: i32,
    /// danmaku dmid as a string
    #[prost(string, tag = "12")]
    pub dmid_str: String,
    /// danmaku attribute bits
    #[prost(int32, tag = "13")]
    pub attr: i32,
}
/// One danmaku segment reply: a flat list of elements.
#[derive(Clone, Message)]
pub struct DmSegMobileReply {
    #[prost(message, repeated, tag = "1")]
    pub elems: Vec<DanmakuElem>,
}
impl From<DanmakuElem> for Danmu {
    /// Convert a protobuf element into the internal `Danmu` representation:
    /// ms -> seconds, packed 0xRRGGBB -> channel tuple, unknown modes -> Float.
    fn from(elem: DanmakuElem) -> Self {
        let red = ((elem.color >> 16) & 0xFF) as u8;
        let green = ((elem.color >> 8) & 0xFF) as u8;
        let blue = (elem.color & 0xFF) as u8;
        Self {
            timeline_s: elem.progress as f64 / 1000.0,
            content: elem.content,
            r#type: DanmuType::from_num(elem.mode).unwrap_or_default(),
            fontsize: elem.fontsize as u32,
            rgb: (red, green, blue),
        }
    }
}

View File

@@ -0,0 +1,37 @@
use std::path::PathBuf;
use anyhow::Result;
use tokio::fs::{self, File};
use crate::bilibili::danmaku::canvas::CanvasConfig;
use crate::bilibili::danmaku::{AssWriter, Danmu};
use crate::bilibili::PageInfo;
use crate::config::CONFIG;
/// Writes the danmaku of one page out as an ASS subtitle file.
pub struct DanmakuWriter<'a> {
    page: &'a PageInfo,
    danmaku: Vec<Danmu>,
}
impl<'a> DanmakuWriter<'a> {
    /// Bundle a page with the danmaku that should be written for it.
    pub fn new(page: &'a PageInfo, danmaku: Vec<Danmu>) -> Self {
        Self { page, danmaku }
    }

    /// Lay every danmaku out on a canvas and stream the result as an ASS file
    /// at `path`, creating parent directories as needed. Danmaku the canvas
    /// cannot place (returns `None`) are skipped.
    pub async fn write(self, path: PathBuf) -> Result<()> {
        match path.parent() {
            Some(parent) => fs::create_dir_all(parent).await?,
            None => {}
        }
        let canvas_config = CanvasConfig::new(&CONFIG.danmaku_option, self.page);
        let file = File::create(path).await?;
        let mut ass_writer = AssWriter::construct(file, self.page.name.clone(), canvas_config.clone()).await?;
        let mut canvas = canvas_config.canvas();
        for danmu in self.danmaku {
            let placed = canvas.draw(danmu)?;
            if let Some(drawable) = placed {
                ass_writer.write(drawable).await?;
            }
        }
        ass_writer.flush().await?;
        Ok(())
    }
}

View File

@@ -0,0 +1,9 @@
use thiserror::Error;
/// Errors produced by the Bilibili API layer.
#[derive(Error, Debug)]
pub enum BiliError {
    /// The response indicates Bilibili's risk control was triggered.
    #[error("risk control occurred")]
    RiskControlOccurred,
    /// The API returned a non-zero status code together with a message.
    #[error("request failed, status code: {0}, message: {1}")]
    RequestFailed(i64, String),
}

View File

@@ -0,0 +1,116 @@
use anyhow::Result;
use async_stream::stream;
use chrono::serde::ts_seconds;
use chrono::{DateTime, Utc};
use futures::Stream;
use serde_json::Value;
use crate::bilibili::{BiliClient, Validate};
/// Handle for querying one favorite folder through a shared `BiliClient`.
pub struct FavoriteList<'a> {
    client: &'a BiliClient,
    // Favorite folder id (sent as `media_id` in API requests).
    fid: String,
}
/// Metadata of a favorite folder, deserialized from the `data` field of the
/// folder-info endpoint.
#[derive(Debug, serde::Deserialize)]
pub struct FavoriteListInfo {
    pub id: i64,
    pub title: String,
}
/// A single video entry inside a favorite folder, deserialized from the
/// resource-list endpoint.
#[derive(Debug, serde::Deserialize)]
pub struct VideoInfo {
    pub title: String,
    // The upstream field is named `type`, which is a Rust keyword.
    #[serde(rename = "type")]
    pub vtype: i32,
    pub bvid: String,
    pub intro: String,
    pub cover: String,
    pub upper: Upper,
    // The three timestamps below are transmitted as Unix seconds.
    #[serde(with = "ts_seconds")]
    pub ctime: DateTime<Utc>,
    #[serde(with = "ts_seconds")]
    pub fav_time: DateTime<Utc>,
    #[serde(with = "ts_seconds")]
    pub pubtime: DateTime<Utc>,
    pub attr: i32,
}
/// Uploader ("up 主") information attached to a video entry.
#[derive(Debug, serde::Deserialize)]
pub struct Upper {
    pub mid: i64,
    pub name: String,
    // Uploader avatar (presumably a URL; it is fetched as the "upper face" elsewhere).
    pub face: String,
}
impl<'a> FavoriteList<'a> {
    pub fn new(client: &'a BiliClient, fid: String) -> Self {
        Self { client, fid }
    }

    /// Fetch the favorite folder's metadata (id and title).
    pub async fn get_info(&self) -> Result<FavoriteListInfo> {
        let mut res = self
            .client
            .request(reqwest::Method::GET, "https://api.bilibili.com/x/v3/fav/folder/info")
            .query(&[("media_id", &self.fid)])
            .send()
            .await?
            .error_for_status()?
            .json::<serde_json::Value>()
            .await?
            .validate()?;
        Ok(serde_json::from_value(res["data"].take())?)
    }

    /// Fetch one page (20 items, most recently favorited first) of the folder's videos.
    async fn get_videos(&self, page: u32) -> Result<Value> {
        self.client
            .request(reqwest::Method::GET, "https://api.bilibili.com/x/v3/fav/resource/list")
            .query(&[
                ("media_id", self.fid.as_str()),
                ("pn", &page.to_string()),
                ("ps", "20"),
                ("order", "mtime"),
                ("type", "0"),
                ("tid", "0"),
            ])
            .send()
            .await?
            .error_for_status()?
            .json::<serde_json::Value>()
            .await?
            .validate()
    }

    // 拿到收藏夹的所有权,返回一个收藏夹下的视频流
    /// Consume the favorite list and return a stream of all its videos,
    /// transparently paging through the API. The stream ends when the API
    /// reports no further pages, or early on any request/parse error
    /// (which is logged rather than propagated).
    pub fn into_video_stream(self) -> impl Stream<Item = VideoInfo> + 'a {
        stream! {
            let mut page = 1;
            loop {
                let mut videos = match self.get_videos(page).await {
                    Ok(v) => v,
                    Err(e) => {
                        error!("failed to get videos of favorite {} page {}: {}", self.fid, page, e);
                        break;
                    },
                };
                if !videos["data"]["medias"].is_array() {
                    warn!("no medias found in favorite {} page {}", self.fid, page);
                    break;
                }
                let videos_info = match serde_json::from_value::<Vec<VideoInfo>>(videos["data"]["medias"].take()) {
                    Ok(v) => v,
                    Err(e) => {
                        error!("failed to parse videos of favorite {} page {}: {}", self.fid, page, e);
                        break;
                    },
                };
                for video_info in videos_info {
                    yield video_info;
                }
                // Continue only when the API explicitly reports more pages; a
                // missing or non-boolean `has_more` field terminates the stream.
                if videos["data"]["has_more"].as_bool().unwrap_or(false) {
                    page += 1;
                    continue;
                }
                break;
            }
        }
    }
}

View File

@@ -0,0 +1,37 @@
pub use analyzer::{BestStream, FilterOption};
use anyhow::{bail, Result};
pub use client::{BiliClient, Client};
pub use credential::Credential;
pub use danmaku::DanmakuOption;
pub use error::BiliError;
pub use favorite_list::{FavoriteList, FavoriteListInfo, VideoInfo};
pub use video::{Dimension, PageInfo, Video};
mod analyzer;
mod client;
mod credential;
mod danmaku;
mod error;
mod favorite_list;
mod video;
/// Validation of raw Bilibili API responses: inspects the embedded
/// `code`/`message` pair and converts failures into errors.
pub(crate) trait Validate {
    type Output;
    fn validate(self) -> Result<Self::Output>;
}
impl Validate for serde_json::Value {
    type Output = serde_json::Value;

    /// Ensure an API response carries `code == 0`; return the value unchanged
    /// on success, or a `BiliError::RequestFailed` carrying code and message.
    fn validate(self) -> Result<Self::Output> {
        let code = self["code"].as_i64();
        let message = self["message"].as_str().map(str::to_owned);
        let (Some(code), Some(message)) = (code, message) else {
            bail!("no code or message found");
        };
        if code == 0 {
            Ok(self)
        } else {
            bail!(BiliError::RequestFailed(code, message));
        }
    }
}

View File

@@ -0,0 +1,166 @@
use anyhow::{bail, Result};
use futures::stream::FuturesUnordered;
use futures::TryStreamExt;
use prost::Message;
use reqwest::Method;
use crate::bilibili::analyzer::PageAnalyzer;
use crate::bilibili::client::BiliClient;
use crate::bilibili::danmaku::{DanmakuElem, DanmakuWriter, DmSegMobileReply};
use crate::bilibili::Validate;
// Parameters of the public BV -> AV id conversion algorithm. These are fixed
// compile-time values, so `const` is the idiomatic choice over `static`.
const MASK_CODE: u64 = 2251799813685247; // 2^51 - 1: keeps the low 51 bits of the decoded value
const XOR_CODE: u64 = 23442827791579; // XOR key applied after masking
const BASE: u64 = 58; // the BV id payload is a base-58 number
// Custom base-58 alphabet used by BV ids; a character's index is its digit value.
const DATA: &[char] = &[
    'F', 'c', 'w', 'A', 'P', 'N', 'K', 'T', 'M', 'u', 'g', '3', 'G', 'V', '5', 'L', 'j', '7', 'E', 'J', 'n', 'H', 'p',
    'W', 's', 'x', '4', 't', 'b', '8', 'h', 'a', 'Y', 'e', 'v', 'i', 'q', 'B', 'z', '6', 'r', 'k', 'C', 'y', '1', '2',
    'm', 'U', 'S', 'D', 'Q', 'X', '9', 'R', 'd', 'o', 'Z', 'f',
];
/// Handle for querying a single video; keeps both the aid and the bvid since
/// different endpoints are queried with different identifiers.
pub struct Video<'a> {
    client: &'a BiliClient,
    pub aid: String,
    pub bvid: String,
}
/// A single video tag; only the tag name is kept.
#[derive(Debug, serde::Deserialize)]
pub struct Tag {
    pub tag_name: String,
}
impl serde::Serialize for Tag {
fn serialize<S>(&self, serializer: S) -> core::result::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.tag_name)
}
}
/// One page ("part") of a video, as returned by the pagelist endpoint.
#[derive(Debug, serde::Deserialize, Default)]
pub struct PageInfo {
    pub cid: i64,
    pub page: i32,
    // The API names this field `part`; it is the page's title.
    #[serde(rename = "part")]
    pub name: String,
    // Duration (presumably seconds — each 360-unit chunk maps to one danmaku segment).
    pub duration: u32,
    pub first_frame: Option<String>,
    pub dimension: Option<Dimension>,
}
/// Raw video dimensions as reported by the API.
#[derive(Debug, serde::Deserialize, Default)]
pub struct Dimension {
    pub width: u32,
    pub height: u32,
    pub rotate: u32,
}
impl<'a> Video<'a> {
    /// Create a handle for `bvid`; the matching aid is derived locally via
    /// `bvid_to_aid` instead of an extra API round-trip.
    pub fn new(client: &'a BiliClient, bvid: String) -> Self {
        let aid = bvid_to_aid(&bvid).to_string();
        Self { client, aid, bvid }
    }
    /// Fetch the list of pages (parts) of this video.
    pub async fn get_pages(&self) -> Result<Vec<PageInfo>> {
        let mut res = self
            .client
            .request(Method::GET, "https://api.bilibili.com/x/player/pagelist")
            .query(&[("aid", &self.aid), ("bvid", &self.bvid)])
            .send()
            .await?
            .error_for_status()?
            .json::<serde_json::Value>()
            .await?
            .validate()?;
        Ok(serde_json::from_value(res["data"].take())?)
    }
    /// Fetch the tags attached to this video.
    pub async fn get_tags(&self) -> Result<Vec<Tag>> {
        let mut res = self
            .client
            .request(Method::GET, "https://api.bilibili.com/x/web-interface/view/detail/tag")
            .query(&[("aid", &self.aid), ("bvid", &self.bvid)])
            .send()
            .await?
            .error_for_status()?
            .json::<serde_json::Value>()
            .await?
            .validate()?;
        Ok(serde_json::from_value(res["data"].take())?)
    }
    /// Download every danmaku segment of `page` concurrently, merge and sort
    /// the result by appearance time, and wrap it in a `DanmakuWriter`.
    pub async fn get_danmaku_writer(&self, page: &'a PageInfo) -> Result<DanmakuWriter> {
        let tasks = FuturesUnordered::new();
        // Segments are requested per 360 units of `duration` (ceiling division).
        for i in 1..=(page.duration + 359) / 360 {
            tasks.push(self.get_danmaku_segment(page, i as i64));
        }
        let result: Vec<Vec<DanmakuElem>> = tasks.try_collect().await?;
        let mut result: Vec<DanmakuElem> = result.into_iter().flatten().collect();
        result.sort_by_key(|d| d.progress);
        Ok(DanmakuWriter::new(page, result.into_iter().map(|x| x.into()).collect()))
    }
    /// Fetch one protobuf danmaku segment (1-based index) of a page.
    async fn get_danmaku_segment(&self, page: &PageInfo, segment_idx: i64) -> Result<Vec<DanmakuElem>> {
        let mut res = self
            .client
            .request(Method::GET, "http://api.bilibili.com/x/v2/dm/web/seg.so")
            .query(&[("type", 1), ("oid", page.cid), ("segment_index", segment_idx)])
            .send()
            .await?
            .error_for_status()?;
        let headers = std::mem::take(res.headers_mut());
        let content_type = headers.get("content-type");
        // A non-protobuf content type likely carries an error payload; include
        // the body in the error to aid debugging.
        if !content_type.is_some_and(|v| v == "application/octet-stream") {
            bail!(
                "unexpected content type: {:?}, body: {:?}",
                content_type,
                res.text().await
            );
        }
        Ok(DmSegMobileReply::decode(res.bytes().await?)?.elems)
    }
    /// Request the play-url info for a page and hand the raw `data` object to
    /// `PageAnalyzer` for stream selection.
    pub async fn get_page_analyzer(&self, page: &PageInfo) -> Result<PageAnalyzer> {
        let mut res = self
            .client
            .request(Method::GET, "https://api.bilibili.com/x/player/wbi/playurl")
            .query(&[
                ("avid", self.aid.as_str()),
                ("cid", page.cid.to_string().as_str()),
                ("qn", "127"),
                ("otype", "json"),
                ("fnval", "4048"),
                ("fourk", "1"),
            ])
            .send()
            .await?
            .error_for_status()?
            .json::<serde_json::Value>()
            .await?
            .validate()?;
        Ok(PageAnalyzer::new(res["data"].take()))
    }
}
/// Decode a BV id into the corresponding numeric AV id.
/// Panics if `bvid` is shorter than 10 characters or contains a character
/// outside the custom base-58 alphabet (callers pass well-formed BV ids).
fn bvid_to_aid(bvid: &str) -> u64 {
    let mut chars: Vec<char> = bvid.chars().collect();
    // Undo the character shuffling applied when the id was encoded.
    chars.swap(3, 9);
    chars.swap(4, 7);
    // Interpret everything after the "BV1" prefix as a base-58 number.
    let decoded = chars
        .into_iter()
        .skip(3)
        .fold(0u64, |acc, c| acc * BASE + DATA.iter().position(|&x| x == c).unwrap() as u64);
    (decoded & MASK_CODE) ^ XOR_CODE
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `bvid_to_aid` is a pure synchronous function, so a plain `#[test]`
    /// suffices — no need to spin up a tokio runtime.
    #[test]
    fn test_bvid_to_aid() {
        assert_eq!(bvid_to_aid("BV1Tr421n746"), 1401752220u64);
        assert_eq!(bvid_to_aid("BV1sH4y1s7fe"), 1051892992u64);
    }
}

View File

@@ -0,0 +1,140 @@
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::Arc;
use anyhow::Result;
use arc_swap::ArcSwapOption;
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use crate::bilibili::{Credential, DanmakuOption, FilterOption};
/// Global configuration, loaded lazily on first access.
///
/// A missing config file falls back to the defaults; any other load error
/// (unreadable file, invalid TOML, ...) aborts startup.
pub static CONFIG: Lazy<Config> = Lazy::new(|| {
    let config = Config::load().unwrap_or_else(|err| {
        if err
            .downcast_ref::<std::io::Error>()
            .map_or(true, |e| e.kind() != std::io::ErrorKind::NotFound)
        {
            panic!("加载配置文件失败,错误为: {err}");
        }
        warn!("配置文件不存在,使用默认配置...");
        Config::default()
    });
    // Save unconditionally so newly introduced config keys are written back
    // to disk. Use expect instead of a bare unwrap so a failure says why.
    info!("配置加载完毕,覆盖刷新原有配置");
    config.save().expect("保存配置文件失败");
    // Validate the content; `check` panics with details when it is invalid.
    info!("校验配置文件内容...");
    config.check();
    config
});
/// Directory holding `config.toml` and auxiliary data (e.g. the default
/// `upper_face` cache); resolved under the platform config dir.
pub static CONFIG_DIR: Lazy<PathBuf> =
    Lazy::new(|| dirs::config_dir().expect("No config path found").join("bili-sync"));
/// Application configuration, persisted as TOML in `CONFIG_DIR/config.toml`.
#[derive(Serialize, Deserialize)]
pub struct Config {
    // ArcSwapOption allows the credential to be replaced atomically at runtime.
    pub credential: ArcSwapOption<Credential>,
    pub filter_option: FilterOption,
    #[serde(default)]
    pub danmaku_option: DanmakuOption,
    // Maps favorite folder id -> local save directory (must be absolute, see `check`).
    pub favorite_list: HashMap<String, PathBuf>,
    // Handlebars templates used to name video and page files.
    pub video_name: Cow<'static, str>,
    pub page_name: Cow<'static, str>,
    // Interval between scan rounds (presumably seconds — confirm against the scheduler).
    pub interval: u64,
    // Directory for uploader avatars; must be absolute (see `check`).
    pub upper_path: PathBuf,
    #[serde(default)]
    pub nfo_time_type: NFOTimeType,
}
/// Which timestamp is written into generated NFO files.
#[derive(Serialize, Deserialize, Default)]
#[serde(rename_all = "lowercase")]
pub enum NFOTimeType {
    /// Time the video was added to the favorite folder (default).
    #[default]
    FavTime,
    /// Time the video was published.
    PubTime,
}
impl Default for Config {
    /// Defaults used when no config file exists yet; `check` will still flag
    /// the empty credential fields and empty favorite list to guide the user.
    fn default() -> Self {
        Self {
            credential: ArcSwapOption::from(Some(Arc::new(Credential::default()))),
            filter_option: FilterOption::default(),
            danmaku_option: DanmakuOption::default(),
            favorite_list: HashMap::new(),
            video_name: Cow::Borrowed("{{title}}"),
            page_name: Cow::Borrowed("{{bvid}}"),
            interval: 1200,
            upper_path: CONFIG_DIR.join("upper_face"),
            nfo_time_type: NFOTimeType::FavTime,
        }
    }
}
impl Config {
    /// Basic sanity check: logs every problem found and panics if any exists,
    /// so the program never runs with a configuration known to be broken.
    pub fn check(&self) {
        let mut ok = true;
        if self.favorite_list.is_empty() {
            ok = false;
            error!("未设置需监听的收藏夹,程序空转没有意义");
        }
        for path in self.favorite_list.values() {
            if !path.is_absolute() {
                ok = false;
                error!("收藏夹保存的路径应为绝对路径,检测到: {}", path.display());
            }
        }
        if !self.upper_path.is_absolute() {
            ok = false;
            error!("up 主头像保存的路径应为绝对路径");
        }
        if self.video_name.is_empty() {
            ok = false;
            error!("未设置 video_name 模板");
        }
        if self.page_name.is_empty() {
            ok = false;
            error!("未设置 page_name 模板");
        }
        let credential = self.credential.load();
        match credential.as_deref() {
            Some(credential) => {
                // Every credential field must be non-empty.
                if credential.sessdata.is_empty()
                    || credential.bili_jct.is_empty()
                    || credential.buvid3.is_empty()
                    || credential.dedeuserid.is_empty()
                    || credential.ac_time_value.is_empty()
                {
                    ok = false;
                    error!("Credential 信息不完整,请确保填写完整");
                }
            }
            None => {
                ok = false;
                error!("未设置 Credential 信息");
            }
        }
        if !ok {
            panic!(
                "位于 {} 的配置文件不合法,请参考提示信息修复后继续运行",
                CONFIG_DIR.join("config.toml").display()
            );
        }
    }
    /// Read and parse `config.toml`; a NotFound I/O error here triggers the
    /// default-config fallback in `CONFIG`.
    fn load() -> Result<Self> {
        let config_path = CONFIG_DIR.join("config.toml");
        let config_content = std::fs::read_to_string(config_path)?;
        Ok(toml::from_str(&config_content)?)
    }
    /// Serialize the current configuration back to `config.toml`, creating
    /// the config directory first if necessary.
    pub fn save(&self) -> Result<()> {
        let config_path = CONFIG_DIR.join("config.toml");
        std::fs::create_dir_all(&*CONFIG_DIR)?;
        std::fs::write(config_path, toml::to_string_pretty(self)?)?;
        Ok(())
    }
}

View File

@@ -0,0 +1,704 @@
use std::collections::HashMap;
use std::env::{args, var};
use std::path::{Path, PathBuf};
use std::pin::Pin;
use anyhow::{bail, Result};
use bili_sync_entity::{favorite, page, video};
use filenamify::filenamify;
use futures::stream::{FuturesOrdered, FuturesUnordered};
use futures::{pin_mut, Future, StreamExt};
use once_cell::sync::Lazy;
use sea_orm::entity::prelude::*;
use sea_orm::ActiveValue::Set;
use sea_orm::TransactionTrait;
use serde_json::json;
use tokio::fs;
use tokio::sync::{Mutex, Semaphore};
use crate::bilibili::{BestStream, BiliClient, BiliError, Dimension, FavoriteList, PageInfo, Video};
use crate::config::CONFIG;
use crate::core::status::{PageStatus, VideoStatus};
use crate::core::utils::{
create_video_pages, create_videos, exist_labels, filter_unfilled_videos, handle_favorite_info, total_video_count,
unhandled_videos_pages, update_pages_model, update_videos_model, ModelWrapper, NFOMode, NFOSerializer, TEMPLATE,
};
use crate::downloader::Downloader;
use crate::error::{DownloadAbortError, ProcessPageError};
/// Scan-only mode: refresh metadata but skip all downloads. Enabled by the
/// `SCAN_ONLY` environment variable (any value) or the `--scan-only` CLI flag.
pub static SCAN_ONLY: Lazy<bool> = Lazy::new(|| var("SCAN_ONLY").is_ok() || args().any(|arg| arg == "--scan-only"));
/// Process one favorite folder: refresh its metadata and video list, fill in
/// missing video details, then — unless scan-only mode is enabled — download
/// everything that is still pending.
pub async fn process_favorite_list(
    bili_client: &BiliClient,
    fid: &str,
    path: &Path,
    connection: &DatabaseConnection,
) -> Result<()> {
    let favorite = refresh_favorite_list(bili_client, fid, path, connection).await?;
    let favorite = fetch_video_details(bili_client, favorite, connection).await?;
    if *SCAN_ONLY {
        warn!("已开启仅扫描模式,跳过视频下载...");
        Ok(())
    } else {
        download_unprocessed_videos(bili_client, favorite, connection).await
    }
}
/// Fetch the favorite folder's metadata, scan its video feed, and insert any
/// newly-favorited videos into the database. Returns the favorite's Model.
pub async fn refresh_favorite_list(
    bili_client: &BiliClient,
    fid: &str,
    path: &Path,
    connection: &DatabaseConnection,
) -> Result<favorite::Model> {
    let bili_favorite_list = FavoriteList::new(bili_client, fid.to_owned());
    let favorite_list_info = bili_favorite_list.get_info().await?;
    let favorite_model = handle_favorite_info(&favorite_list_info, path, connection).await?;
    info!("开始扫描收藏夹: {} - {}...", favorite_model.f_id, favorite_model.name);
    // Process videos in chunks of ten to limit database round-trips.
    let video_stream = bili_favorite_list.into_video_stream().chunks(10);
    pin_mut!(video_stream);
    let mut got_count = 0;
    let total_count = total_video_count(&favorite_model, connection).await?;
    while let Some(videos_info) = video_stream.next().await {
        got_count += videos_info.len();
        let exist_labels = exist_labels(&videos_info, &favorite_model, connection).await?;
        // If any (bvid, fav_time) pair already exists in the database we have
        // reached the point handled by a previous run and can stop early.
        let should_break = videos_info
            .iter()
            .any(|v| exist_labels.contains(&(v.bvid.clone(), v.fav_time.naive_utc())));
        // Persist this chunk of video info before possibly breaking.
        create_videos(&videos_info, &favorite_model, connection).await?;
        if should_break {
            info!("到达上一次处理的位置,提前中止");
            break;
        }
    }
    // Re-count to derive how many of the fetched videos were actually new.
    let total_count = total_video_count(&favorite_model, connection).await? - total_count;
    info!(
        "扫描收藏夹: {} - {} 完成, 获取了 {} 条视频, 其中有 {} 条新视频",
        favorite_model.f_id, favorite_model.name, got_count, total_count
    );
    Ok(favorite_model)
}
/// For every video still missing page/tag data, fetch its page list and tags
/// from the API and persist them. Videos the API reports as -404 (gone) are
/// marked invalid so they are not retried forever.
pub async fn fetch_video_details(
    bili_client: &BiliClient,
    favorite_model: favorite::Model,
    connection: &DatabaseConnection,
) -> Result<favorite::Model> {
    info!(
        "开始获取收藏夹 {} - {} 的视频与分页信息...",
        favorite_model.f_id, favorite_model.name
    );
    let videos_model = filter_unfilled_videos(&favorite_model, connection).await?;
    for video_model in videos_model {
        let bili_video = Video::new(bili_client, video_model.bvid.clone());
        let tags = match bili_video.get_tags().await {
            Ok(tags) => tags,
            Err(e) => {
                error!(
                    "获取视频 {} - {} 的标签失败,错误为:{}",
                    &video_model.bvid, &video_model.name, e
                );
                // -404 means the video no longer exists: flag it invalid.
                if let Some(BiliError::RequestFailed(code, _)) = e.downcast_ref::<BiliError>() {
                    if *code == -404 {
                        let mut video_active_model: video::ActiveModel = video_model.into();
                        video_active_model.valid = Set(false);
                        video_active_model.save(connection).await?;
                    }
                }
                continue;
            }
        };
        let pages_info = match bili_video.get_pages().await {
            Ok(pages) => pages,
            Err(e) => {
                error!(
                    "获取视频 {} - {} 的分页信息失败,错误为:{}",
                    &video_model.bvid, &video_model.name, e
                );
                // Same -404 handling as above, for the page-list request.
                if let Some(BiliError::RequestFailed(code, _)) = e.downcast_ref::<BiliError>() {
                    if *code == -404 {
                        let mut video_active_model: video::ActiveModel = video_model.into();
                        video_active_model.valid = Set(false);
                        video_active_model.save(connection).await?;
                    }
                }
                continue;
            }
        };
        let txn = connection.begin().await?;
        // Write the page rows to the database.
        create_video_pages(&pages_info, &video_model, &txn).await?;
        // Write the single-page flag and tags in the same transaction so the
        // video is never left half-filled.
        let mut video_active_model: video::ActiveModel = video_model.into();
        video_active_model.single_page = Set(Some(pages_info.len() == 1));
        video_active_model.tags = Set(Some(serde_json::to_value(tags).unwrap()));
        video_active_model.save(&txn).await?;
        txn.commit().await?;
    }
    info!(
        "获取收藏夹 {} - {} 的视频与分页信息完成",
        favorite_model.f_id, favorite_model.name
    );
    Ok(favorite_model)
}
/// Download everything still pending for the favorite's videos, processing at
/// most three videos concurrently, and batch status updates to the database.
/// On a risk-control abort the remaining downloads of this favorite are skipped.
pub async fn download_unprocessed_videos(
    bili_client: &BiliClient,
    favorite_model: favorite::Model,
    connection: &DatabaseConnection,
) -> Result<()> {
    info!(
        "开始下载收藏夹: {} - {} 中所有未处理过的视频...",
        favorite_model.f_id, favorite_model.name
    );
    let unhandled_videos_pages = unhandled_videos_pages(&favorite_model, connection).await?;
    // Allow three videos at once (each video fans out into pages and subtasks).
    let semaphore = Semaphore::new(3);
    let downloader = Downloader::new(bili_client.client.clone());
    // One mutex pair per uploader: .0 guards the avatar download, .1 the
    // uploader-nfo generation (see fetch_upper_face / generate_upper_nfo).
    let mut uppers_mutex: HashMap<i64, (Mutex<()>, Mutex<()>)> = HashMap::new();
    for (video_model, _) in &unhandled_videos_pages {
        uppers_mutex.insert(video_model.upper_id, (Mutex::new(()), Mutex::new(())));
    }
    let mut tasks = unhandled_videos_pages
        .into_iter()
        .map(|(video_model, pages_model)| {
            let upper_mutex = uppers_mutex.get(&video_model.upper_id).unwrap();
            download_video_pages(
                bili_client,
                video_model,
                pages_model,
                connection,
                &semaphore,
                &downloader,
                &CONFIG.upper_path,
                upper_mutex,
            )
        })
        .collect::<FuturesUnordered<_>>();
    let mut models = Vec::with_capacity(10);
    while let Some(res) = tasks.next().await {
        match res {
            Ok(model) => {
                models.push(model);
            }
            Err(e) => {
                // Risk control: abandon the rest of this favorite's downloads.
                if e.downcast_ref::<DownloadAbortError>().is_some() {
                    error!("下载视频时触发风控,将终止收藏夹下所有下载任务,等待下一轮执行");
                    break;
                }
            }
        }
        // Flush to the database every ten finished videos.
        if models.len() == 10 {
            update_videos_model(std::mem::replace(&mut models, Vec::with_capacity(10)), connection).await?;
        }
    }
    if !models.is_empty() {
        update_videos_model(models, connection).await?;
    }
    info!(
        "下载收藏夹: {} - {} 中未处理过的视频完成",
        favorite_model.f_id, favorite_model.name
    );
    Ok(())
}
/// Temporary arrangement; the argument list should later be extracted into a context struct.
/// Runs the five video-level tasks (poster, tvshow nfo, uploader avatar,
/// uploader nfo, page dispatch) that are still pending per the packed status,
/// then returns the updated ActiveModel for batched persistence.
#[allow(clippy::too_many_arguments)]
pub async fn download_video_pages(
    bili_client: &BiliClient,
    video_model: video::Model,
    pages: Vec<page::Model>,
    connection: &DatabaseConnection,
    semaphore: &Semaphore,
    downloader: &Downloader,
    upper_path: &Path,
    upper_mutex: &(Mutex<()>, Mutex<()>),
) -> Result<video::ActiveModel> {
    let permit = semaphore.acquire().await;
    if let Err(e) = permit {
        bail!(e);
    }
    let mut status = VideoStatus::new(video_model.download_status);
    // Which of the five subtasks still need to run (false = done or exhausted).
    let seprate_status = status.should_run();
    let base_path = Path::new(&video_model.path);
    let upper_id = video_model.upper_id.to_string();
    // Avatars are sharded into directories by the first character of the uploader id.
    let base_upper_path = upper_path
        .join(upper_id.chars().next().unwrap().to_string())
        .join(upper_id);
    let is_single_page = video_model.single_page.unwrap();
    // For single-page videos the page download covers everything; multi-page
    // videos additionally need the show-level poster and tvshow.nfo.
    let tasks: Vec<Pin<Box<dyn Future<Output = Result<()>>>>> = vec![
        // Video poster + fanart.
        Box::pin(fetch_video_poster(
            seprate_status[0] && !is_single_page,
            &video_model,
            downloader,
            base_path.join("poster.jpg"),
            base_path.join("fanart.jpg"),
        )),
        // Show-level nfo.
        Box::pin(generate_video_nfo(
            seprate_status[1] && !is_single_page,
            &video_model,
            base_path.join("tvshow.nfo"),
        )),
        // Uploader avatar.
        Box::pin(fetch_upper_face(
            seprate_status[2],
            &video_model,
            downloader,
            &upper_mutex.0,
            base_upper_path.join("folder.jpg"),
        )),
        // Uploader nfo.
        Box::pin(generate_upper_nfo(
            seprate_status[3],
            &video_model,
            &upper_mutex.1,
            base_upper_path.join("person.nfo"),
        )),
        // Dispatch the per-page downloads.
        Box::pin(dispatch_download_page(
            seprate_status[4],
            bili_client,
            &video_model,
            pages,
            connection,
            downloader,
        )),
    ];
    let tasks: FuturesOrdered<_> = tasks.into_iter().collect();
    let results: Vec<Result<()>> = tasks.collect().await;
    status.update_status(&results);
    results
        .iter()
        .take(4)
        .zip(["封面", "视频 nfo", "up 主头像", "up 主 nfo"])
        .for_each(|(res, task_name)| match res {
            Ok(_) => info!(
                "处理视频 {} - {} 的 {} 成功",
                &video_model.bvid, &video_model.name, task_name
            ),
            Err(e) => error!(
                "处理视频 {} - {} 的 {} 失败: {}",
                &video_model.bvid, &video_model.name, task_name, e
            ),
        });
    // Only the page-dispatch result (index 4) can carry an abort error that
    // must propagate; other failures stay recorded in the status for retry.
    if let Err(e) = results.into_iter().nth(4).unwrap() {
        if e.downcast_ref::<DownloadAbortError>().is_some() {
            return Err(e);
        }
    }
    let mut video_active_model: video::ActiveModel = video_model.into();
    video_active_model.download_status = Set(status.into());
    Ok(video_active_model)
}
/// Fan out the downloads of a video's pages (two at a time), batch page
/// status updates to the database, and escalate failures to the caller:
/// risk control becomes `DownloadAbortError`, anything else `ProcessPageError`.
pub async fn dispatch_download_page(
    should_run: bool,
    bili_client: &BiliClient,
    video_model: &video::Model,
    pages: Vec<page::Model>,
    connection: &DatabaseConnection,
    downloader: &Downloader,
) -> Result<()> {
    if !should_run {
        return Ok(());
    }
    // Two concurrent page downloads (the vast majority of videos are single-page).
    let child_semaphore = Semaphore::new(2);
    let mut tasks = pages
        .into_iter()
        .map(|page_model| download_page(bili_client, video_model, page_model, &child_semaphore, downloader))
        .collect::<FuturesUnordered<_>>();
    let mut models = Vec::with_capacity(10);
    let (mut should_error, mut is_break) = (false, false);
    while let Some(res) = tasks.next().await {
        match res {
            Ok(model) => {
                if let Set(status) = model.download_status {
                    let status = PageStatus::new(status);
                    if status.should_run().iter().any(|v| *v) {
                        // Some page task is not in a terminal state (success or
                        // retry limit reached), so an error must reach the caller.
                        should_error = true;
                    }
                }
                models.push(model);
            }
            Err(e) => {
                if e.downcast_ref::<DownloadAbortError>().is_some() {
                    should_error = true;
                    is_break = true;
                    break;
                }
            }
        }
        // Flush page rows to the database every ten completions.
        if models.len() == 10 {
            update_pages_model(std::mem::replace(&mut models, Vec::with_capacity(10)), connection).await?;
        }
    }
    if !models.is_empty() {
        update_pages_model(models, connection).await?;
    }
    if should_error {
        if is_break {
            error!(
                "下载视频 {} - {} 的分页时触发风控,将异常向上传递...",
                &video_model.bvid, &video_model.name
            );
            bail!(DownloadAbortError());
        } else {
            error!(
                "下载视频 {} - {} 的分页时出现了错误,将在下一轮尝试重新处理",
                &video_model.bvid, &video_model.name
            );
            bail!(ProcessPageError());
        }
    }
    Ok(())
}
/// Run the four per-page tasks (poster, video, nfo, danmaku) for one page.
/// Single-page videos use a movie-style flat file layout; multi-page videos
/// use a "Season 1" episode layout. Returns the updated page ActiveModel.
pub async fn download_page(
    bili_client: &BiliClient,
    video_model: &video::Model,
    page_model: page::Model,
    semaphore: &Semaphore,
    downloader: &Downloader,
) -> Result<page::ActiveModel> {
    let permit = semaphore.acquire().await;
    if let Err(e) = permit {
        return Err(e.into());
    }
    let mut status = PageStatus::new(page_model.download_status);
    let seprate_status = status.should_run();
    let is_single_page = video_model.single_page.unwrap();
    let base_path = Path::new(&video_model.path);
    // File stem rendered from the user-configured "page" handlebars template.
    let base_name = filenamify(TEMPLATE.render(
        "page",
        &json!({
            "bvid": &video_model.bvid,
            "title": &video_model.name,
            "upper_name": &video_model.upper_name,
            "upper_mid": &video_model.upper_id,
            "ptitle": &page_model.name,
            "pid": page_model.pid,
        }),
    )?);
    let (poster_path, video_path, nfo_path, danmaku_path, fanart_path) = if is_single_page {
        (
            base_path.join(format!("{}-poster.jpg", &base_name)),
            base_path.join(format!("{}.mp4", &base_name)),
            base_path.join(format!("{}.nfo", &base_name)),
            base_path.join(format!("{}.zh-CN.default.ass", &base_name)),
            Some(base_path.join(format!("{}-fanart.jpg", &base_name))),
        )
    } else {
        (
            base_path
                .join("Season 1")
                .join(format!("{} - S01E{:0>2}-thumb.jpg", &base_name, page_model.pid)),
            base_path
                .join("Season 1")
                .join(format!("{} - S01E{:0>2}.mp4", &base_name, page_model.pid)),
            base_path
                .join("Season 1")
                .join(format!("{} - S01E{:0>2}.nfo", &base_name, page_model.pid)),
            base_path
                .join("Season 1")
                .join(format!("{} - S01E{:0>2}.zh-CN.default.ass", &base_name, page_model.pid)),
            // Multi-page videos already get a show-level fanart in the earlier
            // fetch_video_poster step, so no per-episode fanart is needed here.
            None,
        )
    };
    let dimension = if page_model.width.is_some() && page_model.height.is_some() {
        Some(Dimension {
            width: page_model.width.unwrap(),
            height: page_model.height.unwrap(),
            rotate: 0,
        })
    } else {
        None
    };
    let page_info = PageInfo {
        cid: page_model.cid,
        duration: page_model.duration,
        dimension,
        ..Default::default()
    };
    let tasks: Vec<Pin<Box<dyn Future<Output = Result<()>>>>> = vec![
        Box::pin(fetch_page_poster(
            seprate_status[0],
            video_model,
            &page_model,
            downloader,
            poster_path,
            fanart_path,
        )),
        Box::pin(fetch_page_video(
            seprate_status[1],
            bili_client,
            video_model,
            downloader,
            &page_info,
            video_path.clone(),
        )),
        Box::pin(generate_page_nfo(seprate_status[2], video_model, &page_model, nfo_path)),
        Box::pin(fetch_page_danmaku(
            seprate_status[3],
            bili_client,
            video_model,
            &page_info,
            danmaku_path,
        )),
    ];
    let tasks: FuturesOrdered<_> = tasks.into_iter().collect();
    let results: Vec<Result<()>> = tasks.collect().await;
    status.update_status(&results);
    results
        .iter()
        .zip(["封面", "视频", "视频 nfo", "弹幕"])
        .for_each(|(res, task_name)| match res {
            Ok(_) => info!(
                "处理视频 {} - {} 第 {} 页的 {} 成功",
                &video_model.bvid, &video_model.name, page_model.pid, task_name
            ),
            Err(e) => error!(
                "处理视频 {} - {} 第 {} 页的 {} 失败: {}",
                &video_model.bvid, &video_model.name, page_model.pid, task_name, e
            ),
        });
    // Inspect the video-download result (index 1): risk control there makes
    // the caller break out of the whole favorite's download loop.
    if let Err(e) = results.into_iter().nth(1).unwrap() {
        if let Ok(BiliError::RiskControlOccurred) = e.downcast::<BiliError>() {
            bail!(DownloadAbortError());
        }
    }
    let mut page_active_model: page::ActiveModel = page_model.into();
    page_active_model.download_status = Set(status.into());
    page_active_model.path = Set(Some(video_path.to_str().unwrap().to_string()));
    Ok(page_active_model)
}
/// Download the poster for one page and, when a fanart path is given,
/// duplicate the poster there as well.
pub async fn fetch_page_poster(
    should_run: bool,
    video_model: &video::Model,
    page_model: &page::Model,
    downloader: &Downloader,
    poster_path: PathBuf,
    fanart_path: Option<PathBuf>,
) -> Result<()> {
    if !should_run {
        return Ok(());
    }
    // Single-page videos reuse the video cover directly; multi-page videos
    // prefer the per-page image and fall back to the cover when it is absent.
    let url = if video_model.single_page.unwrap() {
        video_model.cover.as_str()
    } else {
        page_model.image.as_deref().unwrap_or(video_model.cover.as_str())
    };
    downloader.fetch(url, &poster_path).await?;
    if let Some(fanart) = fanart_path {
        fs::copy(&poster_path, &fanart).await?;
    }
    Ok(())
}
/// Download the best stream(s) for a page according to the configured filter
/// and produce the final video file: a combined (mixed) or video-only stream
/// is fetched directly, while a separate video+audio pair is fetched to
/// temporary files and then merged.
pub async fn fetch_page_video(
    should_run: bool,
    bili_client: &BiliClient,
    video_model: &video::Model,
    downloader: &Downloader,
    page_info: &PageInfo,
    page_path: PathBuf,
) -> Result<()> {
    if !should_run {
        return Ok(());
    }
    let bili_video = Video::new(bili_client, video_model.bvid.clone());
    let streams = bili_video
        .get_page_analyzer(page_info)
        .await?
        .best_stream(&CONFIG.filter_option)?;
    match streams {
        BestStream::Mixed(mix_stream) => {
            downloader.fetch(mix_stream.url(), &page_path).await?;
        }
        BestStream::VideoAudio {
            video: video_stream,
            audio: None,
        } => {
            downloader.fetch(video_stream.url(), &page_path).await?;
        }
        BestStream::VideoAudio {
            video: video_stream,
            audio: Some(audio_stream),
        } => {
            let (tmp_video_path, tmp_audio_path) = (
                page_path.with_extension("tmp_video"),
                page_path.with_extension("tmp_audio"),
            );
            downloader.fetch(video_stream.url(), &tmp_video_path).await?;
            downloader.fetch(audio_stream.url(), &tmp_audio_path).await?;
            downloader.merge(&tmp_video_path, &tmp_audio_path, &page_path).await?;
        }
    }
    Ok(())
}
/// Download all danmaku of a page and render them to an ASS file at `danmaku_path`.
pub async fn fetch_page_danmaku(
    should_run: bool,
    bili_client: &BiliClient,
    video_model: &video::Model,
    page_info: &PageInfo,
    danmaku_path: PathBuf,
) -> Result<()> {
    if !should_run {
        return Ok(());
    }
    let video = Video::new(bili_client, video_model.bvid.clone());
    let writer = video.get_danmaku_writer(page_info).await?;
    writer.write(danmaku_path).await?;
    Ok(())
}
/// Write the NFO for one page: single-page videos get a movie-style NFO for
/// the whole video, multi-page videos an episode NFO for the page itself.
pub async fn generate_page_nfo(
    should_run: bool,
    video_model: &video::Model,
    page_model: &page::Model,
    nfo_path: PathBuf,
) -> Result<()> {
    if !should_run {
        return Ok(());
    }
    let serializer = if video_model.single_page.unwrap() {
        NFOSerializer(ModelWrapper::Video(video_model), NFOMode::MOVIE)
    } else {
        NFOSerializer(ModelWrapper::Page(page_model), NFOMode::EPOSODE)
    };
    generate_nfo(serializer, nfo_path).await
}
/// Download the video cover as the show poster, then duplicate it as fanart.
pub async fn fetch_video_poster(
    should_run: bool,
    video_model: &video::Model,
    downloader: &Downloader,
    poster_path: PathBuf,
    fanart_path: PathBuf,
) -> Result<()> {
    if !should_run {
        return Ok(());
    }
    downloader.fetch(&video_model.cover, &poster_path).await?;
    fs::copy(&poster_path, &fanart_path).await?;
    Ok(())
}
/// Download the uploader's avatar to `upper_face_path` unless it already
/// exists on disk. The per-uploader mutex serializes concurrent attempts for
/// the same uploader so the file is downloaded at most once.
pub async fn fetch_upper_face(
    should_run: bool,
    video_model: &video::Model,
    downloader: &Downloader,
    upper_face_mutex: &Mutex<()>,
    upper_face_path: PathBuf,
) -> Result<()> {
    if !should_run {
        return Ok(());
    }
    // Hold the lock for the whole check-then-fetch. The guard must be bound
    // to a named variable: the previous `let _ = ...` dropped the guard (and
    // released the lock) immediately, leaving the existence check racy.
    let _guard = upper_face_mutex.lock().await;
    if !upper_face_path.exists() {
        return downloader.fetch(&video_model.upper_face, &upper_face_path).await;
    }
    Ok(())
}
/// Generate the uploader's `person.nfo` unless it already exists on disk.
/// The per-uploader mutex serializes concurrent attempts for the same uploader.
pub async fn generate_upper_nfo(
    should_run: bool,
    video_model: &video::Model,
    upper_nfo_mutex: &Mutex<()>,
    nfo_path: PathBuf,
) -> Result<()> {
    if !should_run {
        return Ok(());
    }
    // Hold the lock for the whole check-then-write. The guard must be bound
    // to a named variable: the previous `let _ = ...` dropped the guard (and
    // released the lock) immediately, leaving the existence check racy.
    let _guard = upper_nfo_mutex.lock().await;
    if !nfo_path.exists() {
        let nfo_serializer = NFOSerializer(ModelWrapper::Video(video_model), NFOMode::UPPER);
        return generate_nfo(nfo_serializer, nfo_path).await;
    }
    Ok(())
}
/// Generate the show-level `tvshow.nfo` (used for multi-page videos).
pub async fn generate_video_nfo(should_run: bool, video_model: &video::Model, nfo_path: PathBuf) -> Result<()> {
    if !should_run {
        return Ok(());
    }
    let nfo_serializer = NFOSerializer(ModelWrapper::Video(video_model), NFOMode::TVSHOW);
    generate_nfo(nfo_serializer, nfo_path).await
}
/// Create the parent directory of `nfo_path`, then render and write the NFO file.
async fn generate_nfo(serializer: NFOSerializer<'_>, nfo_path: PathBuf) -> Result<()> {
    if let Some(parent) = nfo_path.parent() {
        fs::create_dir_all(parent).await?;
    }
    let content = serializer.generate_nfo(&CONFIG.nfo_time_type).await?;
    fs::write(nfo_path, content.as_bytes()).await?;
    Ok(())
}
#[cfg(test)]
mod tests {
    use handlebars::handlebars_helper;

    use super::*;

    /// Sanity-check the handlebars templates used for file naming, including
    /// a custom `truncate` helper that truncates by chars rather than bytes.
    #[test]
    fn test_template_usage() {
        let mut template = handlebars::Handlebars::new();
        handlebars_helper!(truncate: |s: String, len: usize| {
            if s.chars().count() > len {
                s.chars().take(len).collect::<String>()
            } else {
                s.to_string()
            }
        });
        template.register_helper("truncate", Box::new(truncate));
        // Registration errors must fail the test instead of being silently
        // discarded with `let _ = ...`.
        template.register_template_string("video", "test{{bvid}}test").unwrap();
        template
            .register_template_string("test_truncate", "哈哈,{{ truncate title 30 }}")
            .unwrap();
        assert_eq!(
            template.render("video", &json!({"bvid": "BV1b5411h7g7"})).unwrap(),
            "testBV1b5411h7g7test"
        );
        assert_eq!(
            template
                .render(
                    "test_truncate",
                    &json!({"title": "你说得对,但是 Rust 是由 Mozilla 自主研发的一款全新的编译期格斗游戏。\
编译将发生在一个被称作「Cargo」的构建系统中。在这里被引用的指针将被授予「生命周期」之力,导引对象安全。\
你将扮演一位名为「Rustacean」的神秘角色, 在与「Rustc」的搏斗中邂逅各种骨骼惊奇的傲娇报错。\
征服她们、通过编译,同时逐步发掘「C++」程序崩溃的真相。"})
                )
                .unwrap(),
            "哈哈,你说得对,但是 Rust 是由 Mozilla 自主研发的一"
        );
    }
}

View File

@@ -0,0 +1,3 @@
pub mod command;
pub mod status;
pub mod utils;

View File

@@ -0,0 +1,182 @@
use anyhow::Result;
/// Failure count at which a 3-bit task slot stops being retried.
static STATUS_MAX_RETRY: u32 = 0b100;
/// Slot value marking a task as successfully completed.
static STATUS_OK: u32 = 0b111;
/// Download state packed into a single `u32` to avoid extra database columns.
/// Starting from the lowest bits, each task occupies a fixed 3-bit slot that
/// begins at 0b000 and is incremented once per failure up to 0b100 (i.e. at
/// most 4 retries); on success the slot is set to 0b111.
/// Once every task has either succeeded or exhausted its retries, the highest
/// bit is set as a "handled, do not retry" marker.
#[derive(Clone)]
pub struct Status(u32);
impl Status {
    /// Any status >= `1 << 31` is fully handled and needs no retry.
    /// The database can filter pending work with `status < Status::handled()`.
    pub fn handled() -> u32 {
        1 << 31
    }

    fn new(status: u32) -> Self {
        Self(status)
    }

    /// Internal helper that sets or clears the top "handled" flag bit.
    fn set_flag(&mut self, handled: bool) {
        if handled {
            self.0 |= 1 << 31;
        } else {
            self.0 &= !(1 << 31);
        }
    }

    /// Check slots from low to high; `true` means that task should still run.
    fn should_run(&self, size: usize) -> Vec<bool> {
        // Ten 3-bit slots occupy bits 0..=29, leaving bit 31 for the flag, so
        // exactly 10 slots fit (the previous `size < 10` assert contradicted
        // its own message and rejected the tenth storable slot).
        assert!(size <= 10, "u32 can only store 10 status");
        (0..size).map(|x| self.check_continue(x)).collect()
    }

    /// A task may continue while its failure count is below STATUS_MAX_RETRY.
    fn check_continue(&self, offset: usize) -> bool {
        assert!(offset < 10, "u32 can only store 10 status");
        self.get_status(offset) < STATUS_MAX_RETRY
    }

    /// Fold task results into the status: success sets STATUS_OK, failure
    /// increments the slot. When nothing is runnable anymore, mark handled.
    fn update_status(&mut self, result: &[Result<()>]) {
        assert!(result.len() <= 10, "u32 can only store 10 status");
        for (i, res) in result.iter().enumerate() {
            self.set_result(res, i);
        }
        if self.should_run(result.len()).iter().all(|x| !x) {
            // Everything succeeded or ran out of retries; never retry again.
            self.set_flag(true)
        }
    }

    fn set_result(&mut self, result: &Result<()>, offset: usize) {
        if result.is_ok() {
            // A task that already hit the retry limit reports Ok without
            // having run; don't overwrite its exhausted state with success.
            if self.get_status(offset) < STATUS_MAX_RETRY {
                self.set_ok(offset);
            }
        } else {
            self.plus_one(offset);
        }
    }

    /// Clear the slots whose mask entry is `false` (and the handled flag),
    /// forcing those tasks to become runnable again.
    fn set_mask(&mut self, mask: &[bool]) {
        assert!(mask.len() <= 10, "u32 can only store 10 status");
        for (i, &m) in mask.iter().enumerate() {
            if !m {
                self.clear(i);
                self.set_flag(false);
            }
        }
    }

    fn plus_one(&mut self, offset: usize) {
        self.0 += 1 << (3 * offset);
    }

    fn set_ok(&mut self, offset: usize) {
        self.0 |= STATUS_OK << (3 * offset);
    }

    fn clear(&mut self, offset: usize) {
        self.0 &= !(STATUS_OK << (3 * offset));
    }

    /// Extract the 3-bit slot at `offset` as a value in 0..=7.
    fn get_status(&self, offset: usize) -> u32 {
        let helper = !0u32;
        (self.0 & (helper << (offset * 3)) & (helper >> (32 - 3 * offset - 3))) >> (offset * 3)
    }
}
impl From<Status> for u32 {
fn from(status: Status) -> Self {
status.0
}
}
/// Download status of a whole video. From low to high, the five tracked tasks
/// are: video cover, video info, uploader avatar, uploader info, and the
/// per-page downloads.
#[derive(Clone)]
pub struct VideoStatus(Status);
impl VideoStatus {
    pub fn new(status: u32) -> Self {
        Self(Status::new(status))
    }
    /// Clears the fields whose mask entry is `false` so those tasks re-run.
    pub fn set_mask(&mut self, clear: &[bool]) {
        assert!(clear.len() == 5, "VideoStatus should have 5 status");
        self.0.set_mask(clear)
    }
    /// Returns, per task, whether it should still be attempted.
    pub fn should_run(&self) -> Vec<bool> {
        self.0.should_run(5)
    }
    /// Folds one round of task results into the packed status.
    pub fn update_status(&mut self, result: &[Result<()>]) {
        assert!(result.len() == 5, "VideoStatus should have 5 status");
        self.0.update_status(result)
    }
}
impl From<VideoStatus> for u32 {
    fn from(status: VideoStatus) -> Self {
        status.0.into()
    }
}
/// Download status of a single video page. From low to high the tracked tasks
/// include the page cover, the page content and the page info.
/// NOTE(review): the original comment lists only three tasks, but the code
/// below tracks four status slots — confirm what the fourth slot represents.
#[derive(Clone)]
pub struct PageStatus(Status);
impl PageStatus {
    pub fn new(status: u32) -> Self {
        Self(Status::new(status))
    }
    /// Clears the fields whose mask entry is `false` so those tasks re-run.
    pub fn set_mask(&mut self, clear: &[bool]) {
        assert!(clear.len() == 4, "PageStatus should have 4 status");
        self.0.set_mask(clear)
    }
    /// Returns, per task, whether it should still be attempted.
    pub fn should_run(&self) -> Vec<bool> {
        self.0.should_run(4)
    }
    /// Folds one round of task results into the packed status.
    pub fn update_status(&mut self, result: &[Result<()>]) {
        assert!(result.len() == 4, "PageStatus should have 4 status");
        self.0.update_status(result)
    }
}
impl From<PageStatus> for u32 {
    fn from(status: PageStatus) -> Self {
        status.0.into()
    }
}
#[cfg(test)]
mod test {
    use anyhow::anyhow;
    use super::*;
    #[test]
    fn test_status() {
        // Three fresh tasks: everything should run.
        let mut status = Status::new(0);
        assert_eq!(status.should_run(3), vec![true, true, true]);
        // Task 0 keeps failing while tasks 1 and 2 succeed: the lowest field
        // counts failures (1, 2, 3) while the other two fields read 0b111.
        for count in 1..=3 {
            status.update_status(&[Err(anyhow!("")), Ok(()), Ok(())]);
            assert_eq!(status.should_run(3), vec![true, false, false]);
            assert_eq!(u32::from(status.clone()), 0b111_111_000 + count);
        }
        // The fourth failure exhausts the retry budget (0b100) and sets the
        // "handled" flag in the highest bit, so nothing runs anymore.
        status.update_status(&[Err(anyhow!("")), Ok(()), Ok(())]);
        assert_eq!(status.should_run(3), vec![false, false, false]);
        assert_eq!(u32::from(status), 0b111_111_100 | Status::handled());
    }
}

View File

@@ -0,0 +1,585 @@
use std::collections::HashSet;
use std::path::Path;
use anyhow::Result;
use bili_sync_entity::*;
use bili_sync_migration::OnConflict;
use filenamify::filenamify;
use handlebars::handlebars_helper;
use once_cell::sync::Lazy;
use quick_xml::events::{BytesCData, BytesText};
use quick_xml::writer::Writer;
use quick_xml::Error;
use sea_orm::entity::prelude::*;
use sea_orm::ActiveValue::Set;
use sea_orm::QuerySelect;
use serde_json::json;
use tokio::io::AsyncWriteExt;
use crate::bilibili::{FavoriteListInfo, PageInfo, VideoInfo};
use crate::config::{NFOTimeType, CONFIG};
use crate::core::status::Status;
/// Handlebars registry used to render video/page file names from the
/// templates configured in `CONFIG`.
pub static TEMPLATE: Lazy<handlebars::Handlebars> = Lazy::new(|| {
    let mut handlebars = handlebars::Handlebars::new();
    // `take(len)` already yields the whole string when it is shorter than
    // `len`, so no explicit length check is needed.
    handlebars_helper!(truncate: |s: String, len: usize| {
        s.chars().take(len).collect::<String>()
    });
    handlebars.register_helper("truncate", Box::new(truncate));
    handlebars
        .register_template_string("video", &CONFIG.video_name)
        .unwrap();
    handlebars.register_template_string("page", &CONFIG.page_name).unwrap();
    handlebars
});
/// Kinds of NFO documents this module can emit.
#[allow(clippy::upper_case_acronyms)]
pub enum NFOMode {
    MOVIE,
    TVSHOW,
    // NOTE(review): misspelling of "EPISODE"; kept as-is because renaming the
    // variant would break every call site.
    EPOSODE,
    UPPER,
}
/// Borrowed reference to either a video row or a page row, so one serializer
/// can handle both model types.
pub enum ModelWrapper<'a> {
    Video(&'a video::Model),
    Page(&'a page::Model),
}
/// Pairs a model with the NFO flavour that should be generated for it.
pub struct NFOSerializer<'a>(pub ModelWrapper<'a>, pub NFOMode);
/// Upserts the favorite list described by `info` into the database and
/// returns the stored model.
///
/// The row is keyed on `f_id`; on conflict the name and path are refreshed.
pub async fn handle_favorite_info(
    info: &FavoriteListInfo,
    path: &Path,
    connection: &DatabaseConnection,
) -> Result<favorite::Model> {
    favorite::Entity::insert(favorite::ActiveModel {
        f_id: Set(info.id),
        name: Set(info.title.clone()),
        path: Set(path.to_string_lossy().to_string()),
        ..Default::default()
    })
    .on_conflict(
        OnConflict::column(favorite::Column::FId)
            .update_columns([favorite::Column::Name, favorite::Column::Path])
            .to_owned(),
    )
    .exec(connection)
    .await?;
    Ok(favorite::Entity::find()
        .filter(favorite::Column::FId.eq(info.id))
        .one(connection)
        .await?
        // The upsert above guarantees the row exists; a missing row here is a
        // broken invariant, so state it instead of a bare unwrap.
        .expect("favorite list should exist after upsert"))
}
/// Fetches the `(bvid, favtime)` pairs already stored for this favorite list
/// among the incoming `videos_info`.
/// When both bvid and favtime of an incoming entry match a stored pair, the
/// scan has reached the position processed in a previous run.
pub async fn exist_labels(
    videos_info: &[VideoInfo],
    favorite_model: &favorite::Model,
    connection: &DatabaseConnection,
) -> Result<HashSet<(String, DateTime)>> {
    let bvids = videos_info.iter().map(|v| v.bvid.clone()).collect::<Vec<String>>();
    let exist_labels = video::Entity::find()
        .filter(
            video::Column::FavoriteId
                .eq(favorite_model.id)
                .and(video::Column::Bvid.is_in(bvids)),
        )
        // Only the two label columns are needed, so select them as tuples.
        .select_only()
        .columns([video::Column::Bvid, video::Column::Favtime])
        .into_tuple()
        .all(connection)
        .await?
        .into_iter()
        .collect::<HashSet<(String, DateTime)>>();
    Ok(exist_labels)
}
/// Tries to create a video row for every entry in `videos_info`; rows that
/// already exist (same favorite + bvid) are silently ignored.
pub async fn create_videos(
    videos_info: &[VideoInfo],
    favorite: &favorite::Model,
    connection: &DatabaseConnection,
) -> Result<()> {
    let video_models = videos_info
        .iter()
        .map(move |v| video::ActiveModel {
            favorite_id: Set(favorite.id),
            bvid: Set(v.bvid.clone()),
            name: Set(v.title.clone()),
            // The on-disk directory is rendered from the configurable "video"
            // template and sanitised with `filenamify`; the bvid is the
            // fallback name if the template fails to render.
            path: Set(Path::new(&favorite.path)
                .join(filenamify(
                    TEMPLATE
                        .render(
                            "video",
                            &json!({
                                "bvid": &v.bvid,
                                "title": &v.title,
                                "upper_name": &v.upper.name,
                                "upper_mid": &v.upper.mid,
                            }),
                        )
                        .unwrap_or_else(|_| v.bvid.clone()),
                ))
                .to_str()
                .unwrap()
                .to_owned()),
            category: Set(v.vtype),
            intro: Set(v.intro.clone()),
            cover: Set(v.cover.clone()),
            ctime: Set(v.ctime.naive_utc()),
            pubtime: Set(v.pubtime.naive_utc()),
            favtime: Set(v.fav_time.naive_utc()),
            download_status: Set(0),
            // assumes attr == 0 marks a normal, accessible video — TODO confirm
            valid: Set(v.attr == 0),
            tags: Set(None),
            single_page: Set(None),
            upper_id: Set(v.upper.mid),
            upper_name: Set(v.upper.name.clone()),
            upper_face: Set(v.upper.face.clone()),
            ..Default::default()
        })
        .collect::<Vec<video::ActiveModel>>();
    video::Entity::insert_many(video_models)
        .on_conflict(
            OnConflict::columns([video::Column::FavoriteId, video::Column::Bvid])
                .do_nothing()
                .to_owned(),
        )
        .do_nothing()
        .exec(connection)
        .await?;
    Ok(())
}
/// Counts every video row that belongs to the given favorite list.
pub async fn total_video_count(favorite_model: &favorite::Model, connection: &DatabaseConnection) -> Result<u64> {
    let count = video::Entity::find()
        .filter(video::Column::FavoriteId.eq(favorite_model.id))
        .count(connection)
        .await?;
    Ok(count)
}
/// Selects all videos of this favorite list whose detail info has not been
/// filled in yet: still valid, download not started, category 2, and
/// `single_page` still NULL (i.e. page info never fetched).
/// (The original doc comment was truncated mid-sentence.)
pub async fn filter_unfilled_videos(
    favorite_model: &favorite::Model,
    connection: &DatabaseConnection,
) -> Result<Vec<video::Model>> {
    Ok(video::Entity::find()
        .filter(
            video::Column::FavoriteId
                .eq(favorite_model.id)
                .and(video::Column::Valid.eq(true))
                .and(video::Column::DownloadStatus.eq(0))
                .and(video::Column::Category.eq(2))
                .and(video::Column::SinglePage.is_null()),
        )
        .all(connection)
        .await?)
}
/// Creates a page row for every page (分 P) of a video; existing
/// (video_id, pid) rows are silently ignored.
pub async fn create_video_pages(
    pages_info: &[PageInfo],
    video_model: &video::Model,
    connection: &impl ConnectionTrait,
) -> Result<()> {
    let page_models = pages_info
        .iter()
        .map(move |p| {
            let (width, height) = match &p.dimension {
                Some(d) => {
                    // A non-zero `rotate` means the stream is stored rotated,
                    // so width and height are swapped.
                    if d.rotate == 0 {
                        (Some(d.width), Some(d.height))
                    } else {
                        (Some(d.height), Some(d.width))
                    }
                }
                None => (None, None),
            };
            page::ActiveModel {
                video_id: Set(video_model.id),
                cid: Set(p.cid),
                pid: Set(p.page),
                name: Set(p.name.clone()),
                width: Set(width),
                height: Set(height),
                duration: Set(p.duration),
                image: Set(p.first_frame.clone()),
                download_status: Set(0),
                ..Default::default()
            }
        })
        .collect::<Vec<page::ActiveModel>>();
    page::Entity::insert_many(page_models)
        .on_conflict(
            OnConflict::columns([page::Column::VideoId, page::Column::Pid])
                .do_nothing()
                .to_owned(),
        )
        .do_nothing()
        .exec(connection)
        .await?;
    Ok(())
}
/// Fetches every not-yet-fully-handled video of this favorite list together
/// with its pages: valid, page info already filled (`single_page` set), and
/// download status below the "handled" marker.
pub async fn unhandled_videos_pages(
    favorite_model: &favorite::Model,
    connection: &DatabaseConnection,
) -> Result<Vec<(video::Model, Vec<page::Model>)>> {
    Ok(video::Entity::find()
        .filter(
            video::Column::FavoriteId
                .eq(favorite_model.id)
                .and(video::Column::Valid.eq(true))
                .and(video::Column::DownloadStatus.lt(Status::handled()))
                .and(video::Column::Category.eq(2))
                .and(video::Column::SinglePage.is_not_null()),
        )
        .find_with_related(page::Entity)
        .all(connection)
        .await?)
}
/// Writes back the download status of the given video models, using an
/// upsert keyed on the primary id.
pub async fn update_videos_model(videos: Vec<video::ActiveModel>, connection: &DatabaseConnection) -> Result<()> {
    let query = video::Entity::insert_many(videos).on_conflict(
        OnConflict::column(video::Column::Id)
            .update_column(video::Column::DownloadStatus)
            .to_owned(),
    );
    query.exec(connection).await?;
    Ok(())
}
/// Writes back the download status and path of the given page models, using
/// an upsert keyed on the primary id.
pub async fn update_pages_model(pages: Vec<page::ActiveModel>, connection: &DatabaseConnection) -> Result<()> {
    page::Entity::insert_many(pages)
        .on_conflict(
            OnConflict::column(page::Column::Id)
                .update_columns([page::Column::DownloadStatus, page::Column::Path])
                .to_owned(),
        )
        .exec(connection)
        .await?;
    Ok(())
}
/// serde-xml doesn't seem very usable, so the XML is written out by hand.
/// (admittedly long and ugly)
impl<'a> NFOSerializer<'a> {
    /// Renders the NFO document for the wrapped model as a UTF-8 string.
    /// `nfo_time_type` selects whether the favorite time or the publish time
    /// feeds the `<year>`/`<aired>` fields.
    pub async fn generate_nfo(self, nfo_time_type: &NFOTimeType) -> Result<String> {
        // The buffer is pre-seeded with the XML declaration; the writer then
        // appends the document with 4-space indentation.
        let mut buffer = r#"<?xml version="1.0" encoding="utf-8" standalone="yes"?>
"#
        .as_bytes()
        .to_vec();
        let mut tokio_buffer = tokio::io::BufWriter::new(&mut buffer);
        let mut writer = Writer::new_with_indent(&mut tokio_buffer, b' ', 4);
        match self {
            // Whole video rendered as a standalone movie NFO.
            NFOSerializer(ModelWrapper::Video(v), NFOMode::MOVIE) => {
                let nfo_time = match nfo_time_type {
                    NFOTimeType::FavTime => v.favtime,
                    NFOTimeType::PubTime => v.pubtime,
                };
                writer
                    .create_element("movie")
                    .write_inner_content_async::<_, _, Error>(|writer| async move {
                        writer
                            .create_element("plot")
                            .write_cdata_content_async(BytesCData::new(&v.intro))
                            .await
                            .unwrap();
                        writer.create_element("outline").write_empty_async().await.unwrap();
                        writer
                            .create_element("title")
                            .write_text_content_async(BytesText::new(&v.name))
                            .await
                            .unwrap();
                        writer
                            .create_element("actor")
                            .write_inner_content_async::<_, _, Error>(|writer| async move {
                                writer
                                    .create_element("name")
                                    .write_text_content_async(BytesText::new(&v.upper_id.to_string()))
                                    .await
                                    .unwrap();
                                writer
                                    .create_element("role")
                                    .write_text_content_async(BytesText::new(&v.upper_name))
                                    .await
                                    .unwrap();
                                Ok(writer)
                            })
                            .await
                            .unwrap();
                        writer
                            .create_element("year")
                            .write_text_content_async(BytesText::new(&nfo_time.format("%Y").to_string()))
                            .await
                            .unwrap();
                        // Tags are stored as a JSON array column; each becomes a <genre>.
                        if let Some(tags) = &v.tags {
                            let tags: Vec<String> = serde_json::from_value(tags.clone()).unwrap();
                            for tag in tags {
                                writer
                                    .create_element("genre")
                                    .write_text_content_async(BytesText::new(&tag))
                                    .await
                                    .unwrap();
                            }
                        }
                        writer
                            .create_element("uniqueid")
                            .with_attribute(("type", "bilibili"))
                            .write_text_content_async(BytesText::new(&v.bvid))
                            .await
                            .unwrap();
                        writer
                            .create_element("aired")
                            .write_text_content_async(BytesText::new(&nfo_time.format("%Y-%m-%d").to_string()))
                            .await
                            .unwrap();
                        Ok(writer)
                    })
                    .await
                    .unwrap();
            }
            // Multi-page video rendered as a TV show NFO (same fields as the
            // movie arm, only the root element differs).
            NFOSerializer(ModelWrapper::Video(v), NFOMode::TVSHOW) => {
                let nfo_time = match nfo_time_type {
                    NFOTimeType::FavTime => v.favtime,
                    NFOTimeType::PubTime => v.pubtime,
                };
                writer
                    .create_element("tvshow")
                    .write_inner_content_async::<_, _, Error>(|writer| async move {
                        writer
                            .create_element("plot")
                            .write_cdata_content_async(BytesCData::new(&v.intro))
                            .await
                            .unwrap();
                        writer.create_element("outline").write_empty_async().await.unwrap();
                        writer
                            .create_element("title")
                            .write_text_content_async(BytesText::new(&v.name))
                            .await
                            .unwrap();
                        writer
                            .create_element("actor")
                            .write_inner_content_async::<_, _, Error>(|writer| async move {
                                writer
                                    .create_element("name")
                                    .write_text_content_async(BytesText::new(&v.upper_id.to_string()))
                                    .await
                                    .unwrap();
                                writer
                                    .create_element("role")
                                    .write_text_content_async(BytesText::new(&v.upper_name))
                                    .await
                                    .unwrap();
                                Ok(writer)
                            })
                            .await
                            .unwrap();
                        writer
                            .create_element("year")
                            .write_text_content_async(BytesText::new(&nfo_time.format("%Y").to_string()))
                            .await
                            .unwrap();
                        if let Some(tags) = &v.tags {
                            let tags: Vec<String> = serde_json::from_value(tags.clone()).unwrap();
                            for tag in tags {
                                writer
                                    .create_element("genre")
                                    .write_text_content_async(BytesText::new(&tag))
                                    .await
                                    .unwrap();
                            }
                        }
                        writer
                            .create_element("uniqueid")
                            .with_attribute(("type", "bilibili"))
                            .write_text_content_async(BytesText::new(&v.bvid))
                            .await
                            .unwrap();
                        writer
                            .create_element("aired")
                            .write_text_content_async(BytesText::new(&nfo_time.format("%Y-%m-%d").to_string()))
                            .await
                            .unwrap();
                        Ok(writer)
                    })
                    .await
                    .unwrap();
            }
            // Uploader rendered as a person NFO; note it is keyed on upper_id.
            NFOSerializer(ModelWrapper::Video(v), NFOMode::UPPER) => {
                writer
                    .create_element("person")
                    .write_inner_content_async::<_, _, Error>(|writer| async move {
                        writer.create_element("plot").write_empty_async().await.unwrap();
                        writer.create_element("outline").write_empty_async().await.unwrap();
                        writer
                            .create_element("lockdata")
                            .write_text_content_async(BytesText::new("false"))
                            .await
                            .unwrap();
                        writer
                            .create_element("dateadded")
                            .write_text_content_async(BytesText::new(
                                &v.pubtime.format("%Y-%m-%d %H:%M:%S").to_string(),
                            ))
                            .await
                            .unwrap();
                        writer
                            .create_element("title")
                            .write_text_content_async(BytesText::new(&v.upper_id.to_string()))
                            .await
                            .unwrap();
                        writer
                            .create_element("sorttitle")
                            .write_text_content_async(BytesText::new(&v.upper_id.to_string()))
                            .await
                            .unwrap();
                        Ok(writer)
                    })
                    .await
                    .unwrap();
            }
            // Single page rendered as an episode NFO; season is always "1".
            NFOSerializer(ModelWrapper::Page(p), NFOMode::EPOSODE) => {
                writer
                    .create_element("episodedetails")
                    .write_inner_content_async::<_, _, Error>(|writer| async move {
                        writer.create_element("plot").write_empty_async().await.unwrap();
                        writer.create_element("outline").write_empty_async().await.unwrap();
                        writer
                            .create_element("title")
                            .write_text_content_async(BytesText::new(&p.name))
                            .await
                            .unwrap();
                        writer
                            .create_element("season")
                            .write_text_content_async(BytesText::new("1"))
                            .await
                            .unwrap();
                        writer
                            .create_element("episode")
                            .write_text_content_async(BytesText::new(&p.pid.to_string()))
                            .await
                            .unwrap();
                        Ok(writer)
                    })
                    .await
                    .unwrap();
            }
            // The remaining (model, mode) combinations are never constructed.
            _ => unreachable!(),
        }
        tokio_buffer.flush().await?;
        Ok(std::str::from_utf8(&buffer).unwrap().to_owned())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[tokio::test]
    async fn test_generate_nfo() {
        // One fixture video exercises all three video-based NFO modes; the
        // distinct favtime (2022) and pubtime (2033) make the time-type
        // selection observable in the expected output.
        let video = video::Model {
            intro: "intro".to_string(),
            name: "name".to_string(),
            upper_id: 1,
            upper_name: "upper_name".to_string(),
            favtime: chrono::NaiveDateTime::new(
                chrono::NaiveDate::from_ymd_opt(2022, 2, 2).unwrap(),
                chrono::NaiveTime::from_hms_opt(2, 2, 2).unwrap(),
            ),
            pubtime: chrono::NaiveDateTime::new(
                chrono::NaiveDate::from_ymd_opt(2033, 3, 3).unwrap(),
                chrono::NaiveTime::from_hms_opt(3, 3, 3).unwrap(),
            ),
            bvid: "bvid".to_string(),
            tags: Some(serde_json::json!(["tag1", "tag2"])),
            ..Default::default()
        };
        // MOVIE mode with PubTime: year/aired come from pubtime (2033).
        assert_eq!(
            NFOSerializer(ModelWrapper::Video(&video), NFOMode::MOVIE)
                .generate_nfo(&NFOTimeType::PubTime)
                .await
                .unwrap(),
            r#"<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<movie>
    <plot><![CDATA[intro]]></plot>
    <outline/>
    <title>name</title>
    <actor>
        <name>1</name>
        <role>upper_name</role>
    </actor>
    <year>2033</year>
    <genre>tag1</genre>
    <genre>tag2</genre>
    <uniqueid type="bilibili">bvid</uniqueid>
    <aired>2033-03-03</aired>
</movie>"#,
        );
        // TVSHOW mode with FavTime: year/aired come from favtime (2022).
        assert_eq!(
            NFOSerializer(ModelWrapper::Video(&video), NFOMode::TVSHOW)
                .generate_nfo(&NFOTimeType::FavTime)
                .await
                .unwrap(),
            r#"<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<tvshow>
    <plot><![CDATA[intro]]></plot>
    <outline/>
    <title>name</title>
    <actor>
        <name>1</name>
        <role>upper_name</role>
    </actor>
    <year>2022</year>
    <genre>tag1</genre>
    <genre>tag2</genre>
    <uniqueid type="bilibili">bvid</uniqueid>
    <aired>2022-02-02</aired>
</tvshow>"#,
        );
        // UPPER mode ignores the time type; dateadded is always pubtime.
        assert_eq!(
            NFOSerializer(ModelWrapper::Video(&video), NFOMode::UPPER)
                .generate_nfo(&NFOTimeType::FavTime)
                .await
                .unwrap(),
            r#"<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<person>
    <plot/>
    <outline/>
    <lockdata>false</lockdata>
    <dateadded>2033-03-03 03:03:03</dateadded>
    <title>1</title>
    <sorttitle>1</sorttitle>
</person>"#,
        );
        // EPOSODE mode works on a page model; episode number is the pid.
        let page = page::Model {
            name: "name".to_string(),
            pid: 3,
            ..Default::default()
        };
        assert_eq!(
            NFOSerializer(ModelWrapper::Page(&page), NFOMode::EPOSODE)
                .generate_nfo(&NFOTimeType::FavTime)
                .await
                .unwrap(),
            r#"<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<episodedetails>
    <plot/>
    <outline/>
    <title>name</title>
    <season>1</season>
    <episode>3</episode>
</episodedetails>"#,
        );
    }
}

View File

@@ -0,0 +1,20 @@
use anyhow::Result;
use bili_sync_migration::{Migrator, MigratorTrait};
use sea_orm::{ConnectOptions, Database, DatabaseConnection};
use tokio::fs;
use crate::config::CONFIG_DIR;
/// Opens (creating it if necessary) the SQLite database under `CONFIG_DIR`
/// and returns a pooled connection.
pub async fn database_connection() -> Result<DatabaseConnection> {
    let target = CONFIG_DIR.join("data.sqlite");
    fs::create_dir_all(&*CONFIG_DIR).await?;
    // `mode=rwc` lets SQLite create the file on first run.
    // NOTE(review): `to_str().unwrap()` panics on a non-UTF-8 config path;
    // consider `to_string_lossy` or returning an error instead.
    let mut option = ConnectOptions::new(format!("sqlite://{}?mode=rwc", target.to_str().unwrap()));
    option
        .max_connections(100)
        .min_connections(5)
        .acquire_timeout(std::time::Duration::from_secs(90));
    Ok(Database::connect(option).await?)
}
/// Applies all pending schema migrations to the given connection.
pub async fn migrate_database(connection: &DatabaseConnection) -> Result<()> {
    Migrator::up(connection, None).await?;
    Ok(())
}

View File

@@ -0,0 +1,58 @@
use std::path::Path;
use anyhow::{anyhow, Result};
use futures::StreamExt;
use reqwest::Method;
use tokio::fs::{self, File};
use tokio::io;
use crate::bilibili::Client;
pub struct Downloader {
    client: Client,
}

impl Downloader {
    /// Builds a downloader on top of a `Client` that carries default headers.
    /// Fetching the media URLs needs no cookies as credentials, but without
    /// the default headers the server answers 403 Forbidden.
    pub fn new(client: Client) -> Self {
        Self { client }
    }

    /// Streams `url` into the file at `path`, creating parent directories
    /// first.
    pub async fn fetch(&self, url: &str, path: &Path) -> Result<()> {
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).await?;
        }
        let mut file = File::create(path).await?;
        let mut stream = self.client.request(Method::GET, url, None).send().await?.bytes_stream();
        while let Some(chunk) = stream.next().await {
            io::copy(&mut chunk?.as_ref(), &mut file).await?;
        }
        Ok(())
    }

    /// Muxes the separate video and audio tracks into `output_path` with
    /// ffmpeg (stream copy, no re-encode), removing the two source files on
    /// success.
    pub async fn merge(&self, video_path: &Path, audio_path: &Path, output_path: &Path) -> Result<()> {
        let output = tokio::process::Command::new("ffmpeg")
            .args([
                "-i",
                video_path.to_str().unwrap(),
                "-i",
                audio_path.to_str().unwrap(),
                "-c",
                "copy",
                "-y",
                output_path.to_str().unwrap(),
            ])
            .output()
            .await?;
        if !output.status.success() {
            let message = String::from_utf8(output.stderr).unwrap_or_else(|_| "ffmpeg error".to_string());
            return Err(anyhow!(message));
        }
        let _ = fs::remove_file(video_path).await;
        let _ = fs::remove_file(audio_path).await;
        Ok(())
    }
}

View File

@@ -0,0 +1,9 @@
use thiserror::Error;
/// Signals that the current download round should be aborted entirely —
/// presumably raised when the remote side starts rejecting requests for
/// being too frequent (see the error message) — TODO confirm against callers.
#[derive(Error, Debug)]
#[error("Request too frequently")]
pub struct DownloadAbortError();
/// Signals that processing a single video page failed.
#[derive(Error, Debug)]
#[error("Process page error")]
pub struct ProcessPageError();

View File

@@ -0,0 +1,62 @@
#[macro_use]
extern crate tracing;
mod bilibili;
mod config;
mod core;
mod database;
mod downloader;
mod error;
use std::time::Duration;
use once_cell::sync::Lazy;
use tokio::time;
use tracing_subscriber::util::SubscriberInitExt;
use crate::bilibili::BiliClient;
use crate::config::CONFIG;
use crate::core::command::{process_favorite_list, SCAN_ONLY};
use crate::database::{database_connection, migrate_database};
#[tokio::main]
async fn main() -> ! {
    // `unwrap_or_else` avoids allocating the fallback string when RUST_LOG is
    // already set (clippy: or_fun_call).
    let default_log_level = std::env::var("RUST_LOG").unwrap_or_else(|_| "None,bili_sync=info".to_owned());
    tracing_subscriber::fmt::Subscriber::builder()
        .with_env_filter(tracing_subscriber::EnvFilter::builder().parse_lossy(default_log_level))
        .with_timer(tracing_subscriber::fmt::time::ChronoLocal::new(
            "%Y-%m-%d %H:%M:%S%.3f".to_owned(),
        ))
        .finish()
        .try_init()
        .expect("初始化日志失败");
    // Force the lazy globals up front so configuration errors surface at
    // startup rather than in the middle of a run.
    Lazy::force(&SCAN_ONLY);
    Lazy::force(&CONFIG);
    // `anchor` remembers the day the credential refresh was last checked.
    let mut anchor = chrono::Local::now().date_naive();
    let bili_client = BiliClient::new();
    // Failing to open or migrate the database is unrecoverable at startup;
    // `expect` states the reason instead of a bare unwrap.
    let connection = database_connection().await.expect("获取数据库连接失败");
    migrate_database(&connection).await.expect("数据库迁移失败");
    loop {
        if let Err(e) = bili_client.is_login().await {
            error!("检查登录状态时遇到错误:{e},等待下一轮执行");
            time::sleep(Duration::from_secs(CONFIG.interval)).await;
            continue;
        }
        // Check at most once per day whether the credential needs refreshing.
        if anchor != chrono::Local::now().date_naive() {
            if let Err(e) = bili_client.check_refresh().await {
                error!("检查刷新 Credential 遇到错误:{e},等待下一轮执行");
                time::sleep(Duration::from_secs(CONFIG.interval)).await;
                continue;
            }
            anchor = chrono::Local::now().date_naive();
        }
        for (fid, path) in &CONFIG.favorite_list {
            if let Err(e) = process_favorite_list(&bili_client, fid, path, &connection).await {
                // Expected errors are handled internally; anything escaping to
                // here indicates a real problem.
                error!("处理收藏夹 {fid} 时遇到非预期的错误:{e}");
            }
        }
        info!("所有收藏夹处理完毕,等待下一轮执行");
        time::sleep(Duration::from_secs(CONFIG.interval)).await;
    }
}