refactor: 优化部分代码,移除一批 unwrap

This commit is contained in:
amtoaer
2025-01-21 03:12:45 +08:00
parent c10c14c125
commit cdc30e1b32
9 changed files with 70 additions and 100 deletions

View File

@@ -133,7 +133,7 @@ impl<'a> Collection<'a> {
("pn", page.as_str()),
("ps", "30"),
],
MIXIN_KEY.load().as_deref().map(|x| x.as_str()),
MIXIN_KEY.load().as_deref(),
),
),
CollectionType::Season => (
@@ -146,7 +146,7 @@ impl<'a> Collection<'a> {
("page_num", page.as_str()),
("page_size", "30"),
],
MIXIN_KEY.load().as_deref().map(|x| x.as_str()),
MIXIN_KEY.load().as_deref(),
),
),
};

View File

@@ -210,10 +210,10 @@ fn get_filename(url: &str) -> Option<&str> {
pub fn encoded_query<'a>(
params: Vec<(&'a str, impl Into<Cow<'a, str>>)>,
mixin_key: Option<&str>,
mixin_key: Option<impl AsRef<str>>,
) -> Vec<(&'a str, Cow<'a, str>)> {
match mixin_key {
Some(key) => _encoded_query(params, key, chrono::Local::now().timestamp().to_string()),
Some(key) => _encoded_query(params, key.as_ref(), chrono::Local::now().timestamp().to_string()),
None => params.into_iter().map(|(k, v)| (k, v.into())).collect(),
}
}

View File

@@ -47,7 +47,7 @@ impl<'a> Submission<'a> {
("pn", page.to_string()),
("ps", "30".to_string()),
],
MIXIN_KEY.load().as_deref().map(|x| x.as_str()),
MIXIN_KEY.load().as_deref(),
))
.send()
.await?

View File

@@ -154,7 +154,7 @@ impl<'a> Video<'a> {
("fnval", "4048"),
("fourk", "1"),
],
MIXIN_KEY.load().as_deref().map(|x| x.as_str()),
MIXIN_KEY.load().as_deref(),
))
.send()
.await?

View File

@@ -46,11 +46,9 @@ fn load_config() -> Config {
}
warn!("配置文件不存在,使用默认配置...");
let default_config = Config::default();
if let Err(err) = default_config.save() {
panic!("保存默认配置时遇到错误: {err}");
}
info!("已将默认配置写入文件,请在修改后重新启动程序...");
std::process::exit(1);
default_config.save().expect("保存默认配置时遇到错误");
info!("已将默认配置写入 {}", CONFIG_DIR.join("config.toml").display());
default_config
});
// 检查配置文件内容
info!("校验配置文件内容...");

View File

@@ -136,7 +136,7 @@ impl Config {
}
if !(self.concurrent_limit.video > 0 && self.concurrent_limit.page > 0) {
ok = false;
error!("允许的并发数必须大于 0");
error!("video 和 page 允许的并发数必须大于 0");
}
if !ok {
panic!(

View File

@@ -35,13 +35,13 @@ impl Downloader {
let output = tokio::process::Command::new("ffmpeg")
.args([
"-i",
video_path.to_str().unwrap(),
video_path.to_string_lossy().as_ref(),
"-i",
audio_path.to_str().unwrap(),
audio_path.to_string_lossy().as_ref(),
"-c",
"copy",
"-y",
output_path.to_str().unwrap(),
output_path.to_string_lossy().as_ref(),
])
.output()
.await?;

View File

@@ -44,61 +44,51 @@ impl NFOSerializer<'_> {
writer
.create_element("plot")
.write_cdata_content_async(BytesCData::new(&v.intro))
.await
.unwrap();
writer.create_element("outline").write_empty_async().await.unwrap();
.await?;
writer.create_element("outline").write_empty_async().await?;
writer
.create_element("title")
.write_text_content_async(BytesText::new(&v.name))
.await
.unwrap();
.await?;
writer
.create_element("actor")
.write_inner_content_async::<_, _, Error>(|writer| async move {
writer
.create_element("name")
.write_text_content_async(BytesText::new(&v.upper_id.to_string()))
.await
.unwrap();
.await?;
writer
.create_element("role")
.write_text_content_async(BytesText::new(&v.upper_name))
.await
.unwrap();
.await?;
Ok(writer)
})
.await
.unwrap();
.await?;
writer
.create_element("year")
.write_text_content_async(BytesText::new(&nfo_time.format("%Y").to_string()))
.await
.unwrap();
.await?;
if let Some(tags) = &v.tags {
let tags: Vec<String> = serde_json::from_value(tags.clone()).unwrap();
let tags: Vec<String> = serde_json::from_value(tags.clone()).unwrap_or_default();
for tag in tags {
writer
.create_element("genre")
.write_text_content_async(BytesText::new(&tag))
.await
.unwrap();
.await?;
}
}
writer
.create_element("uniqueid")
.with_attribute(("type", "bilibili"))
.write_text_content_async(BytesText::new(&v.bvid))
.await
.unwrap();
.await?;
writer
.create_element("aired")
.write_text_content_async(BytesText::new(&nfo_time.format("%Y-%m-%d").to_string()))
.await
.unwrap();
.await?;
Ok(writer)
})
.await
.unwrap();
.await?;
}
NFOSerializer(ModelWrapper::Video(v), NFOMode::TVSHOW) => {
let nfo_time = match nfo_time_type {
@@ -111,125 +101,106 @@ impl NFOSerializer<'_> {
writer
.create_element("plot")
.write_cdata_content_async(BytesCData::new(&v.intro))
.await
.unwrap();
writer.create_element("outline").write_empty_async().await.unwrap();
.await?;
writer.create_element("outline").write_empty_async().await?;
writer
.create_element("title")
.write_text_content_async(BytesText::new(&v.name))
.await
.unwrap();
.await?;
writer
.create_element("actor")
.write_inner_content_async::<_, _, Error>(|writer| async move {
writer
.create_element("name")
.write_text_content_async(BytesText::new(&v.upper_id.to_string()))
.await
.unwrap();
.await?;
writer
.create_element("role")
.write_text_content_async(BytesText::new(&v.upper_name))
.await
.unwrap();
.await?;
Ok(writer)
})
.await
.unwrap();
.await?;
writer
.create_element("year")
.write_text_content_async(BytesText::new(&nfo_time.format("%Y").to_string()))
.await
.unwrap();
.await?;
if let Some(tags) = &v.tags {
let tags: Vec<String> = serde_json::from_value(tags.clone()).unwrap();
let tags: Vec<String> = serde_json::from_value(tags.clone()).unwrap_or_default();
for tag in tags {
writer
.create_element("genre")
.write_text_content_async(BytesText::new(&tag))
.await
.unwrap();
.await?;
}
}
writer
.create_element("uniqueid")
.with_attribute(("type", "bilibili"))
.write_text_content_async(BytesText::new(&v.bvid))
.await
.unwrap();
.await?;
writer
.create_element("aired")
.write_text_content_async(BytesText::new(&nfo_time.format("%Y-%m-%d").to_string()))
.await
.unwrap();
.await?;
Ok(writer)
})
.await
.unwrap();
.await?;
}
NFOSerializer(ModelWrapper::Video(v), NFOMode::UPPER) => {
writer
.create_element("person")
.write_inner_content_async::<_, _, Error>(|writer| async move {
writer.create_element("plot").write_empty_async().await.unwrap();
writer.create_element("outline").write_empty_async().await.unwrap();
writer.create_element("plot").write_empty_async().await?;
writer.create_element("outline").write_empty_async().await?;
writer
.create_element("lockdata")
.write_text_content_async(BytesText::new("false"))
.await
.unwrap();
.await?;
writer
.create_element("dateadded")
.write_text_content_async(BytesText::new(
&v.pubtime.format("%Y-%m-%d %H:%M:%S").to_string(),
))
.await
.unwrap();
.await?;
writer
.create_element("title")
.write_text_content_async(BytesText::new(&v.upper_id.to_string()))
.await
.unwrap();
.await?;
writer
.create_element("sorttitle")
.write_text_content_async(BytesText::new(&v.upper_id.to_string()))
.await
.unwrap();
.await?;
Ok(writer)
})
.await
.unwrap();
.await?;
}
NFOSerializer(ModelWrapper::Page(p), NFOMode::EPOSODE) => {
writer
.create_element("episodedetails")
.write_inner_content_async::<_, _, Error>(|writer| async move {
writer.create_element("plot").write_empty_async().await.unwrap();
writer.create_element("outline").write_empty_async().await.unwrap();
writer.create_element("plot").write_empty_async().await?;
writer.create_element("outline").write_empty_async().await?;
writer
.create_element("title")
.write_text_content_async(BytesText::new(&p.name))
.await
.unwrap();
.await?;
writer
.create_element("season")
.write_text_content_async(BytesText::new("1"))
.await
.unwrap();
.await?;
writer
.create_element("episode")
.write_text_content_async(BytesText::new(&p.pid.to_string()))
.await
.unwrap();
.await?;
Ok(writer)
})
.await
.unwrap();
.await?;
}
_ => unreachable!(),
}
tokio_buffer.flush().await?;
Ok(std::str::from_utf8(&buffer).unwrap().to_owned())
Ok(String::from_utf8(buffer)?)
}
}

View File

@@ -2,7 +2,7 @@ use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::pin::Pin;
use anyhow::{bail, Result};
use anyhow::{anyhow, bail, Result};
use bili_sync_entity::*;
use futures::stream::{FuturesOrdered, FuturesUnordered};
use futures::{Future, Stream, StreamExt};
@@ -94,12 +94,14 @@ pub async fn download_unprocessed_videos(
let downloader = Downloader::new(bili_client.client.clone());
let mut uppers_mutex: HashMap<i64, (Mutex<()>, Mutex<()>)> = HashMap::new();
for (video_model, _) in &unhandled_videos_pages {
uppers_mutex.insert(video_model.upper_id, (Mutex::new(()), Mutex::new(())));
uppers_mutex
.entry(video_model.upper_id)
.or_insert_with(|| (Mutex::new(()), Mutex::new(())));
}
let mut tasks = unhandled_videos_pages
.into_iter()
.map(|(video_model, pages_model)| {
let upper_mutex = uppers_mutex.get(&video_model.upper_id).unwrap();
let upper_mutex = uppers_mutex.get(&video_model.upper_id).expect("upper mutex not found");
download_video_pages(
bili_client,
video_model,
@@ -158,9 +160,9 @@ pub async fn download_video_pages(
let base_path = Path::new(&video_model.path);
let upper_id = video_model.upper_id.to_string();
let base_upper_path = upper_path
.join(upper_id.chars().next().unwrap().to_string())
.join(upper_id.chars().next().ok_or(anyhow!("upper_id is empty"))?.to_string())
.join(upper_id);
let is_single_page = video_model.single_page.unwrap();
let is_single_page = video_model.single_page.ok_or(anyhow!("single_page is null"))?;
// 对于单页视频,page 的下载已经足够
// 对于多页视频,page 下载仅包含了分集内容,需要额外补上视频的 poster 的 tvshow.nfo
let tasks: Vec<Pin<Box<dyn Future<Output = Result<()>>>>> = vec![
@@ -220,7 +222,7 @@ pub async fn download_video_pages(
&video_model.bvid, &video_model.name, task_name, e
),
});
if let Err(e) = results.into_iter().nth(4).unwrap() {
if let Err(e) = results.into_iter().nth(4).expect("not enough results") {
if e.downcast_ref::<DownloadAbortError>().is_some() {
return Err(e);
}
@@ -306,7 +308,7 @@ pub async fn download_page(
}
let mut status = PageStatus::new(page_model.download_status);
let seprate_status = status.should_run();
let is_single_page = video_model.single_page.unwrap();
let is_single_page = video_model.single_page.ok_or(anyhow!("single_page is null"))?;
let base_path = Path::new(&video_model.path);
let base_name = TEMPLATE.path_safe_render(
"page",
@@ -347,14 +349,13 @@ pub async fn download_page(
None,
)
};
let dimension = if page_model.width.is_some() && page_model.height.is_some() {
Some(Dimension {
width: page_model.width.unwrap(),
height: page_model.height.unwrap(),
let dimension = match (page_model.width, page_model.height) {
(Some(width), Some(height)) => Some(Dimension {
width,
height,
rotate: 0,
})
} else {
None
}),
_ => None,
};
let page_info = PageInfo {
cid: page_model.cid,
@@ -405,14 +406,14 @@ pub async fn download_page(
),
});
// 查看下载视频的状态,该状态会影响上层是否 break
if let Err(e) = results.into_iter().nth(1).unwrap() {
if let Err(e) = results.into_iter().nth(1).expect("not enough results") {
if let Ok(BiliError::RiskControlOccurred) = e.downcast::<BiliError>() {
bail!(DownloadAbortError());
}
}
let mut page_active_model: page::ActiveModel = page_model.into();
page_active_model.download_status = Set(status.into());
page_active_model.path = Set(Some(video_path.to_str().unwrap().to_string()));
page_active_model.path = Set(Some(video_path.to_string_lossy().to_string()));
Ok(page_active_model)
}
@@ -427,7 +428,7 @@ pub async fn fetch_page_poster(
if !should_run {
return Ok(());
}
let single_page = video_model.single_page.unwrap();
let single_page = video_model.single_page.ok_or(anyhow!("single_page is null"))?;
let url = if single_page {
// 单页视频直接用视频的封面
video_model.cover.as_str()
@@ -515,7 +516,7 @@ pub async fn generate_page_nfo(
if !should_run {
return Ok(());
}
let single_page = video_model.single_page.unwrap();
let single_page = video_model.single_page.ok_or(anyhow!("single_page is null"))?;
let nfo_serializer = if single_page {
NFOSerializer(ModelWrapper::Video(video_model), NFOMode::MOVIE)
} else {