commit 5d66f5dfc1
2025-01-05 16:38:36 +08:00
4 changed files with 2837 additions and 0 deletions

.gitignore vendored Normal file (+1)

@@ -0,0 +1 @@
/target

Cargo.lock generated Normal file (+2619)

File diff suppressed because it is too large.

Cargo.toml Normal file (+15)

@@ -0,0 +1,15 @@
[package]
name = "imageproxy"
version = "0.1.0"
edition = "2021"
[dependencies]
anyhow = "1.0.95"
axum = { version = "0.8.1", features = ["http2"] }
colored = "2.2.0"
image = "0.25.5"
libwebp-sys = "0.11.0"
reqwest = { version = "0.12.12", features = ["json", "stream", "rustls-tls"] }
thiserror = "2.0.9"
tokio = { version = "1.42.0", features = ["full"] }
webp = "0.3.0"

src/main.rs Normal file (+202)

@@ -0,0 +1,202 @@
use std::{collections::HashMap, sync::Arc};
use axum::{
    body::Bytes,
    extract::{OriginalUri, State},
    response::Response,
    routing::get,
};
use image::GenericImageView;
use reqwest::StatusCode;
use tokio::sync::{Mutex, Notify};
use webp::WebPConfig;

/// Deduplicates concurrent downloads: all requests for the same URL share
/// one in-flight HTTP fetch.
#[derive(Debug)]
struct Downloader {
    states: Arc<Mutex<HashMap<String, Arc<DownloadState>>>>,
}

/// Shared state for one in-flight download; waiters block on `notify`
/// until `result` has been filled in.
#[derive(Debug)]
struct DownloadState {
    notify: Notify,
    result: Mutex<Option<Result<DownloadData, (StatusCode, String)>>>,
}

#[derive(Debug)]
struct DownloadData {
    body: Bytes,
    headers: reqwest::header::HeaderMap,
}

impl Downloader {
    fn new() -> Self {
        Self {
            states: Arc::new(Mutex::new(HashMap::new())),
        }
    }

    async fn download(&self, target_url: &str) -> Result<DownloadData, (StatusCode, String)> {
        // Check whether this URL is already in flight; if not, register a new
        // state within the same critical section so two tasks cannot both
        // insert and orphan each other's waiters.
        let existing = {
            let mut states = self.states.lock().await;
            match states.get(target_url) {
                Some(state) => Some(state.clone()),
                None => {
                    states.insert(
                        target_url.to_string(),
                        Arc::new(DownloadState {
                            notify: Notify::new(),
                            result: Mutex::new(None),
                        }),
                    );
                    None
                }
            }
        }; // the map lock is released here, never held across an await below
        if let Some(state) = existing {
            // Register interest before checking the result, so a
            // `notify_waiters` call between the check and the await
            // cannot be missed.
            let mut notified = std::pin::pin!(state.notify.notified());
            notified.as_mut().enable();
            if state.result.lock().await.is_none() {
                notified.await;
            }
            let result = state.result.lock().await;
            return match &*result {
                Some(Ok(data)) => Ok(DownloadData {
                    body: data.body.clone(),
                    headers: data.headers.clone(),
                }),
                Some(Err(e)) => Err(e.clone()),
                None => Err((
                    StatusCode::INTERNAL_SERVER_ERROR,
                    "download completed without a result".to_string(),
                )),
            };
        }
        // This task owns the download. Record the outcome (success or
        // failure) and wake all waiters before returning, so an early error
        // can never leave waiters blocked forever.
        let result = Self::fetch(target_url).await;
        let mut states = self.states.lock().await;
        let state = states.remove(target_url).expect("state was inserted above");
        state.result.lock().await.replace(match &result {
            Ok(data) => Ok(DownloadData {
                body: data.body.clone(),
                headers: data.headers.clone(),
            }),
            Err(e) => Err(e.clone()),
        });
        state.notify.notify_waiters();
        drop(states);
        result
    }

    async fn fetch(target_url: &str) -> Result<DownloadData, (StatusCode, String)> {
        let response = reqwest::get(target_url)
            .await
            .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
        let content_length = response.content_length().unwrap_or(0);
        if content_length > 10_000_000 {
            return Err((
                StatusCode::FORBIDDEN,
                format!("content too large: {}", content_length),
            ));
        }
        let headers = response.headers().clone();
        let body = response
            .bytes()
            .await
            .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
        Ok(DownloadData { body, headers })
    }
}
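
// A sketch of the dedup behaviour this impl provides (hypothetical image
// URL): concurrent requests for the same URL share one HTTP fetch.
//
//     let downloader = Arc::new(Downloader::new());
//     let (a, b) = tokio::join!(
//         downloader.download("https://example.com/a.png"),
//         downloader.download("https://example.com/a.png"),
//     );
//     // One request hits the origin; `a` and `b` both get the same bytes.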

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let downloader = Arc::new(Downloader::new());
    // Every request path falls through to the proxy handler; the path itself
    // carries the target URL.
    let app = axum::Router::new()
        .fallback(get(api_proxy_image))
        .with_state(downloader);
    let listener = tokio::net::TcpListener::bind("0.0.0.0:2999").await?;
    axum::serve(listener, app).await?;
    Ok(())
}

async fn api_proxy_image(
    State(downloader): State<Arc<Downloader>>,
    OriginalUri(uri): OriginalUri,
) -> Result<Response, (StatusCode, String)> {
    // The request path is either `/<target_url>` or
    // `/max_size/<pixels>/<target_url>`.
    let target_url = uri.to_string();
    let mut target_url = target_url.trim_start_matches('/');
    let mut max_size = 300;
    if target_url.starts_with("max_size/") {
        target_url = target_url.trim_start_matches("max_size/");
        let parts = target_url.splitn(2, '/').collect::<Vec<_>>();
        max_size = parts
            .first()
            .ok_or((StatusCode::BAD_REQUEST, "invalid max_size".to_string()))?
            .parse()
            .map_err(|e| {
                (
                    StatusCode::BAD_REQUEST,
                    format!("max_size parse error: {}", e),
                )
            })?;
        target_url = parts
            .get(1)
            .ok_or((StatusCode::BAD_REQUEST, "target_url is missing".to_string()))?;
    }
    if !target_url.starts_with("http://") && !target_url.starts_with("https://") {
        return Err((
            StatusCode::BAD_REQUEST,
            format!(
                "invalid url, url should start with 'http://' or 'https://', your target_url: '{}'",
                target_url
            ),
        ));
    }

    let download_begin = std::time::Instant::now();
    let DownloadData { body, headers } = downloader.download(target_url).await?;
    // A missing or non-ASCII Content-Type must not panic; treat it as empty
    // so it fails the image/* check below.
    let ori_content_type = headers
        .get("Content-Type")
        .and_then(|v| v.to_str().ok())
        .unwrap_or("");
    if !ori_content_type.starts_with("image/") {
        return Err((
            StatusCode::FORBIDDEN,
            format!("invalid remote content type: {:?}", ori_content_type),
        ));
    }
    let original_size = body.len();
    let download_time = download_begin.elapsed();

    let resize_begin = std::time::Instant::now();
    let body = image::load_from_memory(&body)
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
    let (ori_width, ori_height) = body.dimensions();
    // Cap the longest edge at `max_size`; `resize` preserves the aspect ratio.
    let size = ori_width.max(ori_height);
    let size = max_size.min(size);
    let body = body.resize(size, size, image::imageops::FilterType::Nearest);
    let resize_time = resize_begin.elapsed();

    let convert_begin = std::time::Instant::now();
    let mut config = WebPConfig::new().expect("webp config init failed");
    config.quality = 69.0;
    config.lossless = 0;
    config.method = 0; // compression method 0-6: 0 is fastest, 6 compresses best
    config.alpha_quality = 0;
    let webp = webp::Encoder::from_image(&body)
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?
        .encode_advanced(&config)
        .map_err(|_e| {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                "webp encoding error".to_string(),
            )
        })?;
    let convert_time = convert_begin.elapsed();

    // Expose the original properties and the per-stage timings as
    // diagnostic response headers.
    Ok(Response::builder()
        .header("Content-Type", "image/webp")
        .header("X-Original-Content-Type", ori_content_type)
        .header("X-Original-Size", original_size.to_string())
        .header("X-Original-Width", ori_width.to_string())
        .header("X-Original-Height", ori_height.to_string())
        .header("X-Output-Max-Size", size.to_string())
        .header("X-Converted-Size", webp.len().to_string())
        .header("X-Download-Time", download_time.as_millis().to_string())
        .header("X-Resize-Time", resize_time.as_millis().to_string())
        .header("X-Convert-Time", convert_time.as_millis().to_string())
        .body(webp.to_vec().into())
        .unwrap())
}
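
For reference, the whole API surface is the request path: GET /<target_url> converts with the default 300-pixel cap, and GET /max_size/<pixels>/<target_url> overrides the cap. A minimal client sketch, assuming the server above is running locally on port 2999, using a placeholder image URL, and with reqwest and tokio available on the client side:

use std::error::Error;

#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    // Ask the proxy to cap the longest edge at 200 pixels.
    let url = "http://127.0.0.1:2999/max_size/200/https://example.com/image.png";
    let response = reqwest::get(url).await?;
    println!("status: {}", response.status());
    // Size and timing diagnostics come back as X-* response headers.
    for name in ["X-Original-Size", "X-Converted-Size", "X-Convert-Time"] {
        if let Some(value) = response.headers().get(name) {
            println!("{}: {}", name, value.to_str()?);
        }
    }
    let body = response.bytes().await?;
    println!("received {} bytes of image/webp", body.len());
    Ok(())
}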