|
use crate::{ |
|
app::{ |
|
constant::EMPTY_STRING, |
|
model::TokenInfo, |
|
lazy::{TOKEN_FILE, TOKEN_LIST_FILE}, |
|
}, |
|
common::utils::generate_checksum_with_default, |
|
}; |
|
|
|
|
|
/// Normalizes CRLF line endings to LF and, when the content actually
/// changed, writes the normalized text back to `file_path`.
///
/// Returns the normalized string either way; a failed write only logs
/// a warning and does not abort.
fn normalize_and_write(content: &str, file_path: &str) -> String {
    let unified = content.replace("\r\n", "\n");
    let changed = unified != content;
    // Only touch the file when normalization did something, so an
    // already-clean file is never rewritten.
    if changed {
        match std::fs::write(file_path, &unified) {
            Ok(()) => {}
            Err(e) => eprintln!("警告: 无法更新规范化的文件: {}", e),
        }
    }
    unified
}
|
|
|
|
|
/// Extracts the bare token from a line that may carry a prefix separated
/// by either a literal ':' or its percent-encoded form "%3A".
///
/// The token is whatever follows the *last* separator of either kind;
/// a line with no separator is returned whole.
fn parse_token(token_part: &str) -> Option<String> {
    let plain = token_part.rfind(':');
    let encoded = token_part.rfind("%3A");

    // Compute where the token starts: just past the later separator
    // (':' is 1 byte, "%3A" is 3 bytes).
    let start = match (plain, encoded) {
        (None, None) => 0,
        (Some(p), None) => p + 1,
        (None, Some(p)) => p + 3,
        (Some(p1), Some(p2)) => {
            if p2 > p1 {
                p2 + 3
            } else {
                p1 + 1
            }
        }
    };

    Some(token_part[start..].to_string())
}
|
|
|
|
|
pub fn load_tokens() -> Vec<TokenInfo> { |
|
let token_file = TOKEN_FILE.as_str(); |
|
let token_list_file = TOKEN_LIST_FILE.as_str(); |
|
|
|
|
|
for file in [&token_file, &token_list_file] { |
|
if !std::path::Path::new(file).exists() { |
|
if let Err(e) = std::fs::write(file, EMPTY_STRING) { |
|
eprintln!("警告: 无法创建文件 '{}': {}", file, e); |
|
} |
|
} |
|
} |
|
|
|
|
|
let token_entries = match std::fs::read_to_string(&token_file) { |
|
Ok(content) => { |
|
let normalized = content.replace("\r\n", "\n"); |
|
normalized |
|
.lines() |
|
.filter_map(|line| { |
|
let line = line.trim(); |
|
if line.is_empty() || line.starts_with('#') { |
|
return None; |
|
} |
|
let parsed = parse_token(line); |
|
if parsed.is_none() || !validate_token(&parsed.as_ref().unwrap()) { |
|
return None; |
|
} |
|
parsed |
|
}) |
|
.collect::<Vec<_>>() |
|
} |
|
Err(e) => { |
|
eprintln!("警告: 无法读取token文件 '{}': {}", token_file, e); |
|
Vec::new() |
|
} |
|
}; |
|
|
|
|
|
let mut token_map: std::collections::HashMap<String, String> = |
|
match std::fs::read_to_string(&token_list_file) { |
|
Ok(content) => { |
|
let normalized = normalize_and_write(&content, &token_list_file); |
|
normalized |
|
.lines() |
|
.filter_map(|line| { |
|
let line = line.trim(); |
|
if line.is_empty() || line.starts_with('#') { |
|
return None; |
|
} |
|
|
|
let parts: Vec<&str> = line.split(',').collect(); |
|
match parts[..] { |
|
[token_part, checksum] => { |
|
let token = parse_token(token_part)?; |
|
Some((token, checksum.to_string())) |
|
} |
|
_ => { |
|
eprintln!("警告: 忽略无效的token-list行: {}", line); |
|
None |
|
} |
|
} |
|
}) |
|
.collect() |
|
} |
|
Err(e) => { |
|
eprintln!("警告: 无法读取token-list文件: {}", e); |
|
std::collections::HashMap::new() |
|
} |
|
}; |
|
|
|
|
|
for token in token_entries { |
|
if !token_map.contains_key(&token) { |
|
|
|
let checksum = generate_checksum_with_default(); |
|
token_map.insert(token, checksum); |
|
} |
|
} |
|
|
|
|
|
let token_list_content = token_map |
|
.iter() |
|
.map(|(token, checksum)| { |
|
format!("{},{}", token, checksum) |
|
}) |
|
.collect::<Vec<_>>() |
|
.join("\n"); |
|
|
|
if let Err(e) = std::fs::write(&token_list_file, token_list_content) { |
|
eprintln!("警告: 无法更新token-list文件: {}", e); |
|
} |
|
|
|
|
|
token_map |
|
.into_iter() |
|
.map(|(token, checksum)| TokenInfo { |
|
token: token.clone(), |
|
checksum, |
|
profile: None, |
|
}) |
|
.collect() |
|
} |
|
|
|
use base64::{engine::general_purpose::URL_SAFE_NO_PAD, Engine}; |
|
use chrono::{DateTime, Local, TimeZone}; |
|
|
|
|
|
pub fn validate_token(token: &str) -> bool { |
|
|
|
let parts: Vec<&str> = token.split('.').collect(); |
|
if parts.len() != 3 { |
|
return false; |
|
} |
|
|
|
if parts[0] != "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9" { |
|
return false; |
|
} |
|
|
|
|
|
let payload = match URL_SAFE_NO_PAD.decode(parts[1]) { |
|
Ok(decoded) => decoded, |
|
Err(_) => return false, |
|
}; |
|
|
|
|
|
let payload_str = match String::from_utf8(payload) { |
|
Ok(s) => s, |
|
Err(_) => return false, |
|
}; |
|
|
|
|
|
let payload_json: serde_json::Value = match serde_json::from_str(&payload_str) { |
|
Ok(v) => v, |
|
Err(_) => return false, |
|
}; |
|
|
|
|
|
let required_fields = ["sub", "time", "randomness", "exp", "iss", "scope", "aud"]; |
|
for field in required_fields { |
|
if !payload_json.get(field).is_some() { |
|
return false; |
|
} |
|
} |
|
|
|
|
|
if let Some(time) = payload_json["time"].as_str() { |
|
|
|
if let Ok(time_value) = time.parse::<i64>() { |
|
let current_time = chrono::Utc::now().timestamp(); |
|
if time_value > current_time { |
|
return false; |
|
} |
|
} else { |
|
return false; |
|
} |
|
} else { |
|
return false; |
|
} |
|
|
|
|
|
if let Some(randomness) = payload_json["randomness"].as_str() { |
|
if randomness.len() != 18 { |
|
return false; |
|
} |
|
} else { |
|
return false; |
|
} |
|
|
|
|
|
if let Some(exp) = payload_json["exp"].as_i64() { |
|
let current_time = chrono::Utc::now().timestamp(); |
|
if current_time > exp { |
|
return false; |
|
} |
|
} else { |
|
return false; |
|
} |
|
|
|
|
|
if payload_json["iss"].as_str() != Some("https://authentication.cursor.sh") { |
|
return false; |
|
} |
|
|
|
|
|
if payload_json["scope"].as_str() != Some("openid profile email offline_access") { |
|
return false; |
|
} |
|
|
|
|
|
if payload_json["aud"].as_str() != Some("https://cursor.com") { |
|
return false; |
|
} |
|
|
|
true |
|
} |
|
|
|
|
|
pub fn extract_user_id(token: &str) -> Option<String> { |
|
|
|
let parts: Vec<&str> = token.split('.').collect(); |
|
if parts.len() != 3 { |
|
return None; |
|
} |
|
|
|
|
|
let payload = match URL_SAFE_NO_PAD.decode(parts[1]) { |
|
Ok(decoded) => decoded, |
|
Err(_) => return None, |
|
}; |
|
|
|
|
|
let payload_str = match String::from_utf8(payload) { |
|
Ok(s) => s, |
|
Err(_) => return None, |
|
}; |
|
|
|
|
|
let payload_json: serde_json::Value = match serde_json::from_str(&payload_str) { |
|
Ok(v) => v, |
|
Err(_) => return None, |
|
}; |
|
|
|
|
|
payload_json["sub"] |
|
.as_str() |
|
.map(|s| s.split('|').nth(1).unwrap_or(s).to_string()) |
|
} |
|
|
|
|
|
pub fn extract_time(token: &str) -> Option<DateTime<Local>> { |
|
|
|
let parts: Vec<&str> = token.split('.').collect(); |
|
if parts.len() != 3 { |
|
return None; |
|
} |
|
|
|
|
|
let payload = match URL_SAFE_NO_PAD.decode(parts[1]) { |
|
Ok(decoded) => decoded, |
|
Err(_) => return None, |
|
}; |
|
|
|
|
|
let payload_str = match String::from_utf8(payload) { |
|
Ok(s) => s, |
|
Err(_) => return None, |
|
}; |
|
|
|
|
|
let payload_json: serde_json::Value = match serde_json::from_str(&payload_str) { |
|
Ok(v) => v, |
|
Err(_) => return None, |
|
}; |
|
|
|
|
|
payload_json["time"] |
|
.as_str() |
|
.and_then(|t| t.parse::<i64>().ok()) |
|
.and_then(|timestamp| Local.timestamp_opt(timestamp, 0).single()) |
|
} |
|
|