Compare commits: ffef41c289 ... c786084dc4

10 commits:
c786084dc4
98e9969d6a
a527c01e8f
1c37106b30
f89b35615a
8090b03f13
dc2a01c7d2
3f6368c9d4
fad40be482
58a9e6b4a2
7 changed files with 232 additions and 23 deletions
.dockerignore (new file, 1 addition)

@@ -0,0 +1 @@
+target
Cargo.lock (generated, 33 additions)

@@ -513,6 +513,12 @@ dependencies = [
 "pin-project-lite",
]

[[package]]
name = "http-range-header"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c"

[[package]]
name = "httparse"
version = "1.10.1"

@@ -882,6 +888,16 @@ version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"

[[package]]
name = "mime_guess"
version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e"
dependencies = [
 "mime",
 "unicase",
]

[[package]]
name = "miniz_oxide"
version = "0.8.9"

@@ -961,6 +977,7 @@ dependencies = [
 "serde_derive",
 "serde_json",
 "tokio",
 "tower-http",
 "tracing",
 "tracing-subscriber",
 "zip",

@@ -1603,14 +1620,24 @@ checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2"
dependencies = [
 "bitflags",
 "bytes",
 "futures-core",
 "futures-util",
 "http",
 "http-body",
 "http-body-util",
 "http-range-header",
 "httpdate",
 "iri-string",
 "mime",
 "mime_guess",
 "percent-encoding",
 "pin-project-lite",
 "tokio",
 "tokio-util",
 "tower",
 "tower-layer",
 "tower-service",
 "tracing",
]

[[package]]

@@ -1695,6 +1722,12 @@ version = "1.18.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1dccffe3ce07af9386bfd29e80c0ab1a8205a2fc34e4bcd40364df902cfa8f3f"

[[package]]
name = "unicase"
version = "2.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539"

[[package]]
name = "unicode-ident"
version = "1.0.18"
Cargo.toml (1 addition)

@@ -11,6 +11,7 @@ serde = "1.0.219"
 serde_derive = "1.0.219"
 serde_json = "1.0.142"
 tokio = { version = "1.47.1", features = ["full"] }
+tower-http = { version = "0.6.6", features = ["fs"] }
 tracing = "0.1.41"
 tracing-subscriber = "0.3.19"
 zip = "4.3.0"
Dockerfile (new file, 13 additions)

@@ -0,0 +1,13 @@
+FROM rust:latest AS builder
+
+COPY ./ /src
+
+RUN cd /src && cargo build --release
+
+FROM debian:trixie
+
+RUN apt-get update && apt-get install -y ca-certificates
+
+COPY --from=builder /src/target/release/openeuicc-site /openeuicc-site
+
+CMD [ "/openeuicc-site" ]
fly.toml (new file, 35 additions)

@@ -0,0 +1,35 @@
+app = 'openeuicc'
+primary_region = 'yyz'
+
+[build]
+
+[mounts]
+source = "openeuicc_site_cache"
+destination = "/cache"
+
+[env]
+CACHE_DIR = "/cache"
+ROOT_DOMAIN = "openeuicc.com"
+
+[http_service]
+internal_port = 3000
+force_https = true
+auto_stop_machines = 'stop'
+auto_start_machines = true
+min_machines_running = 0
+processes = ['app']
+
+[http_service.concurrency]
+type = "requests"
+soft_limit = 7000
+hard_limit = 8000
+
+[[http_service.checks]]
+grace_period = "10s"
+interval = "30s"
+method = "GET"
+timeout = "5s"
+path = "/"
+
+[[vm]]
+size = 'shared-cpu-1x'
src/main.rs (23 lines changed)

@@ -1,9 +1,8 @@
 use std::sync::LazyLock;
 
 use axum::{Router, routing::get};
-use tracing::info;
-
-use crate::update::update_latest_build_loop;
+use tower_http::services::ServeDir;
+use tracing::{info, warn};
 
 mod update;
 
@@ -14,9 +13,21 @@ pub static ROOT_DOMAIN: LazyLock<String>
 #[tokio::main]
 async fn main() -> eyre::Result<()> {
     tracing_subscriber::fmt::init();
-    tokio::spawn(update_latest_build_loop());
+    tokio::fs::create_dir_all(&*CACHE_DIR).await?;
+    if let Err(e) = crate::update::maybe_init().await {
+        warn!("Error while trying to init from existing cache: {e:?}, skipping");
+    }
+    tokio::spawn(crate::update::update_latest_build_loop());
 
-    let app = Router::new().route("/", get(root));
+    let static_cache_files = ServeDir::new(&*CACHE_DIR);
+
+    let app = Router::new()
+        .route("/", get(root))
+        .route(
+            "/magisk/magisk-debug.json",
+            get(crate::update::serve_latest_metadata),
+        )
+        .nest_service("/magisk", static_cache_files);
 
     let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await?;
     info!("Listening on port 3000");
@@ -25,5 +36,5 @@ async fn main() -> eyre::Result<()> {
 }
 
 async fn root() -> &'static str {
-    "Hello, world"
+    "This is the OpenEUICC homepage."
 }
src/update.rs (149 lines changed)

@@ -1,5 +1,10 @@
-use std::{io::Read, sync::RwLock, time::Duration};
+use std::{
+    io::Read,
+    sync::{LazyLock, RwLock},
+    time::Duration,
+};
 
+use axum::Json;
 use serde_derive::{Deserialize, Serialize};
 use tokio::io::AsyncWriteExt;
 use tracing::info;
@@ -7,6 +12,33 @@ use tracing::info;
 use crate::{CACHE_DIR, ROOT_DOMAIN};
 
 pub static LATEST_METADATA: RwLock<Option<MagiskZipMetadata>> = RwLock::new(None);
+pub static INITIALIZE_CHANNEL: LazyLock<
+    RwLock<
+        Option<(
+            tokio::sync::broadcast::Sender<()>,
+            tokio::sync::broadcast::Receiver<()>,
+        )>,
+    >,
+> = LazyLock::new(|| RwLock::new(Some(tokio::sync::broadcast::channel(1))));
+
+pub async fn serve_latest_metadata() -> Json<MagiskZipMetadata> {
+    if let Some(metadata) = &*LATEST_METADATA.read().unwrap() {
+        return Json(metadata.clone());
+    }
+
+    // Try waiting for initialization
+    let rx = if let Some((tx, _)) = &*INITIALIZE_CHANNEL.read().unwrap() {
+        Some(tx.subscribe())
+    } else {
+        None
+    };
+
+    if let Some(mut rx) = rx {
+        rx.recv().await.ok();
+    }
+
+    Json(LATEST_METADATA.read().unwrap().clone().unwrap())
+}
+
 #[allow(unused)]
 #[derive(Clone, Deserialize)]
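Aside on the INITIALIZE_CHANNEL addition above: it acts as a one-shot "metadata is ready" latch. A request that arrives before the first successful parse subscribes to the broadcast channel and waits; once the metadata has been published the sender is dropped, so any later subscriber wakes immediately with a closed-channel error and falls through to the now-populated LATEST_METADATA. A minimal, self-contained sketch of that pattern (the names READY and mark_ready are illustrative, not part of this change):

use std::sync::{LazyLock, RwLock};

use tokio::sync::broadcast;

// One-shot "ready" latch: Some(channel) until the first initialization completes.
static READY: LazyLock<RwLock<Option<(broadcast::Sender<()>, broadcast::Receiver<()>)>>> =
    LazyLock::new(|| RwLock::new(Some(broadcast::channel(1))));

async fn wait_until_ready() {
    // Subscribe while holding the read lock only briefly; never hold it across .await.
    let rx = READY.read().unwrap().as_ref().map(|(tx, _)| tx.subscribe());
    if let Some(mut rx) = rx {
        // Ok(()) means an explicit signal; Err(Closed) means the sender was already dropped.
        rx.recv().await.ok();
    }
}

fn mark_ready() {
    // Wake everyone currently waiting...
    if let Some((tx, _)) = &*READY.read().unwrap() {
        tx.send(()).ok();
    }
    // ...then drop the channel so future subscribers return immediately.
    READY.write().unwrap().take();
}

#[tokio::main]
async fn main() {
    let waiter = tokio::spawn(wait_until_ready());
    mark_ready();
    waiter.await.unwrap();
    println!("ready");
}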
@@ -31,27 +63,43 @@ struct WorkflowTasksResponse {
     workflow_runs: Vec<WorkflowRun>,
 }
 
+pub async fn maybe_init() -> eyre::Result<()> {
+    let Some((latest_zip, latest_run)) = cleanup_and_get_latest_zip().await? else {
+        info!(
+            "Could not find existing, cached latest magisk zip; will initialize after the first update loop run"
+        );
+        return Ok(());
+    };
+
+    let path = format!("{}/{}", *CACHE_DIR, latest_zip);
+
+    info!("Found existing Magisk zip {path}, initializing metadata with it");
+    parse_magisk_zip_and_update(path, latest_run).await
+}
+
 pub async fn update_latest_build_loop() {
-    let mut interval = tokio::time::interval(Duration::from_mins(15));
+    let mut interval = tokio::time::interval(Duration::from_secs(5 * 60));
 
     loop {
         interval.tick().await;
 
         let mut cur_attempts = 0;
         while let Err(e) = update_latest_build().await
-            && cur_attempts < 5
+            && cur_attempts < 20
         {
-            tracing::error!("Failed to fetch latest build: {e:?}, retrying in 60 seconds");
+            tracing::error!("Failed to fetch latest build: {e:?}, retrying in 30 seconds");
             cur_attempts += 1;
-            tokio::time::sleep(Duration::from_secs(60)).await;
+            tokio::time::sleep(Duration::from_secs(30)).await;
         }
+
+        cleanup_and_get_latest_zip().await.ok();
     }
 }
 
 async fn update_latest_build() -> eyre::Result<()> {
     let client = reqwest::Client::new();
     let req = client
-        .get("https://gitea.angry.im/api/v1/repos/PeterCxy/OpenEUICC/actions/tasks")
+        .get("https://gitea.angry.im/api/v1/repos/PeterCxy/OpenEUICC/actions/tasks?limit=200")
         .header("Accept-Encoding", "application/json")
         .build()?;
     let mut resp: WorkflowTasksResponse = client.execute(req).await?.json().await?;
@@ -61,7 +109,22 @@ async fn update_latest_build() -> eyre::Result<()> {
     resp.workflow_runs.sort_by_key(|run| run.run_number);
     resp.workflow_runs.reverse();
 
+    if resp.workflow_runs.is_empty() {
+        tracing::error!("Couldn't find the latest successful workflow, aborting");
+        return Ok(());
+    }
+
     let latest_run = resp.workflow_runs[0].clone();
+    if let Some(metadata) = &*LATEST_METADATA.read().unwrap()
+        && metadata.run_number == latest_run.run_number
+    {
+        info!(
+            "Latest run has the same run number as existing {}, skipping",
+            latest_run.run_number
+        );
+        return Ok(());
+    }
+
     let latest_module_url = format!(
         "https://gitea.angry.im/PeterCxy/OpenEUICC/actions/runs/{}/artifacts/magisk-debug",
         latest_run.run_number
@@ -71,7 +134,10 @@ async fn update_latest_build() -> eyre::Result<()> {
     let download_req = client.get(latest_module_url).build()?;
     let download_buf = client.execute(download_req).await?.bytes().await?;
 
-    let target_path = format!("{}/magisk-{}.zip", *CACHE_DIR, latest_run.run_number);
+    let target_path = format!(
+        "{}/magisk-openeuicc-debug-{}.zip",
+        *CACHE_DIR, latest_run.run_number
+    );
     info!("Downloading Magisk zip to {target_path}");
     {
         let mut target_file = tokio::fs::File::create(&target_path).await?;
@@ -79,17 +145,12 @@ async fn update_latest_build() -> eyre::Result<()> {
     }
 
     info!("Magisk zip downloaded");
-    let metadata = parse_magisk_zip(target_path, latest_run.run_number).await?;
-    info!(
-        "Generated update manifest: {}",
-        serde_json::to_string_pretty(&metadata)?
-    );
-    *LATEST_METADATA.write().unwrap() = Some(metadata);
+    parse_magisk_zip_and_update(target_path, latest_run.run_number).await?;
 
     Ok(())
 }
 
-#[derive(Serialize)]
+#[derive(Clone, Serialize)]
 pub struct MagiskZipMetadata {
     version: String,
     #[serde(rename = "versionCode")]
@@ -97,10 +158,27 @@ pub struct MagiskZipMetadata {
     #[serde(rename = "zipUrl")]
     zip_url: String,
     changelog: String,
     #[serde(skip)]
     run_number: u64,
 }
 
-async fn parse_magisk_zip(path: String, run_number: u64) -> eyre::Result<MagiskZipMetadata> {
-    Ok(tokio::task::spawn_blocking(move || do_parse_magisk_zip(path, run_number)).await??)
+async fn parse_magisk_zip_and_update(path: String, run_number: u64) -> eyre::Result<()> {
+    info!("Parsing {path}");
+    let res = tokio::task::spawn_blocking(move || do_parse_magisk_zip(path, run_number)).await??;
+    info!(
+        "Generated update manifest: {}",
+        serde_json::to_string_pretty(&res)?
+    );
+    *LATEST_METADATA.write().unwrap() = Some(res);
+
+    // If the initialize channel is still there, tell everyone we have finished initialization and LATEST_METADATA is no longer None
+    if let Some((tx, _)) = &*INITIALIZE_CHANNEL.read().unwrap() {
+        tx.send(()).ok();
+    };
+
+    // Drop the sender here; any subscriber that might have raced with us will receive a closed error
+    INITIALIZE_CHANNEL.write().unwrap().take();
+
+    Ok(())
 }
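For context, the manifest built by parse_magisk_zip_and_update follows the shape Magisk-style update checkers look for (version, versionCode, zipUrl, changelog): the #[serde(rename)] attributes map the Rust field names to the camelCase keys, and #[serde(skip)] keeps run_number out of the payload. A rough, hedged sketch of the resulting JSON, assuming serde with the derive feature; the field values below are made up, only the key names follow from the attributes:

use serde::Serialize;

#[derive(Serialize)]
struct Manifest {
    version: String,
    #[serde(rename = "versionCode")]
    version_code: u64,
    #[serde(rename = "zipUrl")]
    zip_url: String,
    changelog: String,
    #[serde(skip)]
    run_number: u64, // internal only, never serialized
}

fn main() {
    let m = Manifest {
        version: "0.0.0-example".into(),
        version_code: 42,
        zip_url: "https://openeuicc.com/magisk/magisk-openeuicc-debug-42.zip".into(),
        changelog: "https://gitea.angry.im/PeterCxy/OpenEUICC/commits/branch/master".into(),
        run_number: 42,
    };
    // Prints something like:
    // {"version":"0.0.0-example","versionCode":42,"zipUrl":"https://...","changelog":"https://..."}
    println!("{}", serde_json::to_string(&m).unwrap());
}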
@@ -114,8 +192,12 @@ fn do_parse_magisk_zip(path: String, run_number: u64) -> eyre::Result<MagiskZipMetadata> {
     let mut ret = MagiskZipMetadata {
         version: "".to_string(),
         version_code: 0,
-        zip_url: format!("https://{}/magisk/magisk-{}.zip", *ROOT_DOMAIN, run_number),
+        zip_url: format!(
+            "https://{}/magisk/magisk-openeuicc-debug-{}.zip",
+            *ROOT_DOMAIN, run_number
+        ),
         changelog: "https://gitea.angry.im/PeterCxy/OpenEUICC/commits/branch/master".to_string(),
         run_number,
     };
+
     for line in module_prop.lines() {
@@ -134,3 +216,36 @@ fn do_parse_magisk_zip(path: String, run_number: u64) -> eyre::Result<MagiskZipMetadata> {
 
     Ok(ret)
 }
+
+async fn cleanup_and_get_latest_zip() -> eyre::Result<Option<(String, u64)>> {
+    let mut dir = tokio::fs::read_dir(&*CACHE_DIR).await?;
+    let mut zips = Vec::new();
+
+    while let Some(entry) = dir.next_entry().await? {
+        if entry.file_type().await?.is_file() {
+            let name = entry.file_name().into_string().unwrap();
+            if name.ends_with(".zip") && name.starts_with("magisk-openeuicc-debug-") {
+                let run_number: u64 = name
+                    .replacen("magisk-openeuicc-debug-", "", 1)
+                    .replacen(".zip", "", 1)
+                    .parse()?;
+                zips.push((name, run_number));
+            }
+        }
+    }
+
+    if zips.is_empty() {
+        return Ok(None);
+    }
+
+    zips.sort_by_key(|z| z.1);
+    zips.reverse();
+
+    for zip in zips.iter().skip(1) {
+        let path = format!("{}/{}", *CACHE_DIR, zip.0);
+        info!("Deleting {path}");
+        tokio::fs::remove_file(&path).await.ok();
+    }
+
+    Ok(Some(zips[0].clone()))
+}
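One detail worth calling out: update_latest_build writes artifacts as magisk-openeuicc-debug-{run_number}.zip, and cleanup_and_get_latest_zip recovers the run number from that filename before pruning everything but the newest zip. A small sketch of that round trip; the helper name and sample values are illustrative, and strip_prefix/strip_suffix are used here instead of the replacen calls above:

// Extract the CI run number from a cached zip name, e.g.
// "magisk-openeuicc-debug-42.zip" -> Some(42).
fn run_number_from_name(name: &str) -> Option<u64> {
    name.strip_prefix("magisk-openeuicc-debug-")?
        .strip_suffix(".zip")?
        .parse()
        .ok()
}

fn main() {
    assert_eq!(run_number_from_name("magisk-openeuicc-debug-42.zip"), Some(42));
    assert_eq!(run_number_from_name("magisk-42.zip"), None);
    println!("filename convention holds");
}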