1#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
2#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals)]
3
/// One target platform for which sidecar binaries are vendored.
#[derive(Clone, Debug)]
struct PlatformTarget {
	/// Platform identifier as it appears in the upstream download URL and
	/// archive file name (e.g. "win-x64", "darwin-arm64").
	DownloadIdentifier:String,

	/// Archive file extension used by this platform's distribution ("zip" or "tar.gz").
	ArchiveExtension:String,

	/// Rust/Tauri target triple; used as the on-disk directory name and as
	/// part of the download-cache key (e.g. "x86_64-pc-windows-msvc").
	TauriTargetTriple:String,
}
45
/// Archive container format of a downloaded sidecar distribution,
/// selecting the extraction strategy in `ExtractArchive`.
#[derive(Clone, Debug, PartialEq)]
enum ArchiveType {
	/// `.zip` archive (used for the Windows target).
	Zip,

	/// `.tar.gz` archive (used for the Linux and macOS targets).
	TarGz,
}
54
/// One entry of the Node.js release index fetched from
/// `https://nodejs.org/dist/index.json`. Only the version field is needed.
#[derive(Deserialize, Debug)]
struct NodeVersionInfo {
	// Full version string including the leading "v", e.g. "v20.11.1".
	version:String,
}
61
/// Everything `ProcessDownloadTask` needs to download, extract, and install
/// one sidecar binary for one platform.
#[derive(Clone, Debug)]
struct DownloadTask {
	/// Sidecar identifier (e.g. "NODE"); used in log output and the cache key.
	SidecarName:String,

	/// Major version requested (e.g. "20"); used as the destination folder name.
	MajorVersion:String,

	/// Fully resolved version (e.g. "v20.11.1"); recorded in the cache on success.
	FullVersion:String,

	/// Complete URL of the archive to download.
	DownloadURL:String,

	/// Directory in which a per-task temporary directory is created.
	TempParentDirectory:PathBuf,

	/// Final install location; replaced wholesale if it already exists.
	DestinationDirectory:PathBuf,

	/// Container format of the archive at `DownloadURL`.
	ArchiveType:ArchiveType,

	/// Name of the single top-level folder the archive extracts to.
	ExtractedFolderName:String,

	/// Target triple this task installs for; used in logs and the cache key.
	TauriTargetTriple:String,
}
94
/// Persistent record of installed sidecar versions, stored as `Cache.json`.
#[derive(Serialize, Deserialize, Debug, Default)]
struct DownloadCache {
	// Maps "TargetTriple/SidecarName/MajorVersion" -> installed full version
	// (e.g. "x86_64-pc-windows-msvc/NODE/20" -> "v20.11.1").
	Entries:HashMap<String, String>,
}
106
107impl DownloadCache {
108 fn Load(CachePath:&Path) -> Self {
111 if !CachePath.exists() {
112 info!("Cache file not found. A new one will be created.");
113
114 return DownloadCache::default();
115 }
116
117 let FileContents = match fs::read_to_string(CachePath) {
118 Ok(Contents) => Contents,
119
120 Err(Error) => {
121 warn!("Failed to read cache file: {}. Starting with an empty cache.", Error);
122
123 return DownloadCache::default();
124 },
125 };
126
127 match serde_json::from_str(&FileContents) {
128 Ok(Cache) => {
129 info!("Successfully loaded download cache.");
130
131 Cache
132 },
133
134 Err(Error) => {
135 warn!("Failed to parse cache file: {}. Starting with an empty cache.", Error);
136
137 DownloadCache::default()
138 },
139 }
140 }
141
142 fn Save(&self, CachePath:&Path) -> Result<()> {
146 let SortedEntries:BTreeMap<_, _> = self.Entries.iter().collect();
148
149 let CacheToSerialize = serde_json::json!({
151
152 "Entries": SortedEntries
153 });
154
155 let mut Buffer = Vec::new();
157
158 let Formatter = serde_json::ser::PrettyFormatter::with_indent(b"\t");
160
161 let mut Serializer = serde_json::Serializer::with_formatter(&mut Buffer, Formatter);
163
164 CacheToSerialize.serialize(&mut Serializer)?;
166
167 fs::write(CachePath, &Buffer)
169 .with_context(|| format!("Failed to write tab-formatted cache to {:?}", CachePath))?;
170
171 Ok(())
172 }
173}
174
175fn GetBaseSidecarDirectory() -> Result<PathBuf> {
184 let CurrentExePath = env::current_exe().context("Failed to get the path of the current executable.")?;
186
187 let mut CurrentDir = CurrentExePath
189 .parent()
190 .context("Executable must be in a directory (not the root).")?;
191
192 loop {
193 let LibraryRsPath = CurrentDir.join("Source").join("Library.rs");
196
197 if LibraryRsPath.exists() {
198 return Ok(CurrentDir.to_path_buf());
199 }
200
201 let CargoTomlPath = CurrentDir.join("Cargo.toml");
204
205 if CargoTomlPath.exists() {
206 if let Ok(CargoContents) = fs::read_to_string(&CargoTomlPath) {
207 if let Ok(Toml) = toml::from_str::<toml::Value>(&CargoContents) {
208 if let Some(Package) = Toml.get("package") {
209 if let Some(PackageName) = Package.get("name").and_then(|v| v.as_str()) {
210 if PackageName == "SideCar" {
211 let SourceDir = CurrentDir.join("Source");
213
214 if SourceDir.exists() {
215 return Ok(CurrentDir.to_path_buf());
216 }
217 }
218 }
219 }
220 }
221 }
222 }
223
224 let SubdirCargoTomlPath = CurrentDir.join("Element").join("SideCar").join("Cargo.toml");
228
229 if SubdirCargoTomlPath.exists() {
230 if let Ok(CargoContents) = fs::read_to_string(&SubdirCargoTomlPath) {
231 if let Ok(Toml) = toml::from_str::<toml::Value>(&CargoContents) {
232 if let Some(Package) = Toml.get("package") {
233 if let Some(PackageName) = Package.get("name").and_then(|v| v.as_str()) {
234 if PackageName == "SideCar" {
235 let SourceDir = CurrentDir.join("Element").join("SideCar").join("Source");
237
238 if SourceDir.exists() {
239 return Ok(CurrentDir.join("Element").join("SideCar"));
241 }
242 }
243 }
244 }
245 }
246 }
247 }
248
249 let NextDir = match CurrentDir.parent() {
251 Some(Parent) => Parent,
252
253 None => break, };
255
256 CurrentDir = NextDir;
257 }
258
259 Err(anyhow!(
260 "Could not determine the SideCar base directory. The executable should be built from within the SideCar crate \
261 or from the workspace containing Element/SideCar. Searched up from: {}",
262 CurrentExePath.display()
263 ))
264}
265
266fn GetPlatformMatrix() -> Vec<PlatformTarget> {
269 vec![
270 PlatformTarget {
271 DownloadIdentifier:"win-x64".to_string(),
272
273 ArchiveExtension:"zip".to_string(),
274
275 TauriTargetTriple:"x86_64-pc-windows-msvc".to_string(),
276 },
277 PlatformTarget {
278 DownloadIdentifier:"linux-x64".to_string(),
279
280 ArchiveExtension:"tar.gz".to_string(),
281
282 TauriTargetTriple:"x86_64-unknown-linux-gnu".to_string(),
283 },
284 PlatformTarget {
285 DownloadIdentifier:"linux-arm64".to_string(),
286
287 ArchiveExtension:"tar.gz".to_string(),
288
289 TauriTargetTriple:"aarch64-unknown-linux-gnu".to_string(),
290 },
291 PlatformTarget {
292 DownloadIdentifier:"darwin-x64".to_string(),
293
294 ArchiveExtension:"tar.gz".to_string(),
295
296 TauriTargetTriple:"x86_64-apple-darwin".to_string(),
297 },
298 PlatformTarget {
299 DownloadIdentifier:"darwin-arm64".to_string(),
300
301 ArchiveExtension:"tar.gz".to_string(),
302
303 TauriTargetTriple:"aarch64-apple-darwin".to_string(),
304 },
305 ]
306}
307
/// Returns the map of sidecar names to the major versions that should be
/// vendored for each one. Currently only "NODE" with majors 24 down to 16.
fn GetSidecarsToFetch() -> HashMap<String, Vec<String>> {
	// Listed newest-first; this is also the order tasks are planned in.
	let NodeMajorVersions:Vec<String> = ["24", "23", "22", "21", "20", "19", "18", "17", "16"]
		.iter()
		.map(|Version| Version.to_string())
		.collect();

	let mut Sidecars = HashMap::new();

	Sidecars.insert("NODE".to_string(), NodeMajorVersions);

	Sidecars
}
323
/// Environment variable consulted by `Logger` for the log level (e.g. "info", "debug").
pub const LogEnv:&str = "RUST_LOG";
328
/// Creates or amends `.gitattributes` in `BaseDirectory` so that vendored
/// Node binaries and SideCar build artifacts are tracked with Git LFS.
///
/// If the file does not exist, it is created with a header plus the full
/// rule set. If it exists, only the rules missing from it are appended,
/// under an "Automatically Added" banner.
fn UpdateGitattributes(BaseDirectory:&Path) -> Result<()> {
	// Header written verbatim when the file is created from scratch.
	const GITATTRIBUTES_HEADER:&str = r#"################################################################################
# Git LFS configuration for vendored Tauri Sidecars
#
# This file tells Git to use LFS (Large File Storage) for the heavy binary
# files and modules downloaded by the sidecar vendoring script. This keeps the
# main repository history small and fast.
#
# The `-text` attribute is used to prevent Git from normalizing line endings,

# which is critical for binary files and scripts.
#
# This file is automatically managed by the sidecar vendor script.
################################################################################

# --- Rule Definitions ---"#;

	// One output line per entry; empty strings and "#" lines are layout only
	// and are excluded from the "missing rule" check below.
	const GITATTRIBUTES_RULES:&[&str] = &[
		"**/NODE/**/bin/node filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/node.exe filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/bin/npm filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/bin/npx filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/bin/corepack filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npm filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npm.cmd filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npx filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npx.cmd filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/corepack filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/corepack.cmd filter=lfs diff=lfs merge=lfs -text",
		"",
		"# --- Rules for the SideCar build artifacts ---",
		"",
		"Target/debug/*.exe filter=lfs diff=lfs merge=lfs -text",
		"Target/release/*.exe filter=lfs diff=lfs merge=lfs -text",
		"",
		"Target/debug/SideCar filter=lfs diff=lfs merge=lfs -text",
		"Target/release/SideCar filter=lfs diff=lfs merge=lfs -text",
		"",
		"Target/debug/Download filter=lfs diff=lfs merge=lfs -text",
		"Target/release/Download filter=lfs diff=lfs merge=lfs -text",
	];

	let GitattributesPath = BaseDirectory.join(".gitattributes");

	if !GitattributesPath.exists() {
		// Fresh file: write the header followed by every rule.
		info!("Creating .gitattributes file to track binaries with Git LFS.");

		let mut File = File::create(&GitattributesPath)
			.with_context(|| format!("Failed to create .gitattributes file at {:?}", GitattributesPath))?;

		writeln!(File, "{}", GITATTRIBUTES_HEADER)?;

		for Rule in GITATTRIBUTES_RULES {
			writeln!(File, "{}", Rule)?;
		}
	} else {
		// Existing file: append only the real rules that are not already present.
		info!(".gitattributes file found. Verifying LFS rules...");

		let Content = fs::read_to_string(&GitattributesPath)?;

		// NOTE(review): presence is a plain substring check, so a rule that
		// appears commented-out in the file still counts as present — confirm
		// that is acceptable.
		let MissingRules:Vec<_> = GITATTRIBUTES_RULES
			.iter()
			.filter(|rule| !rule.is_empty() && !rule.starts_with('#'))
			.filter(|rule| !Content.contains(*rule))
			.collect();

		if !MissingRules.is_empty() {
			info!("Adding {} missing LFS rules to .gitattributes.", MissingRules.len());

			let mut File = fs::OpenOptions::new()
				.append(true)
				.open(&GitattributesPath)
				.with_context(|| format!("Failed to open .gitattributes for appending at {:?}", GitattributesPath))?;

			writeln!(File, "\n\n# --- Rules Automatically Added by Vendor Script ---")?;

			for Rule in MissingRules {
				writeln!(File, "{}", Rule)?;
			}
		} else {
			info!(".gitattributes is already up to date.");
		}
	}

	Ok(())
}
420
421async fn FetchNodeVersions(Client:&Client) -> Result<Vec<NodeVersionInfo>> {
425 info!("Fetching Node.js version index for resolving versions...");
426
427 let Response = Client
428 .get("https://nodejs.org/dist/index.json")
429 .send()
430 .await
431 .context("Failed to send request to Node.js version index.")?;
432
433 if !Response.status().is_success() {
434 return Err(anyhow!("Received non-success status from Node.js index: {}", Response.status()));
435 }
436
437 let Versions = Response
438 .json::<Vec<NodeVersionInfo>>()
439 .await
440 .context("Failed to parse Node.js version index JSON.")?;
441
442 Ok(Versions)
443}
444
445fn ResolveLatestPatchVersion(MajorVersion:&str, AllVersions:&[NodeVersionInfo]) -> Option<String> {
448 let VersionPrefix = format!("v{}.", MajorVersion);
449
450 AllVersions
451 .iter()
452 .find(|v| v.version.starts_with(&VersionPrefix))
453 .map(|v| v.version.clone())
454}
455
456async fn DownloadFile(Client:&Client, URL:&str, DestinationPath:&Path) -> Result<()> {
458 let mut Response = Client.get(URL).send().await?.error_for_status()?;
459
460 let mut DestinationFile =
461 File::create(DestinationPath).with_context(|| format!("Failed to create file at {:?}", DestinationPath))?;
462
463 while let Some(Chunk) = Response.chunk().await? {
465 DestinationFile.write_all(&Chunk)?;
466 }
467
468 Ok(())
469}
470
471fn ExtractArchive(ArchiveType:&ArchiveType, ArchivePath:&Path, ExtractionDirectory:&Path) -> Result<()> {
475 info!("Performing a full extraction of the archive...");
476
477 match ArchiveType {
478 ArchiveType::Zip => {
479 let File = File::open(ArchivePath)?;
480
481 let mut Archive = zip::ZipArchive::new(File)?;
482
483 Archive.extract(ExtractionDirectory)?;
484 },
485
486 ArchiveType::TarGz => {
487 let File = File::open(ArchivePath)?;
488
489 let Decompressor = flate2::read::GzDecoder::new(File);
490
491 let mut Archive = tar::Archive::new(Decompressor);
492
493 Archive.unpack(ExtractionDirectory)?;
494 },
495 }
496
497 Ok(())
498}
499
500async fn ProcessDownloadTask(Task:DownloadTask, Client:Client, Cache:Arc<Mutex<DownloadCache>>) -> Result<()> {
503 let TempDirectory = Builder::new()
505 .prefix("SideCar-Download-")
506 .tempdir_in(&Task.TempParentDirectory)
507 .context("Failed to create temporary directory.")?;
508
509 let ArchiveName = Task.DownloadURL.split('/').last().unwrap_or("Download.tmp");
510
511 let ArchivePath = TempDirectory.path().join(ArchiveName);
512
513 info!(
514 " [{}/{}] Downloading from: {}",
515 Task.TauriTargetTriple, Task.SidecarName, Task.DownloadURL
516 );
517
518 if let Err(Error) = DownloadFile(&Client, &Task.DownloadURL, &ArchivePath).await {
519 error!(
520 " [{}/{}] Failed to download {}: {}",
521 Task.TauriTargetTriple, Task.SidecarName, ArchiveName, Error
522 );
523
524 return Err(Error.into());
525 }
526
527 info!(" [{}/{}] Extracting archive...", Task.TauriTargetTriple, Task.SidecarName);
528
529 if let Err(Error) = ExtractArchive(&Task.ArchiveType, &ArchivePath, TempDirectory.path()) {
530 error!(
531 " [{}/{}] Failed to extract {}: {}",
532 Task.TauriTargetTriple, Task.SidecarName, ArchiveName, Error
533 );
534
535 return Err(Error.into());
536 }
537
538 let ExtractedPath = TempDirectory.path().join(&Task.ExtractedFolderName);
539
540 if !ExtractedPath.exists() {
541 let ErrorMessage = format!(" Could not find extracted folder: {:?}", ExtractedPath);
542
543 error!("{}", ErrorMessage);
544
545 return Err(anyhow!(ErrorMessage));
546 }
547
548 if Task.DestinationDirectory.exists() {
550 info!(" Removing old version at: {:?}", Task.DestinationDirectory);
551
552 fs::remove_dir_all(&Task.DestinationDirectory)?;
553 }
554
555 if let Some(Parent) = Task.DestinationDirectory.parent() {
557 fs::create_dir_all(Parent)?;
558 }
559
560 info!(" Installing to: {:?}", Task.DestinationDirectory);
561
562 fs::rename(&ExtractedPath, &Task.DestinationDirectory).with_context(|| {
563 format!(
564 "Failed to rename/move extracted directory from {:?} to {:?}",
565 ExtractedPath, Task.DestinationDirectory
566 )
567 })?;
568
569 let CacheKey = format!("{}/{}/{}", Task.TauriTargetTriple, Task.SidecarName, Task.MajorVersion);
571
572 let mut LockedCache = Cache.lock().unwrap();
573
574 LockedCache.Entries.insert(CacheKey, Task.FullVersion.clone());
575
576 info!(
577 " v{} ({}) for '{}' is now up to date.",
578 Task.MajorVersion, Task.FullVersion, Task.TauriTargetTriple
579 );
580
581 Ok(())
582}
583
584pub fn Logger() {
586 let LevelText = env::var(LogEnv).unwrap_or_else(|_| "info".to_string());
587
588 let LogLevel = LevelText.parse::<LevelFilter>().unwrap_or(LevelFilter::Info);
589
590 env_logger::Builder::new()
591 .filter_level(LogLevel)
592 .format(|Buffer, Record| {
593 let LevelStyle = match Record.level() {
594 log::Level::Error => "ERROR".red().bold(),
595
596 log::Level::Warn => "WARN".yellow().bold(),
597
598 log::Level::Info => "INFO".green(),
599
600 log::Level::Debug => "DEBUG".blue(),
601
602 log::Level::Trace => "TRACE".magenta(),
603 };
604
605 writeln!(Buffer, "[{}] [{}]: {}", "Download".red(), LevelStyle, Record.args())
606 })
607 .parse_default_env()
608 .init();
609}
610
611#[tokio::main]
612pub async fn Fn() -> Result<()> {
613 Logger();
614
615 info!("Starting Universal Sidecar vendoring process...");
616
617 let BaseSidecarDirectory = GetBaseSidecarDirectory()?;
619
620 UpdateGitattributes(&BaseSidecarDirectory)?;
622
623 let TempDownloadsDirectory = BaseSidecarDirectory.join("Temporary");
625
626 fs::create_dir_all(&TempDownloadsDirectory)
627 .with_context(|| format!("Failed to create temporary directory at {:?}", TempDownloadsDirectory))?;
628
629 let CachePath = BaseSidecarDirectory.join("Cache.json");
630
631 let Cache = Arc::new(Mutex::new(DownloadCache::Load(&CachePath)));
632
633 let HttpClient = Client::new();
634
635 let PlatformMatrix = GetPlatformMatrix();
636
637 let SidecarsToFetch = GetSidecarsToFetch();
638
639 let NodeVersions = FetchNodeVersions(&HttpClient).await?;
641
642 let mut TasksToRun = Vec::new();
643
644 for Platform in &PlatformMatrix {
647 info!("--- Processing architecture: '{}' ---", Platform.TauriTargetTriple);
648
649 for (SidecarName, MajorVersions) in &SidecarsToFetch {
650 info!(" -> Processing sidecar: '{}'", SidecarName);
651
652 for MajorVersion in MajorVersions {
653 let DestinationDirectory = BaseSidecarDirectory
654 .join(&Platform.TauriTargetTriple)
655 .join(SidecarName)
656 .join(MajorVersion);
657
658 if SidecarName == "NODE" {
660 let FullVersion = match ResolveLatestPatchVersion(MajorVersion, &NodeVersions) {
661 Some(Version) => Version,
662
663 None => {
664 warn!(
665 " Could not resolve a specific version for Node.js v{}. Skipping.",
666 MajorVersion
667 );
668
669 continue;
670 },
671 };
672
673 let CacheKey = format!("{}/{}/{}", &Platform.TauriTargetTriple, SidecarName, MajorVersion);
675
676 let CachedVersion = Cache.lock().unwrap().Entries.get(&CacheKey).cloned();
677
678 if Some(FullVersion.clone()) == CachedVersion {
679 info!(" v{} ({}) is already up to date, skipping.", MajorVersion, FullVersion);
680
681 continue;
682 }
683
684 if CachedVersion.is_some() {
685 info!(
686 " Found newer patch for v{}: {} -> {}. Scheduling update.",
687 MajorVersion,
688 CachedVersion.unwrap(),
689 FullVersion
690 );
691 } else {
692 info!(" Processing v{} (resolved to {})...", MajorVersion, FullVersion);
693 }
694
695 let ArchiveExtension = &Platform.ArchiveExtension;
696
697 let ArchiveName =
698 format!("node-{}-{}.{}", FullVersion, Platform.DownloadIdentifier, ArchiveExtension);
699
700 let DownloadURL = format!("https://nodejs.org/dist/{}/{}", FullVersion, ArchiveName);
701
702 let ExtractedFolderName = format!("node-{}-{}", FullVersion, Platform.DownloadIdentifier);
703
704 let Task = DownloadTask {
705 SidecarName:SidecarName.clone(),
706
707 MajorVersion:MajorVersion.clone(),
708
709 FullVersion,
710
711 DownloadURL,
712
713 TempParentDirectory:TempDownloadsDirectory.clone(),
714
715 DestinationDirectory,
716
717 ArchiveType:if ArchiveExtension == "zip" { ArchiveType::Zip } else { ArchiveType::TarGz },
718
719 ExtractedFolderName,
720
721 TauriTargetTriple:Platform.TauriTargetTriple.clone(),
722 };
723
724 TasksToRun.push(Task);
725 }
726
727 }
730 }
731 }
732
733 if TasksToRun.is_empty() {
735 info!("All sidecar binaries are already up to date.");
736 } else {
737 info!("Found {} tasks to run. Starting concurrent downloads...", TasksToRun.len());
738
739 let NumberOfConcurrentJobs = num_cpus::get().min(8);
741
742 let Results = stream::iter(TasksToRun)
745 .map(|Task| {
746 let Client = HttpClient.clone();
747
748 let Cache = Arc::clone(&Cache);
749
750 tokio::spawn(async move { ProcessDownloadTask(Task, Client, Cache).await })
751 })
752 .buffer_unordered(NumberOfConcurrentJobs)
753 .collect::<Vec<_>>()
754 .await;
755
756 let mut ErrorsEncountered = 0;
758
759 for Result in Results {
760 if let Err(JoinError) = Result {
762 error!("A download task panicked or was cancelled: {}", JoinError);
763
764 ErrorsEncountered += 1;
765 } else if let Ok(Err(AppError)) = Result {
766 error!("A download task failed: {}", AppError);
769
770 ErrorsEncountered += 1;
771 }
772 }
773
774 if ErrorsEncountered > 0 {
775 error!("Completed with {} errors.", ErrorsEncountered);
776 }
777 }
778
779 info!("Saving updated cache...");
781
782 Cache.lock().unwrap().Save(&CachePath)?;
783
784 info!("All sidecar binaries have been successfully processed and organized.");
785
786 Ok(())
787}
788
789#[allow(unused)]
791fn main() {
792 if let Err(Error) = Fn() {
794 error!("The application encountered a fatal error: {}", Error);
796
797 std::process::exit(1);
798 }
799}
800
801use std::{
803 collections::{BTreeMap, HashMap},
804 env,
805 fs::{self, File},
806 io::Write,
807 path::{Path, PathBuf},
808 sync::{Arc, Mutex},
809};
810
811use anyhow::{Context, Result, anyhow};
812use colored::*;
813use futures::stream::{self, StreamExt};
814use log::{LevelFilter, error, info, warn};
815use reqwest::Client;
816use serde::{Deserialize, Serialize};
817use tempfile::Builder;
818use toml;