// SideCar/Download.rs

1#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
2#![allow(non_snake_case, non_camel_case_types, non_upper_case_globals)]
3
4//! ==============================================================================
5//! Universal Sidecar Vendor - Rust Edition
6//!
7//! This program automates downloading and organizing full distributions of
8//! various sidecar runtimes (like Node.js) for a Tauri application. It is a
9//! Rust rewrite of the original shell script, enhanced with modern features.
10//!
11//! Key Features:
12//!   - Asynchronous, Concurrent Downloads: Leverages Tokio to download multiple
13//!     binaries in parallel, significantly speeding up the process.
14//!   - Intelligent Caching: Maintains a `Cache.json` file to track downloaded
15//!     versions. It automatically detects if a newer patch version is available
16//!     for a requested major version and updates the binary.
17//!   - Git LFS Management: Automatically creates or updates the
18//!     `.gitattributes` file to ensure large binaries are tracked by Git LFS.
19//!   - Extensible Design: Easily configured to support new sidecars, versions,
20//!     and platforms.
21//!   - Robust Error Handling: Uses `anyhow` for clear and concise error
22//!     reporting.
23//!   - Preserved File Structure: The final output directory structure remains
24//!     identical to the original script (`Architecture/SidecarName/Version`).
25//!
26//! ==============================================================================
27
28// --- Type Definitions and Structs ---
29
/// Represents a single platform target for which binaries will be downloaded.
/// This struct holds all the necessary identifiers for a given platform.
#[derive(Clone, Debug)]
struct PlatformTarget {
	/// The identifier used in the download URL (e.g., "win-x64",
	/// "linux-arm64").
	DownloadIdentifier:String,

	/// The file extension of the archive (e.g., "zip", "tar.gz").
	/// Also selects the extraction strategy ("zip" → `ArchiveType::Zip`,
	/// everything else → `ArchiveType::TarGz`).
	ArchiveExtension:String,

	/// The official Tauri target triple for this platform (e.g.,
	/// "x86_64-pc-windows-msvc"). Used as the top-level output directory name
	/// and as the first segment of each cache key.
	TauriTargetTriple:String,
}
45
/// Defines the type of archive being handled, which determines the extraction
/// logic (the `zip` crate for `Zip`, `flate2` + `tar` for `TarGz`).
///
/// Derives `Copy`/`Eq` in addition to the original set: the enum is a pair of
/// unit variants, so copying is free and equality is total.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum ArchiveType {
	/// A `.zip` archive (used for the Windows download).
	Zip,

	/// A `.tar.gz` archive (used for the Linux and macOS downloads).
	TarGz,
}
54
/// Represents a specific version of Node.js as returned by the official index.
/// Used for deserializing the JSON response from `nodejs.org/dist/index.json`.
/// Only the `version` field is declared; all other JSON fields in each index
/// entry are ignored by serde.
#[derive(Deserialize, Debug)]
struct NodeVersionInfo {
	/// Full version tag including the leading "v" (e.g., "v24.0.0").
	version:String,
}
61
/// Contains all the necessary information to perform a single download and
/// installation task. An instance of this struct is created for each binary
/// that needs to be downloaded — one per platform/sidecar/major-version
/// combination that the cache reports as missing or outdated.
#[derive(Clone, Debug)]
struct DownloadTask {
	/// The name of the sidecar (e.g., "NODE").
	SidecarName:String,

	/// The major version string requested (e.g., "24").
	MajorVersion:String,

	/// The full, resolved version string (e.g., "v24.0.0").
	FullVersion:String,

	/// The complete URL to download the archive from.
	DownloadURL:String,

	/// The directory where temporary folders for this task will be created.
	TempParentDirectory:PathBuf,

	/// The final destination directory for the extracted binaries
	/// (`TauriTargetTriple/SidecarName/MajorVersion`). Replaced wholesale if
	/// it already exists.
	DestinationDirectory:PathBuf,

	/// The type of archive to be downloaded.
	ArchiveType:ArchiveType,

	/// The name of the root folder inside the archive once extracted; this is
	/// the folder that gets renamed into `DestinationDirectory`.
	ExtractedFolderName:String,

	/// The Tauri target triple for this download task.
	TauriTargetTriple:String,
}
94
/// Represents the structure of the `Cache.json` file.
/// It uses a HashMap to map a unique key (representing a specific
/// sidecar/version/platform) to the full version string that was last
/// downloaded.
#[derive(Serialize, Deserialize, Debug, Default)]
struct DownloadCache {
	/// The core data structure for the cache.
	/// Key: A unique string like "x86_64-pc-windows-msvc/NODE/24".
	/// Value: The full version string, like "v24.0.0".
	Entries:HashMap<String, String>,
}
106
107impl DownloadCache {
108	/// Loads the cache from the `Cache.json` file in the base sidecar
109	/// directory. If the file doesn't exist, it returns a new, empty cache.
110	fn Load(CachePath:&Path) -> Self {
111		if !CachePath.exists() {
112			info!("Cache file not found. A new one will be created.");
113
114			return DownloadCache::default();
115		}
116
117		let FileContents = match fs::read_to_string(CachePath) {
118			Ok(Contents) => Contents,
119
120			Err(Error) => {
121				warn!("Failed to read cache file: {}. Starting with an empty cache.", Error);
122
123				return DownloadCache::default();
124			},
125		};
126
127		match serde_json::from_str(&FileContents) {
128			Ok(Cache) => {
129				info!("Successfully loaded download cache.");
130
131				Cache
132			},
133
134			Err(Error) => {
135				warn!("Failed to parse cache file: {}. Starting with an empty cache.", Error);
136
137				DownloadCache::default()
138			},
139		}
140	}
141
142	/// Saves the current state of the cache to the `Cache.json` file.
143	/// The JSON is pretty-printed with tabs for indentation.
144	/// Entries are sorted alphabetically by key for consistency.
145	fn Save(&self, CachePath:&Path) -> Result<()> {
146		// Create a BTreeMap to sort entries alphabetically by key
147		let SortedEntries:BTreeMap<_, _> = self.Entries.iter().collect();
148
149		// Create a temporary struct to hold the sorted entries for serialization
150		let CacheToSerialize = serde_json::json!({
151
152			"Entries": SortedEntries
153		});
154
155		// Create an in-memory buffer to write the serialized JSON to.
156		let mut Buffer = Vec::new();
157
158		// Create a formatter that uses a tab character for indentation.
159		let Formatter = serde_json::ser::PrettyFormatter::with_indent(b"\t");
160
161		// Create a serializer with our custom formatter.
162		let mut Serializer = serde_json::Serializer::with_formatter(&mut Buffer, Formatter);
163
164		// Serialize the sorted cache data into the buffer.
165		CacheToSerialize.serialize(&mut Serializer)?;
166
167		// Write the buffer's contents to the actual file on disk.
168		fs::write(CachePath, &Buffer)
169			.with_context(|| format!("Failed to write tab-formatted cache to {:?}", CachePath))?;
170
171		Ok(())
172	}
173}
174
175// --- Configuration ---
176
177/// Returns the root directory where all sidecars will be stored.
178/// This is determined dynamically by navigating up from the executable's
179/// location and detecting the SideCar project root. It handles both:
180/// - Standalone builds: `.../SideCar/Target/release/`
181/// - Workspace builds: `.../workspace/Target/release/SideCar` (where the
182///   workspace root contains multiple crates including Element/SideCar)
183fn GetBaseSidecarDirectory() -> Result<PathBuf> {
184	// Get the full path to the currently running executable.
185	let CurrentExePath = env::current_exe().context("Failed to get the path of the current executable.")?;
186
187	// Start from the directory containing the executable and walk up the tree.
188	let mut CurrentDir = CurrentExePath
189		.parent()
190		.context("Executable must be in a directory (not the root).")?;
191
192	loop {
193		// Check A: Does Source/Library.rs exist in current directory? → return current
194		// directory
195		let LibraryRsPath = CurrentDir.join("Source").join("Library.rs");
196
197		if LibraryRsPath.exists() {
198			return Ok(CurrentDir.to_path_buf());
199		}
200
201		// Check B: Does a Cargo.toml exist in current directory with package.name =
202		// "SideCar"? → return current directory
203		let CargoTomlPath = CurrentDir.join("Cargo.toml");
204
205		if CargoTomlPath.exists() {
206			if let Ok(CargoContents) = fs::read_to_string(&CargoTomlPath) {
207				if let Ok(Toml) = toml::from_str::<toml::Value>(&CargoContents) {
208					if let Some(Package) = Toml.get("package") {
209						if let Some(PackageName) = Package.get("name").and_then(|v| v.as_str()) {
210							if PackageName == "SideCar" {
211								// Verify that Source subdirectory exists as additional confirmation.
212								let SourceDir = CurrentDir.join("Source");
213
214								if SourceDir.exists() {
215									return Ok(CurrentDir.to_path_buf());
216								}
217							}
218						}
219					}
220				}
221			}
222		}
223
224		// Check C: Does Element/SideCar/Cargo.toml exist relative to current directory
225		// AND does it have package.name = "SideCar"? → return Element/SideCar
226		// subdirectory path
227		let SubdirCargoTomlPath = CurrentDir.join("Element").join("SideCar").join("Cargo.toml");
228
229		if SubdirCargoTomlPath.exists() {
230			if let Ok(CargoContents) = fs::read_to_string(&SubdirCargoTomlPath) {
231				if let Ok(Toml) = toml::from_str::<toml::Value>(&CargoContents) {
232					if let Some(Package) = Toml.get("package") {
233						if let Some(PackageName) = Package.get("name").and_then(|v| v.as_str()) {
234							if PackageName == "SideCar" {
235								// Verify that the Element/SideCar/Source subdirectory exists.
236								let SourceDir = CurrentDir.join("Element").join("SideCar").join("Source");
237
238								if SourceDir.exists() {
239									// Return the full path to the Element/SideCar subdirectory.
240									return Ok(CurrentDir.join("Element").join("SideCar"));
241								}
242							}
243						}
244					}
245				}
246			}
247		}
248
249		// Move up one level.
250		let NextDir = match CurrentDir.parent() {
251			Some(Parent) => Parent,
252
253			None => break, // Reached filesystem root without finding the project
254		};
255
256		CurrentDir = NextDir;
257	}
258
259	Err(anyhow!(
260		"Could not determine the SideCar base directory. The executable should be built from within the SideCar crate \
261		 or from the workspace containing Element/SideCar. Searched up from: {}",
262		CurrentExePath.display()
263	))
264}
265
266/// Defines the matrix of platforms to target. Each entry specifies how to
267/// download and identify binaries for a specific architecture.
268fn GetPlatformMatrix() -> Vec<PlatformTarget> {
269	vec![
270		PlatformTarget {
271			DownloadIdentifier:"win-x64".to_string(),
272
273			ArchiveExtension:"zip".to_string(),
274
275			TauriTargetTriple:"x86_64-pc-windows-msvc".to_string(),
276		},
277		PlatformTarget {
278			DownloadIdentifier:"linux-x64".to_string(),
279
280			ArchiveExtension:"tar.gz".to_string(),
281
282			TauriTargetTriple:"x86_64-unknown-linux-gnu".to_string(),
283		},
284		PlatformTarget {
285			DownloadIdentifier:"linux-arm64".to_string(),
286
287			ArchiveExtension:"tar.gz".to_string(),
288
289			TauriTargetTriple:"aarch64-unknown-linux-gnu".to_string(),
290		},
291		PlatformTarget {
292			DownloadIdentifier:"darwin-x64".to_string(),
293
294			ArchiveExtension:"tar.gz".to_string(),
295
296			TauriTargetTriple:"x86_64-apple-darwin".to_string(),
297		},
298		PlatformTarget {
299			DownloadIdentifier:"darwin-arm64".to_string(),
300
301			ArchiveExtension:"tar.gz".to_string(),
302
303			TauriTargetTriple:"aarch64-apple-darwin".to_string(),
304		},
305	]
306}
307
/// Defines which sidecars and versions to fetch. This structure makes it
/// easy to add more sidecars like Deno in the future.
fn GetSidecarsToFetch() -> HashMap<String, Vec<String>> {
	// Major Node.js versions to vendor, newest first.
	let NodeMajorVersions:Vec<String> = ["24", "23", "22", "21", "20", "19", "18", "17", "16"]
		.iter()
		.map(|Version| Version.to_string())
		.collect();

	HashMap::from([("NODE".to_string(), NodeMajorVersions)])
}
323
324// --- Helper Functions ---
325
/// Environment variable consulted for the log level (the standard
/// `env_logger` variable, e.g. `RUST_LOG=debug`); read by `Logger`.
pub const LogEnv:&str = "RUST_LOG";
328
/// Manages the `.gitattributes` file to ensure binaries are tracked by Git LFS.
/// If the file does not exist, it is created with the full header and rule
/// set. If it exists, any missing rules (blank lines and comment lines are
/// excluded from the check) are appended under a marker comment.
///
/// Returns an error if the file cannot be created, read, or appended to.
fn UpdateGitattributes(BaseDirectory:&Path) -> Result<()> {
	// Header written verbatim when the file is created from scratch.
	const GITATTRIBUTES_HEADER:&str = r#"################################################################################
# Git LFS configuration for vendored Tauri Sidecars
#
# This file tells Git to use LFS (Large File Storage) for the heavy binary
# files and modules downloaded by the sidecar vendoring script. This keeps the
# main repository history small and fast.
#
# The `-text` attribute is used to prevent Git from normalizing line endings,

# which is critical for binary files and scripts.
#
# This file is automatically managed by the sidecar vendor script.
################################################################################

# --- Rule Definitions ---"#;

	// The rule set; empty strings become blank separator lines, and "#"-lines
	// are section comments (both are skipped when verifying an existing file).
	const GITATTRIBUTES_RULES:&[&str] = &[
		"**/NODE/**/bin/node filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/node.exe filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/bin/npm filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/bin/npx filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/bin/corepack filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npm filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npm.cmd filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npx filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/npx.cmd filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/corepack filter=lfs diff=lfs merge=lfs -text",
		"**/NODE/**/corepack.cmd filter=lfs diff=lfs merge=lfs -text",
		"",
		"# --- Rules for the SideCar build artifacts ---",
		"",
		"Target/debug/*.exe filter=lfs diff=lfs merge=lfs -text",
		"Target/release/*.exe filter=lfs diff=lfs merge=lfs -text",
		"",
		"Target/debug/SideCar filter=lfs diff=lfs merge=lfs -text",
		"Target/release/SideCar filter=lfs diff=lfs merge=lfs -text",
		"",
		"Target/debug/Download filter=lfs diff=lfs merge=lfs -text",
		"Target/release/Download filter=lfs diff=lfs merge=lfs -text",
	];

	let GitattributesPath = BaseDirectory.join(".gitattributes");

	if !GitattributesPath.exists() {
		// Fresh file: write the header followed by every rule line.
		info!("Creating .gitattributes file to track binaries with Git LFS.");

		let mut File = File::create(&GitattributesPath)
			.with_context(|| format!("Failed to create .gitattributes file at {:?}", GitattributesPath))?;

		writeln!(File, "{}", GITATTRIBUTES_HEADER)?;

		for Rule in GITATTRIBUTES_RULES {
			// This will write a blank line for any empty strings in the array
			writeln!(File, "{}", Rule)?;
		}
	} else {
		// Existing file: append only the rules that are not already present.
		// NOTE: the check is a plain substring search, so a rule commented out
		// by hand still counts as "present".
		info!(".gitattributes file found. Verifying LFS rules...");

		let Content = fs::read_to_string(&GitattributesPath)?;

		let MissingRules:Vec<_> = GITATTRIBUTES_RULES
			.iter()
			// Filter out blank lines and comments from the check
			.filter(|rule| !rule.is_empty() && !rule.starts_with('#'))
			.filter(|rule| !Content.contains(*rule))
			.collect();

		if !MissingRules.is_empty() {
			info!("Adding {} missing LFS rules to .gitattributes.", MissingRules.len());

			let mut File = fs::OpenOptions::new()
				.append(true)
				.open(&GitattributesPath)
				.with_context(|| format!("Failed to open .gitattributes for appending at {:?}", GitattributesPath))?;

			writeln!(File, "\n\n# --- Rules Automatically Added by Vendor Script ---")?;

			for Rule in MissingRules {
				writeln!(File, "{}", Rule)?;
			}
		} else {
			info!(".gitattributes is already up to date.");
		}
	}

	Ok(())
}
420
421// --- Core Logic ---
422
423/// Fetches the official Node.js versions index from nodejs.org.
424async fn FetchNodeVersions(Client:&Client) -> Result<Vec<NodeVersionInfo>> {
425	info!("Fetching Node.js version index for resolving versions...");
426
427	let Response = Client
428		.get("https://nodejs.org/dist/index.json")
429		.send()
430		.await
431		.context("Failed to send request to Node.js version index.")?;
432
433	if !Response.status().is_success() {
434		return Err(anyhow!("Received non-success status from Node.js index: {}", Response.status()));
435	}
436
437	let Versions = Response
438		.json::<Vec<NodeVersionInfo>>()
439		.await
440		.context("Failed to parse Node.js version index JSON.")?;
441
442	Ok(Versions)
443}
444
445/// Resolves a major version string (e.g., "22") to the latest full patch
446/// version (e.g., "v22.3.0") using the fetched version index.
447fn ResolveLatestPatchVersion(MajorVersion:&str, AllVersions:&[NodeVersionInfo]) -> Option<String> {
448	let VersionPrefix = format!("v{}.", MajorVersion);
449
450	AllVersions
451		.iter()
452		.find(|v| v.version.starts_with(&VersionPrefix))
453		.map(|v| v.version.clone())
454}
455
456/// Downloads a file from a URL to a specified path.
457async fn DownloadFile(Client:&Client, URL:&str, DestinationPath:&Path) -> Result<()> {
458	let mut Response = Client.get(URL).send().await?.error_for_status()?;
459
460	let mut DestinationFile =
461		File::create(DestinationPath).with_context(|| format!("Failed to create file at {:?}", DestinationPath))?;
462
463	// Stream the download to handle large files without high memory usage.
464	while let Some(Chunk) = Response.chunk().await? {
465		DestinationFile.write_all(&Chunk)?;
466	}
467
468	Ok(())
469}
470
471/// Extracts the contents of a downloaded archive to a target directory.
472/// This function now performs a full extraction to ensure a complete
473/// distribution.
474fn ExtractArchive(ArchiveType:&ArchiveType, ArchivePath:&Path, ExtractionDirectory:&Path) -> Result<()> {
475	info!("Performing a full extraction of the archive...");
476
477	match ArchiveType {
478		ArchiveType::Zip => {
479			let File = File::open(ArchivePath)?;
480
481			let mut Archive = zip::ZipArchive::new(File)?;
482
483			Archive.extract(ExtractionDirectory)?;
484		},
485
486		ArchiveType::TarGz => {
487			let File = File::open(ArchivePath)?;
488
489			let Decompressor = flate2::read::GzDecoder::new(File);
490
491			let mut Archive = tar::Archive::new(Decompressor);
492
493			Archive.unpack(ExtractionDirectory)?;
494		},
495	}
496
497	Ok(())
498}
499
500/// The main asynchronous function for processing a single download task.
501/// This function is designed to be run concurrently for multiple tasks.
502async fn ProcessDownloadTask(Task:DownloadTask, Client:Client, Cache:Arc<Mutex<DownloadCache>>) -> Result<()> {
503	// Create the temporary directory inside the designated "Temporary" subfolder.
504	let TempDirectory = Builder::new()
505		.prefix("SideCar-Download-")
506		.tempdir_in(&Task.TempParentDirectory)
507		.context("Failed to create temporary directory.")?;
508
509	let ArchiveName = Task.DownloadURL.split('/').last().unwrap_or("Download.tmp");
510
511	let ArchivePath = TempDirectory.path().join(ArchiveName);
512
513	info!(
514		"      [{}/{}] Downloading from: {}",
515		Task.TauriTargetTriple, Task.SidecarName, Task.DownloadURL
516	);
517
518	if let Err(Error) = DownloadFile(&Client, &Task.DownloadURL, &ArchivePath).await {
519		error!(
520			"      [{}/{}] Failed to download {}: {}",
521			Task.TauriTargetTriple, Task.SidecarName, ArchiveName, Error
522		);
523
524		return Err(Error.into());
525	}
526
527	info!("      [{}/{}] Extracting archive...", Task.TauriTargetTriple, Task.SidecarName);
528
529	if let Err(Error) = ExtractArchive(&Task.ArchiveType, &ArchivePath, TempDirectory.path()) {
530		error!(
531			"      [{}/{}] Failed to extract {}: {}",
532			Task.TauriTargetTriple, Task.SidecarName, ArchiveName, Error
533		);
534
535		return Err(Error.into());
536	}
537
538	let ExtractedPath = TempDirectory.path().join(&Task.ExtractedFolderName);
539
540	if !ExtractedPath.exists() {
541		let ErrorMessage = format!("      Could not find extracted folder: {:?}", ExtractedPath);
542
543		error!("{}", ErrorMessage);
544
545		return Err(anyhow!(ErrorMessage));
546	}
547
548	// If the destination directory already exists, remove it.
549	if Task.DestinationDirectory.exists() {
550		info!("      Removing old version at: {:?}", Task.DestinationDirectory);
551
552		fs::remove_dir_all(&Task.DestinationDirectory)?;
553	}
554
555	// Ensure the parent of the final destination exists.
556	if let Some(Parent) = Task.DestinationDirectory.parent() {
557		fs::create_dir_all(Parent)?;
558	}
559
560	info!("      Installing to: {:?}", Task.DestinationDirectory);
561
562	fs::rename(&ExtractedPath, &Task.DestinationDirectory).with_context(|| {
563		format!(
564			"Failed to rename/move extracted directory from {:?} to {:?}",
565			ExtractedPath, Task.DestinationDirectory
566		)
567	})?;
568
569	// Update the cache with the new version.
570	let CacheKey = format!("{}/{}/{}", Task.TauriTargetTriple, Task.SidecarName, Task.MajorVersion);
571
572	let mut LockedCache = Cache.lock().unwrap();
573
574	LockedCache.Entries.insert(CacheKey, Task.FullVersion.clone());
575
576	info!(
577		"    v{} ({}) for '{}' is now up to date.",
578		Task.MajorVersion, Task.FullVersion, Task.TauriTargetTriple
579	);
580
581	Ok(())
582}
583
/// Sets up the global logger for the application.
///
/// The level is taken from the `RUST_LOG` environment variable (via
/// [`LogEnv`]), defaulting to `info` when the variable is absent or does not
/// parse as a plain level. Each record is rendered as
/// `[Download] [LEVEL]: message` with the level tag colorized by severity.
pub fn Logger() {
	// Read the desired level, falling back to "info".
	let LevelText = env::var(LogEnv).unwrap_or_else(|_| "info".to_string());

	let LogLevel = LevelText.parse::<LevelFilter>().unwrap_or(LevelFilter::Info);

	env_logger::Builder::new()
		.filter_level(LogLevel)
		.format(|Buffer, Record| {
			// Choose a color/style for the level tag per severity.
			let LevelStyle = match Record.level() {
				log::Level::Error => "ERROR".red().bold(),

				log::Level::Warn => "WARN".yellow().bold(),

				log::Level::Info => "INFO".green(),

				log::Level::Debug => "DEBUG".blue(),

				log::Level::Trace => "TRACE".magenta(),
			};

			writeln!(Buffer, "[{}] [{}]: {}", "Download".red(), LevelStyle, Record.args())
		})
		// NOTE(review): this re-parses RUST_LOG and can override the
		// `filter_level` set above with richer directives (e.g. per-module
		// filters) — confirm the double-parse is intentional.
		.parse_default_env()
		.init();
}
610
/// Application entry point (called by `main`): orchestrates a full vendoring
/// run — locate the SideCar base directory, ensure the Git LFS rules, load
/// the cache, resolve the required versions, run the necessary downloads
/// concurrently, and persist the updated cache.
#[tokio::main]
pub async fn Fn() -> Result<()> {
	Logger();

	info!("Starting Universal Sidecar vendoring process...");

	// --- Setup ---
	let BaseSidecarDirectory = GetBaseSidecarDirectory()?;

	// Manage the .gitattributes file for Git LFS.
	UpdateGitattributes(&BaseSidecarDirectory)?;

	// Define and create the dedicated directory for temporary downloads.
	let TempDownloadsDirectory = BaseSidecarDirectory.join("Temporary");

	fs::create_dir_all(&TempDownloadsDirectory)
		.with_context(|| format!("Failed to create temporary directory at {:?}", TempDownloadsDirectory))?;

	let CachePath = BaseSidecarDirectory.join("Cache.json");

	// Shared, mutex-guarded cache: read here during task generation and
	// written by the concurrent download tasks.
	let Cache = Arc::new(Mutex::new(DownloadCache::Load(&CachePath)));

	let HttpClient = Client::new();

	let PlatformMatrix = GetPlatformMatrix();

	let SidecarsToFetch = GetSidecarsToFetch();

	// Fetch Node versions once to be used by all tasks.
	let NodeVersions = FetchNodeVersions(&HttpClient).await?;

	let mut TasksToRun = Vec::new();

	// --- Task Generation Phase (Sequential) ---
	// First, we determine which downloads are necessary by checking the cache.
	for Platform in &PlatformMatrix {
		info!("--- Processing architecture: '{}' ---", Platform.TauriTargetTriple);

		for (SidecarName, MajorVersions) in &SidecarsToFetch {
			info!("  -> Processing sidecar: '{}'", SidecarName);

			for MajorVersion in MajorVersions {
				// Final layout: TauriTargetTriple/SidecarName/MajorVersion.
				let DestinationDirectory = BaseSidecarDirectory
					.join(&Platform.TauriTargetTriple)
					.join(SidecarName)
					.join(MajorVersion);

				// --- Sidecar-Specific Download Logic ---
				if SidecarName == "NODE" {
					let FullVersion = match ResolveLatestPatchVersion(MajorVersion, &NodeVersions) {
						Some(Version) => Version,

						None => {
							warn!(
								"      Could not resolve a specific version for Node.js v{}. Skipping.",
								MajorVersion
							);

							continue;
						},
					};

					// Check cache to see if we need to download/update.
					let CacheKey = format!("{}/{}/{}", &Platform.TauriTargetTriple, SidecarName, MajorVersion);

					let CachedVersion = Cache.lock().unwrap().Entries.get(&CacheKey).cloned();

					if Some(FullVersion.clone()) == CachedVersion {
						info!("    v{} ({}) is already up to date, skipping.", MajorVersion, FullVersion);

						continue;
					}

					if CachedVersion.is_some() {
						info!(
							"    Found newer patch for v{}: {} -> {}. Scheduling update.",
							MajorVersion,
							CachedVersion.unwrap(),
							FullVersion
						);
					} else {
						info!("    Processing v{} (resolved to {})...", MajorVersion, FullVersion);
					}

					let ArchiveExtension = &Platform.ArchiveExtension;

					let ArchiveName =
						format!("node-{}-{}.{}", FullVersion, Platform.DownloadIdentifier, ArchiveExtension);

					let DownloadURL = format!("https://nodejs.org/dist/{}/{}", FullVersion, ArchiveName);

					// The root folder inside the official Node archives.
					let ExtractedFolderName = format!("node-{}-{}", FullVersion, Platform.DownloadIdentifier);

					let Task = DownloadTask {
						SidecarName:SidecarName.clone(),

						MajorVersion:MajorVersion.clone(),

						FullVersion,

						DownloadURL,

						TempParentDirectory:TempDownloadsDirectory.clone(),

						DestinationDirectory,

						ArchiveType:if ArchiveExtension == "zip" { ArchiveType::Zip } else { ArchiveType::TarGz },

						ExtractedFolderName,

						TauriTargetTriple:Platform.TauriTargetTriple.clone(),
					};

					TasksToRun.push(Task);
				}

				// To add Deno, you would add an `else if SidecarName == "DENO"`
				// block here.
			}
		}
	}

	// --- Concurrent Execution Phase ---
	if TasksToRun.is_empty() {
		info!("All sidecar binaries are already up to date.");
	} else {
		info!("Found {} tasks to run. Starting concurrent downloads...", TasksToRun.len());

		// Limit to 8 concurrent jobs or num CPUs, whichever is smaller.
		let NumberOfConcurrentJobs = num_cpus::get().min(8);

		// Spawn a Tokio task for each download.
		// Run tasks concurrently.
		let Results = stream::iter(TasksToRun)
			.map(|Task| {
				let Client = HttpClient.clone();

				let Cache = Arc::clone(&Cache);

				tokio::spawn(async move { ProcessDownloadTask(Task, Client, Cache).await })
			})
			.buffer_unordered(NumberOfConcurrentJobs)
			.collect::<Vec<_>>()
			.await;

		// Check for any errors that occurred during the concurrent tasks.
		let mut ErrorsEncountered = 0;

		for Result in Results {
			// The outer Result comes from tokio::spawn (Err on panic/cancel);
			// the inner one is ProcessDownloadTask's own Result.
			if let Err(JoinError) = Result {
				error!("A download task panicked or was cancelled: {}", JoinError);

				ErrorsEncountered += 1;
			} else if let Ok(Err(AppError)) = Result {
				// We already logged the error inside `ProcessDownloadTask`, so just count it.
				// Re-logging here to ensure it's captured at a higher level if needed.
				error!("A download task failed: {}", AppError);

				ErrorsEncountered += 1;
			}
		}

		if ErrorsEncountered > 0 {
			error!("Completed with {} errors.", ErrorsEncountered);
		}
	}

	// --- Finalization ---
	// The cache is saved even when some tasks failed, so successful downloads
	// are still recorded.
	info!("Saving updated cache...");

	Cache.lock().unwrap().Save(&CachePath)?;

	info!("All sidecar binaries have been successfully processed and organized.");

	Ok(())
}
788
789/// Main executable function.
790#[allow(unused)]
791fn main() {
792	// We use a block here to handle the Result from Fn.
793	if let Err(Error) = Fn() {
794		// The logger should already be initialized by Fn, so we can use it.
795		error!("The application encountered a fatal error: {}", Error);
796
797		std::process::exit(1);
798	}
799}
800
801// --- Imports ---
use std::{
	collections::{BTreeMap, HashMap},
	env,
	fs::{self, File},
	io::Write,
	path::{Path, PathBuf},
	sync::{Arc, Mutex},
};

use anyhow::{Context, Result, anyhow};
use colored::*;
use futures::stream::{self, StreamExt};
use log::{LevelFilter, error, info, warn};
use reqwest::Client;
use serde::{Deserialize, Serialize};
use tempfile::Builder;
use tokio::io::AsyncWriteExt;
use toml;