feat: added commit-hash-based install for security reasons

Byson94
2025-08-24 15:37:56 +05:30
parent c0f5018079
commit 718715843e
13 changed files with 352 additions and 233 deletions

View File

@@ -5,6 +5,12 @@ All notable changes to `eiipm` are documented here.
This changelog follows the [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) format,
and this project adheres to [Semantic Versioning](https://semver.org/).
## [0.4.0] - [UNRELEASED]
### Added
- Added commit-hash-based install/update for security reasons.
## [0.3.0] - 2025-08-22
### Added
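For orientation, here is a minimal sketch of how the new commit pin flows through a package manifest. The struct definitions mirror the `PackageRootMeta`/`PackageMeta` types added in this commit, but they are simplified: `files` is reduced to plain strings (the real `FileEntry` also supports `{ src, dest }` entries), and the package name, URL, commit hash, and globs are made-up placeholders rather than a real manifest.

```rust
use serde::Deserialize;

// Simplified mirror of the PackageRootMeta/PackageMeta structs added in this commit;
// `files` is flattened to plain strings here for brevity (hypothetical example).
#[derive(Deserialize, Debug)]
struct PackageRootMeta {
    metadata: PackageMeta,
}

#[derive(Deserialize, Debug)]
struct PackageMeta {
    name: String,
    #[serde(rename = "type")]
    pkg_type: String,
    src: String,
    #[serde(rename = "commit")]
    commit_hash: String, // full hash of the commit the installer pins to
    files: Vec<String>,
    build: Option<String>,
}

fn main() -> Result<(), toml::de::Error> {
    // Placeholder manifest; only the field names follow this commit.
    let manifest = r#"
        [metadata]
        name = "example-widget"
        type = "widget"
        src = "https://github.com/example/example-widget.git"
        commit = "0123456789abcdef0123456789abcdef01234567"
        files = ["src/*"]
        build = "echo no build step"
    "#;

    let parsed: PackageRootMeta = toml::from_str(manifest)?;
    println!("pinned commit: {}", parsed.metadata.commit_hash);
    Ok(())
}
```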

Cargo.lock (generated, 2 lines changed)
View File

@@ -249,7 +249,7 @@ dependencies = [
[[package]]
name = "eiipm"
-version = "0.3.0"
+version = "0.4.0"
dependencies = [
 "anyhow",
 "clap",

View File

@@ -1,6 +1,6 @@
[package]
name = "eiipm"
-version = "0.3.0"
+version = "0.4.0"
description = "Eiipm - A simple package manager for ewwii"
authors = ["Byson94 <byson94wastaken@gmail.com>"]
license = "Apache-2.0"

deprecated/git.mod.bak.rs (new file, 182 lines)
View File

@@ -0,0 +1,182 @@
//! Working with git2 API's

use crate::other::confirm_action::confirm;
use git2::{Cred, Error, FetchOptions, RemoteCallbacks, Repository, build::RepoBuilder};
use std::fs;
use std::path::Path;

pub fn clone_https(repo_url: &str, path: &Path, depth: Option<u32>) -> Result<Repository, Error> {
    let callbacks = RemoteCallbacks::new();

    // Set up fetch options
    let mut fetch_options = FetchOptions::new();
    fetch_options.remote_callbacks(callbacks);

    // Apply shallow clone if depth is specified
    if let Some(d) = depth {
        fetch_options.depth(d as i32);
    }

    // Use RepoBuilder directly
    let mut builder = RepoBuilder::new();
    builder.fetch_options(fetch_options);

    let repo = builder.clone(repo_url, path)?;
    Ok(repo)
}

pub fn pull_https(repo: &Repository) -> Result<(), Error> {
    let head_ref = repo.head()?;
    let branch_name = head_ref
        .shorthand()
        .ok_or_else(|| Error::from_str("Invalid branch"))?;

    // Fetch remote branch
    let callbacks = RemoteCallbacks::new();
    let mut fetch_options = FetchOptions::new();
    fetch_options.remote_callbacks(callbacks);

    let mut remote = repo.find_remote("origin")?;
    remote.fetch(&[branch_name], Some(&mut fetch_options), None)?;

    // Find fetched commit
    let fetch_ref = repo.find_reference(&format!("refs/remotes/origin/{}", branch_name))?;
    let fetch_commit = repo.reference_to_annotated_commit(&fetch_ref)?;

    // Merge analysis
    let analysis = repo.merge_analysis(&[&fetch_commit])?;

    if analysis.0.is_fast_forward() {
        // Fast-forward
        let mut ref_to_update = repo.find_reference(head_ref.name().unwrap())?;
        ref_to_update.set_target(fetch_commit.id(), "Fast-forward")?;
        repo.set_head(head_ref.name().unwrap())?;
        repo.checkout_head(Some(git2::build::CheckoutBuilder::default().force()))?;
        log::info!("Fast-forward merge completed");
    } else {
        // Real merge
        log::info!("Fast-forward not possible, performing merge...");

        let head_commit = repo.reference_to_annotated_commit(&head_ref)?;
        let head_tree = repo.find_commit(head_commit.id())?.tree()?;
        let fetch_tree = repo.find_commit(fetch_commit.id())?.tree()?;

        let ancestor_commit = repo
            .merge_base(head_commit.id(), fetch_commit.id())
            .and_then(|oid| repo.find_commit(oid))?;
        let ancestor_tree = ancestor_commit.tree()?;

        let mut idx = repo.merge_trees(&ancestor_tree, &head_tree, &fetch_tree, None)?;

        if idx.has_conflicts() {
            return Err(Error::from_str(
                "Merge conflicts detected. Please resolve manually.",
            ));
        }

        // Write the merged tree
        let result_tree_id = idx.write_tree_to(repo)?;
        let result_tree = repo.find_tree(result_tree_id)?;

        // Create merge commit
        let sig = repo.signature()?;
        let head_commit_obj = repo.find_commit(head_commit.id())?;
        let fetch_commit_obj = repo.find_commit(fetch_commit.id())?;

        repo.commit(
            Some(head_ref.name().unwrap()), // update current branch
            &sig,
            &sig,
            "Merge commit from pull",
            &result_tree,
            &[&head_commit_obj, &fetch_commit_obj],
        )?;

        // Checkout updated HEAD
        repo.checkout_head(Some(git2::build::CheckoutBuilder::default().force()))?;
        log::info!("Merge completed successfully");
    }

    Ok(())
}

pub fn pull_but_reclone_on_fail(
    repo_url: &str,
    repo_path: &Path,
    depth: Option<u32>,
) -> Result<Repository, Error> {
    // Try opening the repo if it exists
    if let Ok(repo) = Repository::open(repo_path) {
        // Try to pull
        match pull_https(&repo) {
            Ok(_) => return Ok(repo),
            Err(err) => {
                log::warn!("Pull failed: {}.", err);
                let user_confirm = confirm("Failed to update cache (outdated). Remove and retry?");

                let home_dir = dirs::home_dir()
                    .ok_or_else(|| Error::from_str("Failed to get home directory"))?;
                let cache_root = home_dir.join(".eiipm/cache");

                if user_confirm {
                    if !repo_path.starts_with(cache_root.as_path()) {
                        return Err(Error::from_str(&format!(
                            "Refusing to delete outside cache: {}",
                            repo_path.display()
                        )));
                    }

                    fs::remove_dir_all(repo_path)
                        .map_err(|e| Error::from_str(&format!("Failed to remove dir: {}", e)))?;
                } else {
                    // user refused, so just return the repo as-is
                    return Ok(repo);
                }
            }
        }
    }

    // Either repo didn't exist or we removed it, so clone fresh
    clone_https(repo_url, repo_path, depth)
}

/// Checks if the current branch is behind its upstream.
/// Returns `Ok(true)` if the upstream has commits the local branch doesn't have.
pub fn is_upstream_ahead(repo_path: &str) -> Result<bool, Error> {
    let repo = Repository::open(repo_path)?;

    // Get the current branch
    let head_ref = repo.head()?;
    let branch_name = head_ref
        .shorthand()
        .ok_or_else(|| Error::from_str("Invalid branch name"))?;

    // Set up fetch options with authentication callbacks
    let mut callbacks = RemoteCallbacks::new();
    callbacks.credentials(|_url, username_from_url, _allowed_types| {
        Cred::ssh_key_from_agent(username_from_url.unwrap_or("git"))
    });

    let mut fetch_options = FetchOptions::new();
    fetch_options.remote_callbacks(callbacks);

    // Fetch from origin
    let mut remote = repo.find_remote("origin")?;
    remote.fetch(&[branch_name], Some(&mut fetch_options), None)?;

    // Resolve upstream
    let local_branch = repo.find_branch(branch_name, git2::BranchType::Local)?;
    let upstream_branch = local_branch.upstream()?;

    let local_oid = local_branch
        .get()
        .target()
        .ok_or_else(|| Error::from_str("Local branch has no commit"))?;
    let upstream_oid = upstream_branch
        .get()
        .target()
        .ok_or_else(|| Error::from_str("Upstream branch has no commit"))?;

    let (_ahead, behind) = repo.graph_ahead_behind(local_oid, upstream_oid)?;

    Ok(behind > 0)
}

View File

@@ -49,3 +49,7 @@ I use zsh, so I added the line `export PATH="$HOME/.eiipm/bin:$PATH"` in `~/.zsh
For example, if you use bash, add that line in `~/.bashrc`.
> **NOTE:** If you don't want to use `echo` to add it, you can manually edit your configuration file and add the line `export PATH="$HOME/.eiipm/bin:$PATH"` there.

## Security Notice
Third-party packages may contain vulnerabilities. Always verify that you trust the author, even if the package is officially approved and included in [eii-manifests](https://github.com/Ewwii-sh/eii-manifests).

View File

@@ -1,5 +1,4 @@
-use super::load_db;
-use crate::git::is_upstream_ahead;
+use super::{is_update_needed_for, load_db};
use colored::Colorize;
use log::info;
use std::error::Error;
@@ -9,11 +8,11 @@ pub fn check_package_updates(package_name: &Option<String>) -> Result<(), Box<dy
    let mut pkg_needing_update: Vec<&String> = Vec::new();

    if let Some(name) = package_name {
-        if let Some(pkg) = db.packages.get_mut(name) {
+        if db.packages.get_mut(name).is_some() {
            info!("> Checking for '{}' update", name.yellow().bold());

-            let need_update = is_upstream_ahead(&pkg.repo_path)?;
-            if need_update {
+            let need_update = is_update_needed_for(&name)?;
+            if need_update.0 {
                pkg_needing_update.push(name);
            }
        } else {
@@ -21,11 +20,11 @@ pub fn check_package_updates(package_name: &Option<String>) -> Result<(), Box<dy
        }
    } else {
        info!("> Checking for updates in all packages...");

-        for (name, pkg) in db.packages.iter_mut() {
+        for (name, ..) in db.packages.iter_mut() {
            info!("Checking '{}'", name.yellow().bold());

-            let need_update = is_upstream_ahead(&pkg.repo_path)?;
-            if need_update {
+            let need_update = is_update_needed_for(&name)?;
+            if need_update.0 {
                pkg_needing_update.push(name);
            }
        }

View File

@@ -28,7 +28,7 @@ pub fn clean_package_cache(package_name: Option<String>) -> Result<(), Box<dyn E
}

fn clear_file_cache(pkg: &mut InstalledPackage, package_name: &str) -> Result<(), Box<dyn Error>> {
-    let repo_path = PathBuf::from(&pkg.repo_path);
+    let repo_path = PathBuf::from(&pkg.repo_fs_path);
    let home_dir = dirs::home_dir().ok_or("Failed to get home directory")?;
    let cache_root = home_dir.join(".eiipm/cache");

View File

@@ -1,30 +1,14 @@
-use super::{FileEntry, InstalledPackage, http_get_string, load_db, save_db};
+use super::{FileEntry, InstalledPackage, PackageRootMeta, http_get_string, load_db, save_db};
use colored::Colorize;
use dirs;
use glob::glob;
use log::{info, trace};
-use serde::Deserialize;
use std::env;
use std::error::Error;
use std::fs;
use std::process::Command;

-use crate::git::{clone_https, pull_but_reclone_on_fail};
+use crate::git::{init_and_fetch, update_to_latest};

-#[derive(Deserialize, Debug)]
-struct PackageRootMeta {
-    metadata: PackageMeta,
-}
-
-#[derive(Deserialize, Debug)]
-struct PackageMeta {
-    name: String,
-    #[serde(rename = "type")]
-    pkg_type: String,
-    src: String,
-    files: Vec<FileEntry>,
-    build: Option<String>, // Optional build command
-}

pub fn install_package(package_name: &str) -> Result<(), Box<dyn Error>> {
    info!("> Installing package '{}'", package_name.yellow().bold());
@@ -50,21 +34,21 @@ pub fn install_package(package_name: &str) -> Result<(), Box<dyn Error>> {
        .strip_suffix(".git")
        .unwrap_or_else(|| meta.src.rsplit('/').next().unwrap());

-    let repo_path = eiipm_dir.join(format!("cache/{}", repo_name));
+    let repo_fs_path = eiipm_dir.join(format!("cache/{}", repo_name));

-    // Clone or pull repo
-    if !repo_path.exists() {
+    // Init and fetch or fetch and clean repo
+    if !repo_fs_path.exists() {
        info!(
            "Cloning repository {} to {}",
            meta.src.underline(),
-            repo_path.display()
+            repo_fs_path.display()
        );
-        let _repo = clone_https(&meta.src, &repo_path, Some(1))
-            .map_err(|e| format!("Git clone failed: {}", e))?;
+        let _repo = init_and_fetch(&meta.src, &repo_fs_path, &meta.commit_hash, 1)
+            .map_err(|e| format!("Failed to fetch commit: {}", e))?;
    } else {
-        info!("Repository exists, pulling latest changes");
-        pull_but_reclone_on_fail(&meta.src, &repo_path, Some(1))
-            .map_err(|e| format!("Git pull failed: {}", e))?;
+        info!("Repository exists, fetching latest changes");
+        let _repo = update_to_latest(&repo_fs_path, &meta.commit_hash, 1)
+            .map_err(|e| format!("Failed to fetch commit and clean state: {}", e))?;
    }

    // Optional build step
@@ -73,7 +57,7 @@ pub fn install_package(package_name: &str) -> Result<(), Box<dyn Error>> {
        let status = Command::new("sh")
            .arg("-c")
            .arg(build_cmd)
-            .current_dir(&repo_path)
+            .current_dir(&repo_fs_path)
            .status()?;

        if !status.success() {
            return Err(format!("Build failed for package '{}'", package_name).into());
@@ -94,7 +78,7 @@ pub fn install_package(package_name: &str) -> Result<(), Box<dyn Error>> {
    for file_entry in &meta.files {
        // handle *, ** etc. in file entry
        let files: Vec<(std::path::PathBuf, std::path::PathBuf)> = match file_entry {
-            FileEntry::Flat(f) => glob(&repo_path.join(f).to_string_lossy())
+            FileEntry::Flat(f) => glob(&repo_fs_path.join(f).to_string_lossy())
                .expect("Invalid glob")
                .filter_map(Result::ok)
                .map(|src| {
@@ -103,7 +87,7 @@ pub fn install_package(package_name: &str) -> Result<(), Box<dyn Error>> {
                })
                .collect(),
-            FileEntry::Detailed { src, dest } => glob(&repo_path.join(src).to_string_lossy())
+            FileEntry::Detailed { src, dest } => glob(&repo_fs_path.join(src).to_string_lossy())
                .expect("Invalid glob")
                .filter_map(Result::ok)
                .map(|src_path| {
@@ -134,11 +118,13 @@ pub fn install_package(package_name: &str) -> Result<(), Box<dyn Error>> {
    db.packages.insert(
        meta.name.clone(),
        InstalledPackage {
-            repo_path: repo_path.to_string_lossy().to_string(),
+            repo_fs_path: repo_fs_path.to_string_lossy().to_string(),
            installed_files: installed_files,
            copy_files: meta.files.clone(),
            pkg_type: meta.pkg_type.clone(),
            upstream_src: meta.src.clone(),
+            installed_hash: meta.commit_hash.clone(),
+            manifest_url: raw_manifest_url,
            build_command: meta.build.clone(),
        },
    );

View File

@@ -19,7 +19,7 @@ pub fn list_packages(list_args: ListArgs) -> Result<(), Box<dyn Error>> {
"{}\n Type: {}\n Repo: {}\n Build: {}\n Files:\n {}", "{}\n Type: {}\n Repo: {}\n Build: {}\n Files:\n {}",
pkg, pkg,
package.pkg_type, package.pkg_type,
package.repo_path, package.repo_fs_path,
package package
.build_command .build_command
.clone() .clone()
@@ -42,7 +42,7 @@ pub fn list_packages(list_args: ListArgs) -> Result<(), Box<dyn Error>> {
"{}\n Type: {}\n Repo: {}\n Build: {}\n Files:\n {}", "{}\n Type: {}\n Repo: {}\n Build: {}\n Files:\n {}",
name, name,
package.pkg_type, package.pkg_type,
package.repo_path, package.repo_fs_path,
package package
.build_command .build_command
.clone() .clone()

View File

@@ -22,19 +22,38 @@ pub struct PackageDB {
    packages: HashMap<String, InstalledPackage>,
}

/// Metadata structs
#[derive(Deserialize, Debug)]
pub struct PackageRootMeta {
    metadata: PackageMeta,
}

#[derive(Deserialize, Debug)]
pub struct PackageMeta {
    name: String,
    #[serde(rename = "type")]
    pkg_type: String,
    src: String,
    #[serde(rename = "commit")]
    commit_hash: String, // hash of the commit to install
    files: Vec<FileEntry>,
    build: Option<String>, // Optional build command
}

// Wait there dev!
// if you add a new value to InstalledPackage, eiipm will break
// no... no... eiipm wont break, but old db's that use the old
// struct will break... So, remember to add `#[serde(default)]`.
// #[serde(default)] is our lord and savior if we need to add a new value.
#[derive(Deserialize, Serialize, Debug)]
pub struct InstalledPackage {
-    repo_path: String, // path to cached repo. E.g. ~/.eiipm/cache/<REPO_NAME>
+    repo_fs_path: String, // path to cached repo. E.g. ~/.eiipm/cache/<REPO_NAME>
    installed_files: Vec<String>,
    copy_files: Vec<FileEntry>,
    pkg_type: String,
    upstream_src: String,
+    installed_hash: String,
+    manifest_url: String,
    build_command: Option<String>,
}
@@ -79,3 +98,20 @@ pub fn http_get_string(url: &str) -> Result<String, Box<dyn Error>> {
    }

    Ok(response.text()?)
}

pub fn is_update_needed_for(package_name: &str) -> Result<(bool, String), Box<dyn Error>> {
    let mut db = load_db()?;

    if let Some(pkg) = db.packages.get_mut(package_name) {
        let upstream_manifest_raw = http_get_string(&pkg.manifest_url)?;
        let root_manifest: PackageRootMeta = toml::from_str(&upstream_manifest_raw)?;
        let upstream_manifest = root_manifest.metadata;

        Ok((
            upstream_manifest.commit_hash != pkg.installed_hash,
            upstream_manifest.commit_hash,
        ))
    } else {
        Err(format!("Package `{}` not found in DB", package_name).into())
    }
}
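The tuple returned by `is_update_needed_for` is `(needs_update, upstream_commit_hash)`: `.0` says whether the installed hash differs from the manifest's pinned commit, and `.1` carries the new hash so callers can fetch it. A small hypothetical caller, just to make that tuple contract explicit (not part of this commit):

```rust
// Hypothetical helper; assumes `is_update_needed_for` is in scope from this module.
fn report_update_status(name: &str) -> Result<(), Box<dyn std::error::Error>> {
    let (needs_update, upstream_hash) = is_update_needed_for(name)?;
    if needs_update {
        println!("'{}' is behind; the manifest now pins commit {}", name, upstream_hash);
    } else {
        println!("'{}' is already at its pinned commit", name);
    }
    Ok(())
}
```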

View File

@@ -19,7 +19,7 @@ pub fn purge_cache() -> Result<(), Box<dyn std::error::Error>> {
        .packages
        .values()
        .map(|pkg| {
-            Path::new(&pkg.repo_path)
+            Path::new(&pkg.repo_fs_path)
                .file_name()
                .unwrap()
                .to_string_lossy()

View File

@@ -6,9 +6,9 @@ use std::fs;
use std::path::PathBuf;
use std::process::Command;

-use super::{FileEntry, InstalledPackage, load_db, save_db};
-use crate::git::{clone_https, is_upstream_ahead, pull_but_reclone_on_fail};
+use super::{FileEntry, InstalledPackage, is_update_needed_for, load_db, save_db};
+use crate::git::{init_and_fetch, update_to_latest};

pub fn update_package(package_name: &Option<String>) -> Result<(), Box<dyn Error>> {
    let mut db = load_db()?;
@@ -24,10 +24,10 @@ pub fn update_package(package_name: &Option<String>) -> Result<(), Box<dyn Error
            if pkg.pkg_type == "theme" {
                info!("Skipping theme package '{}'", name.yellow().bold());
            } else {
-                let need_update = is_upstream_ahead(&pkg.repo_path)?;
-                if need_update {
+                let need_update = is_update_needed_for(&name)?;
+                if need_update.0 {
                    info!("> Updating '{}'", name.yellow().bold());
-                    update_file(pkg, &name)?;
+                    update_file(pkg, &name, need_update.1)?;
                    info!("Successfully updated '{}'", name.yellow().bold());
                } else {
                    info!("Package '{}' is already up-to-date", name.yellow().bold());
@@ -50,10 +50,10 @@ pub fn update_package(package_name: &Option<String>) -> Result<(), Box<dyn Error
                continue;
            }

-            let need_update = is_upstream_ahead(&pkg.repo_path)?;
-            if need_update {
+            let need_update = is_update_needed_for(&name)?;
+            if need_update.0 {
                info!("> Updating '{}'", name.yellow().bold());
-                update_file(pkg, &name)?;
+                update_file(pkg, &name, need_update.1)?;
                info!("Successfully updated '{}'", name.yellow().bold());
            } else {
                info!("Package '{}' is already up-to-date", name.yellow().bold());
@@ -65,24 +65,29 @@ pub fn update_package(package_name: &Option<String>) -> Result<(), Box<dyn Error
    Ok(())
}

-fn update_file(pkg: &mut InstalledPackage, package_name: &str) -> Result<(), Box<dyn Error>> {
-    let repo_path = PathBuf::from(&pkg.repo_path);
+fn update_file(
+    pkg: &mut InstalledPackage,
+    package_name: &str,
+    commit_hash: String,
+) -> Result<(), Box<dyn Error>> {
+    let repo_fs_path = PathBuf::from(&pkg.repo_fs_path);

    // Clone/Pull latest changes
    debug!("Pulling latest version of {} using git...", package_name);

-    if !repo_path.exists() {
+    // Init and fetch or fetch and clean repo
+    if !repo_fs_path.exists() {
        info!(
-            "Cache not found. Cloning repository {} to {}",
+            "Cloning repository {} to {}",
            pkg.upstream_src.underline(),
-            repo_path.display()
+            repo_fs_path.display()
        );
-        let _repo = clone_https(&pkg.upstream_src, &repo_path, Some(1))
-            .map_err(|e| format!("Git clone failed: {}", e))?;
+        let _repo = init_and_fetch(&pkg.upstream_src, &repo_fs_path, &commit_hash, 1)
+            .map_err(|e| format!("Failed to fetch commit: {}", e))?;
    } else {
-        info!("Repository is cached, pulling latest changes");
-        pull_but_reclone_on_fail(&pkg.upstream_src, &repo_path, Some(1))
-            .map_err(|e| format!("Git pull failed: {}", e))?;
+        info!("Repository exists, fetching latest changes");
+        let _repo = update_to_latest(&repo_fs_path, &commit_hash, 1)
+            .map_err(|e| format!("Failed to fetch commit and clean state: {}", e))?;
    }

    // Optional build step
@@ -91,10 +96,10 @@ fn update_file(pkg: &mut InstalledPackage, package_name: &str) -> Result<(), Box
        let status = Command::new("sh")
            .arg("-c")
            .arg(build_cmd)
-            .current_dir(&repo_path)
+            .current_dir(&repo_fs_path)
            .status()?;

        if !status.success() {
-            return Err(format!("Build failed for package '{}'", pkg.repo_path).into());
+            return Err(format!("Build failed for package '{}'", pkg.repo_fs_path).into());
        }
    }
@@ -118,7 +123,7 @@ fn update_file(pkg: &mut InstalledPackage, package_name: &str) -> Result<(), Box
    for file_entry in &pkg.copy_files {
        // handle *, **, etc. in file entry
        let files: Vec<(std::path::PathBuf, std::path::PathBuf)> = match file_entry {
-            FileEntry::Flat(f) => glob(&repo_path.join(f).to_string_lossy())
+            FileEntry::Flat(f) => glob(&repo_fs_path.join(f).to_string_lossy())
                .expect("Invalid glob")
                .filter_map(Result::ok)
                .map(|src| {
@@ -127,7 +132,7 @@ fn update_file(pkg: &mut InstalledPackage, package_name: &str) -> Result<(), Box
                })
                .collect(),
-            FileEntry::Detailed { src, dest } => glob(&repo_path.join(src).to_string_lossy())
+            FileEntry::Detailed { src, dest } => glob(&repo_fs_path.join(src).to_string_lossy())
                .expect("Invalid glob")
                .filter_map(Result::ok)
                .map(|src_path| {
@@ -154,5 +159,7 @@ fn update_file(pkg: &mut InstalledPackage, package_name: &str) -> Result<(), Box
        }
    }

+    pkg.installed_hash = commit_hash;
+
    Ok(())
}

View File

@@ -1,182 +1,81 @@
(Full rewrite: the previous 182-line implementation is preserved verbatim above as deprecated/git.mod.bak.rs. The new file reads:)

//! Minimal fetch & checkout with git2

use git2::{Error, FetchOptions, RemoteCallbacks, Repository};
use std::path::Path;

/// Initialize a repo at `path` and fetch a specific commit from origin.
///
/// Equivalent to:
/// ```bash
/// git init
/// git fetch --depth 1 origin <commit>
/// git checkout FETCH_HEAD
/// ```
pub fn init_and_fetch(
    repo_url: &str,
    path: &Path,
    commit: &str,
    fetch_depth: i32,
) -> Result<Repository, Error> {
    // initialize new git repository
    let repo = Repository::init(path)?;
    repo.remote("origin", repo_url)?;

    // prepare fetch options (shallow, depth=1)
    let callbacks = RemoteCallbacks::new();
    let mut fetch_opts = FetchOptions::new();
    fetch_opts.remote_callbacks(callbacks);
    fetch_opts.depth(fetch_depth);

    // Fetch the given commit
    {
        let mut remote = repo.find_remote("origin")?;
        remote.fetch(&[commit], Some(&mut fetch_opts), None)?;
    }

    {
        // Point HEAD to FETCH_HEAD
        let fetch_head = repo.find_reference("FETCH_HEAD")?;
        let commit = repo.reference_to_annotated_commit(&fetch_head)?;
        repo.set_head_detached(commit.id())?;
        repo.checkout_head(Some(git2::build::CheckoutBuilder::default().force()))?;
    }

    Ok(repo)
}

/// Fetch the latest commit from `origin/<commit>`, checkout it,
/// and discard all previous history (like a shallow reset).
pub fn update_to_latest(repo_path: &Path, commit: &str, fetch_depth: i32) -> Result<(), Error> {
    let repo = Repository::open(repo_path)?;

    // Prepare fetch options (shallow)
    let callbacks = RemoteCallbacks::new();
    let mut fetch_opts = FetchOptions::new();
    fetch_opts.remote_callbacks(callbacks);
    fetch_opts.depth(fetch_depth);

    // Fetch from origin
    {
        let mut remote = repo.find_remote("origin")?;
        remote.fetch(&[commit], Some(&mut fetch_opts), None)?;
    }

    // Point HEAD to FETCH_HEAD
    let fetch_head = repo.find_reference("FETCH_HEAD")?;
    let commit = repo.reference_to_annotated_commit(&fetch_head)?;
    let commit_obj = repo.find_commit(commit.id())?;

    // Reset hard to that commit
    repo.reset(
        commit_obj.as_object(),
        git2::ResetType::Hard,
        Some(git2::build::CheckoutBuilder::default().force()),
    )?;

    let _ = repo.cleanup_state();

    Ok(())
}
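With that, install and update share one shallow, commit-pinned flow. A rough sketch of how the two helpers are meant to be combined (the `sync_repo` wrapper is hypothetical; it only mirrors the call pattern used by the install and update code above):

```rust
use std::path::Path;

// Hypothetical wrapper around the two helpers above: a first install does
// init + shallow fetch of the pinned commit, later runs re-fetch the
// (possibly changed) pinned commit and hard-reset the cache onto it.
// Assumes init_and_fetch and update_to_latest are in scope from this module.
fn sync_repo(url: &str, cache_dir: &Path, pinned_commit: &str) -> Result<(), git2::Error> {
    if !cache_dir.exists() {
        init_and_fetch(url, cache_dir, pinned_commit, 1)?;
    } else {
        update_to_latest(cache_dir, pinned_commit, 1)?;
    }
    Ok(())
}
```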