Compare commits

...

5 Commits

Author SHA1 Message Date
43a8696619 bkp 2025-08-08 21:13:00 +03:00
19d3481f8a bkp 2025-08-08 00:00:14 +03:00
e90430f52c hardcopy subcommand add 2025-06-08 12:04:43 +03:00
b8fcbe2909 delete v0.1 2025-06-04 20:33:53 +03:00
51ffa435fb delete uncorrect 2025-06-02 18:00:32 +03:00
20 changed files with 1284 additions and 76 deletions

View File

@@ -1,6 +1,6 @@
[build]
rustflags = [
"-C", "target-feature=+crt-static",
"-C", "link-arg=-static",
"-C", "link-arg=-no-pie"
]
#rustflags = [
#"-C", "target-feature=+crt-static",
#"-C", "link-arg=-static",
#"-C", "link-arg=-no-pie"
#]

12
.gitignore vendored
View File

@@ -1,9 +1,13 @@
*
!.gitignore
!src
!src/**
!Cargo.toml
!tests
!tests/**
!.cargo
!.cargo/**
!install.sh
!src
!src/**
!assets
!assets/**
!tests
!tests/**

View File

@@ -4,6 +1,7 @@ version='0.1.0'
edition='2024'
[dependencies]
clap = "4.5.39"
rayon = "1.10.0"
[profile.release]
@@ -11,3 +12,7 @@ opt-level = 3
lto = true
codegen-units = 1
panic = "abort"
[[bin]]
name = "pkg"
path = "src/main.rs"

View File

@@ -0,0 +1,17 @@
Format <path_destination> <path_source>
=
``` cfg *** bin ***
```
``` cfg *** sbin ***
```
``` cfg *** include ***
```
``` cfg *** lib ***
```
``` cfg *** share ***
```
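Each block holds one `<path_destination> <path_source>` entry per resolved file conflict, in the form written by `append_index_block` in `src/utils/hardcopy.rs`; a hypothetical `bin` entry would look like:

``` cfg *** bin ***
/pkg/gnu/bin/ls /pkg/gnu/coreutils/bin/ls
```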

14
assets/etc/sexpkg.md Normal file
View File

@@ -0,0 +1,14 @@
Sexpkg's config file
=
``` cfg *** Repository list and priority ***
gnu /pkg/gnu/sexpkg/var/gnu
musl /pkg/gnu/sexpkg/var/musl
```
``` cfg *** Clean exclude ***
*
```
``` cfg *** Clean include ***
```

18
assets/var/gnu/glibc.md Normal file
View File

@@ -0,0 +1,18 @@
glibc 2.41
=
https://ftp.gnu.org/gnu/libc/{name}-{version}.tar.xz
``` sh *** build.script ***
mkdir build; cd build &&
../configure \
--prefix=/pkg/gnu/glibc \
--with-headers=/pkg/gnu/linux/include \
--disable-multilib \
--enable-install-ldconfig &&
make -j$(nproc) && make install
```
``` cfg *** dependencies ***
binutils
linux
```
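For reference, `get_url` in `src/utils/parser.rs` substitutes `{name}` and `{version}` from this recipe's first line, so the template above resolves to:

```
https://ftp.gnu.org/gnu/libc/glibc-2.41.tar.xz
```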

22
install.sh Executable file
View File

@@ -0,0 +1,22 @@
#!/bin/sh
if [ -z "$1" ]; then
echo "Ошибка: Укажите путь установки как аргумент."
echo "Пример использования: $0 /pkg/gnu/sexpkg/"
exit 1
fi
INSTALL_PATH="$1"
echo "Сборка проекта..."
cargo build --release
echo "Установка в '$INSTALL_PATH'..."
cargo install --path . --root "$INSTALL_PATH"
if [ $? -eq 0 ]; then
echo "Проект успешно установлен в '$INSTALL_PATH'."
else
echo "Ошибка при установке."
exit 1
fi
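A minimal run, using the prefix from the script's own usage hint:

``` sh
./install.sh /pkg/gnu/sexpkg/
```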

132
src/commands/delete.rs Normal file
View File

@@ -0,0 +1,132 @@
use std::fs;
use std::io;
use std::path::Path;
use rayon::prelude::*;
use std::os::unix::fs::MetadataExt;
use super::get_var_path;
use crate::utils::parser;
pub fn delete(repo: &str, pkgname: &str) {
let base_dir = Path::new("/pkg").join(repo);
let pkg_dir = base_dir.join(pkgname);
if pkg_dir.exists() {
match fs::remove_dir_all(&pkg_dir) {
Ok(()) => { println!("removed: {}", pkg_dir.display()) }
Err(e) => { eprintln!("Can't remove {}: {}", pkg_dir.display(), e)}
}
} else {
eprintln!("{} not installed in {}", pkgname, repo)
}
let subdirs = ["bin", "lib", "libexec", "include", "share"];
for subdir in &subdirs {
let dir_path = base_dir.join(subdir);
if dir_path.exists() {
match remove_unused_files(&dir_path) {
Ok(()) => {}
Err(_) => {}
}
}
}
}
pub fn delete_recursive(repo: &str, pkgname: &str) {
let base_dir = Path::new("/pkg").join(repo);
let pkg_dir = base_dir.join(pkgname);
if pkg_dir.exists() {
let var_path = get_var_path();
let pkg_md_path = var_path.join(format!("{}/{}.md", repo, pkgname));
match parser::get_deps(&pkg_md_path) {
Ok(deps) => {
for dependency in deps.lines() {
let dependency = dependency.trim();
if !dependency.is_empty() {
let pkg_dir = Path::new("/pkg").join(repo).join(dependency);
if pkg_dir.exists() {
if !parser::get_use_status(repo, dependency) {
delete(repo, &dependency.to_string())
}
}
}
}
}
Err(e) => {
eprintln!("Failed to parse dependencies: {}", e)
}
}
match fs::remove_dir_all(&pkg_dir) {
Ok(()) => { println!("removed: {}", pkg_dir.display()) }
Err(e) => { eprintln!("Can't remove {}: {}", pkg_dir.display(), e)}
}
} else {
eprintln!("{} not installed in {}", pkgname, repo)
}
let subdirs = ["lib", "include", "bin"];
for subdir in &subdirs {
let dir_path = base_dir.join(subdir);
if dir_path.exists() {
match remove_unused_files(&dir_path) {
Ok(()) => {}
Err(_) => {}
}
match remove_unused_dirs_and_symlink(&dir_path) {
Ok(()) => {}
Err(_) => {}
}
}
}
}
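// Walk the tree and delete regular files whose hard-link count is 1,
// i.e. files no longer referenced from any installed package directory.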
fn remove_unused_files(path: &Path) -> io::Result<()> {
let metadata = fs::symlink_metadata(path)?;
if metadata.file_type().is_file() {
if get_nlink(path)? == 1 {
fs::remove_file(path)?
}
} else if metadata.file_type().is_dir() {
let entries: Vec<_> = fs::read_dir(path)?.collect::<io::Result<Vec<_>>>()?;
entries.par_iter().try_for_each(|entry| {
remove_unused_files(&entry.path())
})?;
}
Ok(())
}
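// Recursively prune empty directories and remove symlinks whose targets no longer resolve.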
fn remove_unused_dirs_and_symlink(path: &Path) -> io::Result<()> {
let metadata = fs::symlink_metadata(path)?;
if metadata.file_type().is_dir() {
let entries: Vec<_> = fs::read_dir(path)?.collect::<io::Result<Vec<_>>>()?;
entries.par_iter().try_for_each(|entry| {
remove_unused_dirs_and_symlink(&entry.path())
})?;
if fs::read_dir(path)?.next().is_none() {
fs::remove_dir(path)?
}
} else if metadata.file_type().is_symlink() {
if let Err(_) = fs::metadata(path) {
fs::remove_file(path)?
}
}
Ok(())
}
fn get_nlink(path: &Path) -> io::Result<u64> {
Ok(fs::metadata(path)?.nlink())
}

35
src/commands/disable.rs Normal file
View File

@@ -0,0 +1,35 @@
use std::path::PathBuf;
use std::fs;
use crate::utils::deletecopy::deletecopy;
use crate::utils::shell::*;
pub fn disable(repo: &String, pkgname: &String) -> Result<(), String> {
let source = PathBuf::from("/pkg").join(repo).join(pkgname);
let _ = fs::File::create(&source.join("disabled"));
let destination = source.parent()
.ok_or("Failed to get parent directory for path")?
.to_path_buf();
let dirs_to_copy = vec![
("bin"),
("lib"),
("libexec"),
("include"),
("share"),
];
for base_system_folder_dir in dirs_to_copy {
let src = source.join(base_system_folder_dir);
let dest = destination.join(base_system_folder_dir);
if src.exists() {
deletecopy(&src, &dest)
.map_err(|e| format!("Failed to delete copy {} to {}: {}", src.display(), dest.display(), e))?;
}
}
mount_overlay(&destination)?;
shell_update()?;
Ok(())
}

11
src/commands/enable.rs Normal file
View File

@@ -0,0 +1,11 @@
use std::path::PathBuf;
use std::fs;
use super::pkglink::pkglink;
pub fn enable(repo: &String, pkgname: &String) -> Result<(), String> {
let source = PathBuf::from("/pkg").join(repo).join(pkgname);
let _ = fs::remove_file(&source.join("disabled"));
pkglink(&repo, &pkgname)?;
Ok(())
}

268
src/commands/install.rs Normal file
View File

@@ -0,0 +1,268 @@
use std::fs;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::process;
use super::*;
use crate::utils::parser;
use crate::commands::pkglink::pkglink;
pub fn install(repo: &String, pkgname: &String) -> Result<(), bool> {
let var_path = get_var_path();
let pkg_md_path = var_path.join(format!("{}/{}.md", repo, pkgname));
if !pkg_md_path.exists() {
upload_from_repo(&repo, &pkgname, &pkg_md_path)?;
}
check_dependency(&repo, &pkg_md_path)?;
download(&pkgname, &pkg_md_path)?;
let src_dir = PathBuf::from("/pkg/src").join(&pkgname);
build(&repo, &pkgname, &src_dir, &pkg_md_path)?;
pkglink(&repo, &pkgname).expect("Failed link package");
println!("Package {} installed successfully from repo {}", pkgname, repo);
Ok(())
}
pub fn install_all(pkgname: &String) {
let repos = match parser::get_repo_list() {
Ok(repos) => repos,
Err(e) => {
eprintln!("Failed to get repository list: {}", e);
return;
}
};
let mut success = false;
for repo in repos {
println!("Trying to install {} from repo {}...", pkgname, repo);
match install(&repo, pkgname) {
Ok(()) => {
success = true;
break;
}
Err(no_repo_package) => {
if no_repo_package {
continue;
} else {
process::exit(1)
}
}
}
}
if !success {
eprintln!("Package {} not found in any available repository", pkgname);
}
}
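// Fetch <pkgname>.md from the repository's rsync address into the local var/ tree.
// Err(true) means the package (or repo) is missing here, so the caller can try the next repo;
// Err(false) means the repository itself is broken.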
fn upload_from_repo(repo: &String, pkgname: &String, pkg_md_path: &Path) -> Result<(), bool> {
match parser::get_repo_addr(repo) {
Ok(repo_addr) => {
let rsync_command = format!(
"rsync --include='{}.md' --exclude='*' {} {}",
pkgname,
repo_addr,
pkg_md_path.to_str().unwrap()
);
let rsync_output = Command::new("sh")
.arg("-c")
.arg(rsync_command)
.output()
.expect("Failed to execute rsync");
if !rsync_output.status.success() {
eprintln!("broken repo: {}", repo);
return Err(false);
}
if !pkg_md_path.exists() {
eprintln!("not found {} in {} repo", pkgname, repo);
return Err(true);
}
Ok(())
}
Err(e) => {
eprintln!("Repository {} not found: {}", repo, e);
return Err(true);
}
}
}
fn check_dependency(repo: &String, pkg_md_path: &Path) -> Result<(), bool> {
let deps = match parser::get_deps(&pkg_md_path) {
Ok(deps) => deps,
Err(e) => {
eprintln!("Failed to parse dependencies {}: {}", &pkg_md_path.to_str().unwrap(), e);
return Err(false);
}
};
for dependency in deps.lines() {
if !dependency.trim().is_empty() {
if !Path::new("/pkg").join(repo).join(dependency).exists() {
match install(repo, &dependency.to_string()) {
Ok(()) => {}
Err(_) => {process::exit(1) }
}
}
}
}
Ok(())
}
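// Fetch the package sources into /pkg/src/<pkgname>: archives are streamed from wget into tar
// (compression picked from the URL extension), .git URLs are cloned, and a single top-level
// directory inside the archive is flattened away.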
fn download(pkgname: &String, pkg_md_path: &Path) -> Result<(), bool> {
let url = match parser::get_url(pkg_md_path) {
Ok(url) => url,
Err(e) => {
eprintln!("Failed to parse URL: {}", e);
return Err(false);
}
};
let src = PathBuf::from("/pkg/src").join(pkgname);
if let Err(e) = fs::create_dir_all(&src) {
eprintln!("Failed to create directory {}: {}", src.display(), e);
return Err(false);
}
if !url.ends_with(".git") {
let compress_flag = if url.ends_with(".bz2") {
"--bzip2"
} else if url.ends_with(".xz") {
"--xz"
} else if url.ends_with(".lz") {
"--lzip"
} else if url.ends_with(".lzma") {
"--lzma"
} else if url.ends_with(".lzo") {
"--lzop"
} else if url.ends_with(".zst") {
"--zstd"
} else if url.ends_with(".gz") {
"--gzip"
} else {
eprintln!("Unsupported compression format for URL: {}", url);
return Err(false);
};
let wget_output = Command::new("wget")
.arg("-O-")
.arg(&url)
.stdout(Stdio::piped())
.spawn();
let tar_input = match wget_output {
Ok(child) => child.stdout.unwrap(),
Err(e) => {
eprintln!("Failed to execute wget: {}", e);
return Err(false);
}
};
let tar_status = Command::new("tar")
.arg("-x")
.arg(compress_flag)
.arg("-C")
.arg(&src)
.stdin(tar_input)
.status();
if tar_status.is_err() || !tar_status.unwrap().success() {
eprintln!("Failed to extract archive from URL: {}", url);
return Err(false);
}
let entries = fs::read_dir(&src).unwrap();
let dirs: Vec<_> = entries
.filter_map(|entry| entry.ok())
.filter(|entry| entry.file_type().map_or(false, |ft| ft.is_dir()))
.collect();
if dirs.len() == 1 {
let single_dir = dirs[0].path();
for entry in fs::read_dir(&single_dir).unwrap() {
let entry = entry.unwrap();
let dest = src.join(entry.file_name());
fs::rename(entry.path(), dest).unwrap();
}
fs::remove_dir(single_dir).unwrap();
}
} else {
let git_status = Command::new("git")
.arg("clone")
.arg(&url)
.arg(&src)
.status();
if git_status.is_err() || !git_status.unwrap().success() {
eprintln!("Failed to clone git repository from URL: {}", url);
return Err(false);
}
}
Ok(())
}
fn build(
repo: &String,
pkgname: &String,
src_dir: &Path,
pkg_md_path: &Path,
) -> Result<(), bool> {
let build_script = match parser::get_build_script(pkg_md_path) {
Ok(script) => script,
Err(error) => {
eprintln!("Failed to parse build script: {}", error);
return Err(false);
}
};
let output = Command::new("zsh")
.arg("-c")
.arg(&build_script)
.current_dir(src_dir)
.output();
if let Err(e) = output {
eprintln!("Failed to execute build script: {}", e);
return Err(false);
}
let output = output.unwrap();
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
eprintln!("Script failed with error: {}", stderr);
return Err(false);
}
let dest_dir = PathBuf::from("/pkg").join(repo).join(pkgname);
if let Err(e) = fs::create_dir_all(&dest_dir) {
eprintln!("Failed to create destination directory: {}", e);
return Err(false);
}
let dest_path = dest_dir.join("build-script.md");
if let Err(e) = fs::copy(pkg_md_path, &dest_path) {
eprintln!("Failed to copy build script to destination: {}", e);
return Err(false);
}
if let Err(e) = fs::remove_dir_all(src_dir) {
eprintln!("Failed to remove source directory: {}", e);
return Err(false);
}
Ok(())
}

18
src/commands/mod.rs Normal file
View File

@@ -0,0 +1,18 @@
pub mod install;
pub mod delete;
pub mod pkglink;
pub mod disable;
pub mod enable;
use std::path::PathBuf;
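// Resolve the var/ directory holding the package recipes relative to the installed binary:
// <install root>/bin/pkg -> <install root>/var.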
pub fn get_var_path() -> PathBuf {
let exe_path = std::env::current_exe().expect("Failed to get executable path");
exe_path
.parent()
.unwrap()
.join("../var")
.canonicalize()
.expect("Failed to canonicalize var path")
}

35
src/commands/pkglink.rs Normal file
View File

@@ -0,0 +1,35 @@
use std::path::PathBuf;
use crate::utils::hardcopy::hardcopy_handler;
use crate::utils::shell::*;
pub fn pkglink(repo: &String, pkgname: &String) -> Result<(), String> {
let source = PathBuf::from("/pkg").join(repo).join(pkgname);
if source.join("disabled").exists() { return Ok(()) }
let destination = source.parent()
.ok_or("Failed to get parent directory for path")?
.to_path_buf();
let dirs_to_copy = vec![
("bin"),
("lib"),
("libexec"),
("include"),
("share")
];
for base_system_folder_dir in dirs_to_copy {
let src = source.join(base_system_folder_dir);
let dest = destination.join(base_system_folder_dir);
if src.exists() {
hardcopy_handler(&src, &dest)
.map_err(|e| format!("Failed to copy {} to {}: {}", src.display(), dest.display(), e))?;
}
}
mount_overlay(&destination)?;
shell_update()?;
Ok(())
}

View File

@@ -1,38 +1,142 @@
use rayon::prelude::*;
use std::fs;
use std::io;
use std::path::Path;
use std::os::unix;
mod commands;
mod utils;
fn copy_recursive_hardlink_optimized(source: &Path, destination: &Path) -> io::Result<()> {
let metadata = fs::symlink_metadata(source)?;
fn main() {
let matches = clap::Command::new("pkg")
.version("1.0")
.about("Package manager for installing software from repositories")
.subcommand(
clap::Command::new("install")
.about("Install a package")
.arg(
clap::Arg::new("args")
.help("Repository and package name (optional repo)")
.required(true)
.num_args(1..=2)
.value_names(["repo", "pkgname"]),
),
)
.subcommand(
clap::Command::new("delete")
.about("Delete a package from a repository")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true),
)
.arg(
clap::Arg::new("recursive")
.short('R')
.long("recursive")
.help("Recursively delete the package")
.action(clap::ArgAction::SetTrue),
),
)
.subcommand(
clap::Command::new("pkglink")
.about("Create package links and mount overlays")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true)
.index(2),
),
)
.subcommand(
clap::Command::new("disable")
.about("Disable package")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true)
.index(2),
),
)
.subcommand(
clap::Command::new("enable")
.about("Enable package")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true)
.index(2),
),
)
.get_matches();
if metadata.file_type().is_dir() {
fs::create_dir_all(destination)?;
if let Some(install_matches) = matches.subcommand_matches("install") {
let args: Vec<&String> = install_matches.get_many::<String>("args").unwrap().collect();
let entries: Vec<_> = fs::read_dir(source)?.collect::<io::Result<Vec<_>>>()?;
entries.par_iter().try_for_each(|entry| {
let path = entry.path();
if let Some(file_name) = path.file_name() {
let dest_path = destination.join(file_name);
copy_recursive_hardlink_optimized(&path, &dest_path)
} else {
Ok(())
match args.len() {
1 => {
let pkgname = args[0];
commands::install::install_all(pkgname);
}
})?;
} else if metadata.file_type().is_file() {
fs::hard_link(source, destination)?;
} else if metadata.file_type().is_symlink() {
let target = fs::read_link(source)?;
unix::fs::symlink(target, destination)?;
2 => {
let repo = args[0];
let pkgname = args[1];
commands::install::install(repo, pkgname).unwrap();
}
_ => unreachable!(),
}
} else if let Some(delete_matches) = matches.subcommand_matches("delete") {
let repo = delete_matches.get_one::<String>("repo").unwrap();
let pkgname = delete_matches.get_one::<String>("pkgname").unwrap();
let recursive = delete_matches.get_flag("recursive");
if recursive {
commands::delete::delete_recursive(repo, pkgname);
} else {
commands::delete::delete(repo, pkgname);
}
} else if let Some(pkglink_matches) = matches.subcommand_matches("pkglink") {
let repo = pkglink_matches.get_one::<String>("repo").unwrap();
let pkgname = pkglink_matches.get_one::<String>("pkgname").unwrap();
match commands::pkglink::pkglink(&repo, &pkgname) {
Ok(_) => println!("pkglink completed successfully."),
Err(e) => eprintln!("Error during pkglink: {}", e),
}
} else if let Some(disable_matches) = matches.subcommand_matches("disable") {
let repo = disable_matches.get_one::<String>("repo").unwrap();
let pkgname = disable_matches.get_one::<String>("pkgname").unwrap();
match commands::disable::disable(&repo, &pkgname) {
Ok(_) => println!("disable completed successfully."),
Err(e) => eprintln!("Error during disable: {}", e),
}
} else if let Some(enable_matches) = matches.subcommand_matches("enable") {
let repo = enable_matches.get_one::<String>("repo").unwrap();
let pkgname = enable_matches.get_one::<String>("pkgname").unwrap();
match commands::enable::enable(&repo, &pkgname) {
Ok(_) => println!("enable completed successfully."),
Err(e) => eprintln!("Error during enable: {}", e),
}
} else {
println!("No command provided. Use `pkg --help` for usage information.");
}
Ok(())
}
fn main() -> io::Result<()> {
let source = Path::new("/usr/lib");
let destination = Path::new("/pkg/gnu/lib");
copy_recursive_hardlink_optimized(source, destination)?;
Ok(())
}
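For reference, the subcommands defined above map to invocations like the following (repo and package names are illustrative):

``` sh
pkg install glibc          # try every repo from the configured priority list
pkg install gnu glibc      # install from a specific repo
pkg delete gnu glibc       # remove the package and prune unused hard links
pkg delete -R gnu glibc    # also remove dependencies no longer used elsewhere
pkg disable gnu glibc      # unlink the package and mark it disabled
pkg enable gnu glibc       # re-link a previously disabled package
pkg pkglink gnu glibc      # re-create hard links and overlay mounts
```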

33
src/utils/deletecopy.rs Normal file
View File

@@ -0,0 +1,33 @@
use rayon::prelude::*;
use std::fs;
use std::io;
use std::path::Path;
use std::os::unix::fs::MetadataExt;
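// Undo a hardcopy: walk `source` in parallel and remove the matching entries under
// `destination` — files that are hard links to the package (same inode) and symlinks.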
pub fn deletecopy(source: &Path, destination: &Path) -> io::Result<()> {
let metadata = fs::symlink_metadata(source)?;
if metadata.file_type().is_file() {
if let Ok(dest_metadata) = fs::metadata(destination) {
if dest_metadata.ino() == metadata.ino() {
fs::remove_file(destination)?;
return Ok(());
}
}
} else if metadata.file_type().is_dir() {
let entries: Vec<_> = fs::read_dir(source)?.collect::<io::Result<Vec<_>>>()?;
entries.par_iter().try_for_each(|entry| {
let path_source = entry.path();
if let Some(file_name) = path_source.file_name() {
deletecopy(&path_source, &destination.join(file_name))
} else {
Ok(())
}
})?;
} else if metadata.file_type().is_symlink() {
fs::remove_file(destination)?;
}
Ok(())
}

216
src/utils/hardcopy.rs Normal file
View File

@@ -0,0 +1,216 @@
use rayon::prelude::*;
use std::fs;
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use std::os::unix;
use std::os::unix::fs::MetadataExt;
use std::sync::mpsc;
use std::thread;
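// Recursively hard-link `source` into `destination`. Directories are created and walked in
// parallel and symlinks are re-created. An existing destination file with a different inode is
// a conflict: it is resolved from index-conflict.md when a record exists, otherwise the user is
// asked to pick an owner through `conflict_sender`.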
fn hardcopy(
source: &Path,
destination: &Path,
conflict_sender: Option<mpsc::Sender<(Vec<PathBuf>, mpsc::Sender<PathBuf>)>>,
) -> io::Result<()> {
let metadata = fs::symlink_metadata(source)?;
if metadata.file_type().is_file() {
match fs::hard_link(source, destination) {
Ok(_) => {}
Err(_) => {
if let Ok(dest_metadata) = fs::metadata(destination) {
if dest_metadata.ino() == metadata.ino() {
return Ok(());
}
}
match crate::utils::parser::get_index_conflict(destination) {
Ok(index_source) => {
if index_source == source {
fs::remove_file(destination)?;
fs::hard_link(source, destination)?;
} else {
return Ok(());
}
}
Err(_) => {
let conflict_list = find_files_with_location(&destination);
let count = conflict_list.len();
if count == 1 {
fs::remove_file(destination)?;
fs::hard_link(source, destination)?;
} else if count > 1 {
let (response_tx, response_rx) = mpsc::channel();
if let Some(sender) = &conflict_sender {
sender.send((conflict_list.clone(), response_tx)).unwrap(); }
let selected_source = response_rx.recv().unwrap();
append_index_block(&selected_source, &destination)?;
if selected_source == source {
fs::remove_file(destination)?;
fs::hard_link(source, destination)?;
} else {
return Ok(());
}
}
}
}
}
}
} else if metadata.file_type().is_dir() {
fs::create_dir_all(destination)?;
let entries: Vec<_> = fs::read_dir(source)?.collect::<io::Result<Vec<_>>>()?;
entries.par_iter().try_for_each(|entry| {
let path_source = entry.path();
if let Some(file_name) = path_source.file_name() {
let dest_path = destination.join(file_name);
hardcopy(&path_source, &dest_path, conflict_sender.clone())
} else {
Ok(())
}
})?;
} else if metadata.file_type().is_symlink() {
let symlink_value = fs::read_link(source)?;
fs::remove_file(destination)?;
unix::fs::symlink(symlink_value, destination)?;
}
Ok(())
}
pub fn hardcopy_handler(
source: &Path,
destination: &Path,
) -> io::Result<()> {
let (tx, rx): (
mpsc::Sender<(Vec<PathBuf>, mpsc::Sender<PathBuf>)>,
mpsc::Receiver<(Vec<PathBuf>, mpsc::Sender<PathBuf>)>,
) = mpsc::channel();
thread::spawn(move || {
for (conflict_list, response_tx) in rx {
let selected_source = choise_index_conflict(conflict_list);
response_tx.send(selected_source).unwrap();
}
});
hardcopy(source, destination, Some(tx))
}
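// For a merged path like /pkg/<repo>/<dir>/<file>, collect the same relative path from every
// enabled (not `disabled`) package under /pkg/<repo> — the candidate owners of the conflict.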
fn find_files_with_location(destination: &Path) -> Vec<PathBuf> {
let mut found_files = Vec::new();
let mut components = destination.components();
let prefix = match (components.next(), components.next(), components.next()) {
(Some(first), Some(second), Some(third)) => {
PathBuf::from(first.as_os_str())
.join(second.as_os_str())
.join(third.as_os_str())
}
_ => {
return Vec::new();
}
};
let file_location: PathBuf = components.as_path().to_path_buf();
if let Ok(entries) = fs::read_dir(&prefix) {
for entry in entries.filter_map(Result::ok) {
let path = entry.path();
if path.is_dir() {
let target_path = path.join(&file_location);
if target_path.exists() {
if !path.join(PathBuf::from("disabled")).exists() {
found_files.push(target_path);
}
}
}
}
}
found_files
}
fn choise_index_conflict(conflict_list: Vec<PathBuf>) -> PathBuf {
for (index, path) in conflict_list.iter().enumerate() {
println!("{}: {}", index + 1, path.display());
}
let count = conflict_list.len();
loop {
print!("Choose a path to resolve the conflict (1-{}): ", count);
io::stdout().flush().unwrap();
let mut input = String::new();
io::stdin()
.read_line(&mut input)
.expect("Failed to read input");
match input.trim().parse::<usize>() {
Ok(selected) if selected >= 1 && selected <= count => {
return conflict_list[selected - 1].clone();
}
_ => {
println!("Invalid input. Please enter a number between 1 and {}.", count);
}
}
}
}
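// Record the chosen owner by appending a "<destination> <source>" line to the matching
// ``` cfg *** <dir> *** block of /pkg/gnu/sexpkg/etc/index-conflict.md.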
fn append_index_block(source: &Path, destination: &Path) -> io::Result<()> {
let source_components: Vec<_> = source.iter().collect();
let base_system_folder = source_components[4].to_str().unwrap();
let index_conflict_path = Path::new("/pkg/gnu/sexpkg/etc/index-conflict.md");
let content = fs::read_to_string(&index_conflict_path)?;
let start_marker = format!("``` cfg *** {} ***", base_system_folder);
let lines: Vec<&str> = content.lines().collect();
let mut start_block_index = None;
let mut end_block_index = None;
for (i, line) in lines.iter().enumerate() {
if line.contains(&start_marker) {
start_block_index = Some(i);
} else if start_block_index.is_some() && line.trim() == "```" {
end_block_index = Some(i);
break;
}
}
let end_block_index = end_block_index.ok_or_else(|| {
io::Error::new(io::ErrorKind::InvalidData, "End block not found")
})?;
let new_line = format!(
"{} {}",
destination.to_str().unwrap(),
source.to_str().unwrap()
);
let mut new_content = String::new();
for (i, line) in lines.iter().enumerate() {
if i == end_block_index {
new_content.push_str(&new_line);
new_content.push('\n');
}
new_content.push_str(line);
new_content.push('\n');
}
let mut file = fs::File::create(&index_conflict_path)?;
file.write_all(new_content.as_bytes())?;
Ok(())
}

4
src/utils/mod.rs Normal file
View File

@@ -0,0 +1,4 @@
pub mod hardcopy;
pub mod parser;
pub mod deletecopy;
pub mod shell;

234
src/utils/parser.rs Normal file
View File

@@ -0,0 +1,234 @@
use std::fs;
use std::env;
use std::io::{self, BufRead};
use std::path::{Path,PathBuf};
pub fn get_name<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let first_line = read_first_line(file_path)?;
Ok(first_line.split_whitespace().next().unwrap_or("").to_string())
}
pub fn get_version<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let first_line = read_first_line(file_path)?;
Ok(first_line
.split_whitespace()
.nth(1)
.unwrap_or("")
.to_string())
}
pub fn get_url<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let lines = read_lines(&file_path)?;
let third_line = lines.get(2)
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "File has less than 3 lines"))?
.trim()
.to_string();
let name = get_name(&file_path)?;
let version = get_version(&file_path)?;
let url = third_line
.replace("{name}", &name)
.replace("{version}", &version);
Ok(url)
}
pub fn get_deps<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
extract_block(file_path, "``` sh dependencies", "```")
}
pub fn get_build_script<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
extract_block(file_path, "``` sh build.script", "```")
}
pub fn get_repo_list() -> io::Result<Vec<String>> {
let exe_path = env::current_exe()?;
let file_path = exe_path
.parent()
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Failed to get executable directory"))?
.join("../etc/sexpkg.md");
let block = extract_block(file_path, "``` sh *** Repository list and priority ***", "```")?;
let mut repo_list = Vec::new();
for line in block.lines() {
let trimmed_line = line.trim();
if !trimmed_line.is_empty() {
let parts: Vec<&str> = trimmed_line.split_whitespace().collect();
if let Some(repo) = parts.first() {
repo_list.push(repo.to_string());
}
}
}
Ok(repo_list)
}
pub fn get_repo_addr(repo: &str) -> io::Result<String> {
let exe_path = env::current_exe()?;
let file_path = exe_path
.parent()
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Failed to get executable directory"))?
.join("../etc/sexpkg.md");
let block = extract_block(file_path, "``` sh *** Repository list and priority ***", "```")?;
for line in block.lines() {
let trimmed_line = line.trim();
if !trimmed_line.is_empty() {
let parts: Vec<&str> = trimmed_line.split_whitespace().collect();
if parts.len() >= 2 && parts[0] == repo {
return Ok(parts[1].to_string());
}
}
}
Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Repository '{}' not found in the repository list", repo),
))
}
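// True when `dependency` is still listed (as an "=<name>" line) in more than one installed
// package's build-script.md under /pkg/<repo>, i.e. something else still uses it.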
pub fn get_use_status(repo: &str, dependency: &str) -> bool {
let base_path = format!("/pkg/{}", repo);
let path = Path::new(&base_path);
if !path.exists() || !path.is_dir() {
return false;
}
let mut match_count = 0;
if let Ok(entries) = fs::read_dir(path) {
for entry in entries.flatten() {
let subdir_path = entry.path();
if subdir_path.is_dir() {
let script_path = subdir_path.join("build-script.md");
if let Ok(lines) = read_lines(&script_path) {
for line in lines.iter() {
if line.trim() == format!("={}", dependency) {
match_count += 1;
if match_count > 1 {
return true
}
break;
}
}
}
}
}
}
return false
}
pub fn get_index_conflict<P: AsRef<Path>>(destination: P) -> io::Result<PathBuf> {
let destination_path = destination.as_ref();
let parts: Vec<&str> = destination_path
.iter()
.map(|component| component.to_str().unwrap_or(""))
.collect();
if parts.len() < 4 {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid destination path format",
));
}
let system_struct_folder = parts[3]; // bin, sbin, include, lib, share
let etc = Path::new("/pkg/gnu/sexpkg/etc");
let cfg_path = etc.join("index-conflict.md");
let start_marker = format!("``` cfg *** {} ***", system_struct_folder);
let end_marker = "```";
let block_content = extract_block(&cfg_path, &start_marker, end_marker)?;
let destination_str = destination_path.to_str().ok_or_else(|| {
io::Error::new(io::ErrorKind::InvalidInput, "Failed to convert destination path to string")
})?;
for line in block_content.lines() {
let trimmed_line = line.trim();
if trimmed_line.starts_with(destination_str) {
let mut words = trimmed_line.split_whitespace();
if let Some(_) = words.next() {
if let Some(path_source) = words.next() {
return Ok(PathBuf::from(path_source));
}
}
}
}
Err(io::Error::new(
io::ErrorKind::NotFound,
format!(
"No matching line found for destination: {}",
destination_path.display()
),
))
}
fn read_first_line<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let file = fs::File::open(file_path)?;
let reader = io::BufReader::new(file);
if let Some(line) = reader.lines().next() {
line
} else {
Err(io::Error::new(io::ErrorKind::InvalidData, "File is empty"))
}
}
fn read_lines<P: AsRef<Path>>(file_path: P) -> io::Result<Vec<String>> {
let file = fs::File::open(file_path)?;
let reader = io::BufReader::new(file);
reader.lines().collect()
}
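// Return the trimmed lines between the first line equal to `start_marker` and the next
// `end_marker` line; errors if the block is missing or empty.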
fn extract_block<P: AsRef<Path>>(
file_path: P,
start_marker: &str,
end_marker: &str,
) -> io::Result<String> {
let lines = read_lines(file_path)?;
let mut block_started = false;
let mut result = Vec::new();
for line in lines {
if line.trim() == start_marker {
block_started = true;
continue;
}
if block_started {
if line.trim() == end_marker {
break;
}
result.push(line.trim().to_string());
}
}
if result.is_empty() {
Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Block between '{}' and '{}' not found", start_marker, end_marker),
))
} else {
Ok(result.join("\n"))
}
}

73
src/utils/shell.rs Normal file
View File

@@ -0,0 +1,73 @@
use std::path::Path;
use std::process::Command;
use std::env;
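// Overlay-mount the repository's merged bin and sbin directories (lowerdir only, read-only)
// on top of /usr/bin, /usr/sbin, /bin and /sbin.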
pub fn mount_overlay(path_repo: &Path) -> Result<(), String> {
let lowerdirs = vec![
path_repo.join("bin"),
path_repo.join("sbin"),
];
let lowerdir_str = lowerdirs.iter()
.map(|p| p.to_string_lossy())
.collect::<Vec<_>>()
.join(":");
let mounts = vec![
("/usr/bin", &lowerdir_str),
("/usr/sbin", &lowerdir_str),
("/bin", &lowerdir_str),
("/sbin", &lowerdir_str),
];
for (target, lowerdir) in mounts {
let output = Command::new("mount")
.arg("-t").arg("overlay")
.arg("overlay")
.arg("-o").arg(format!("lowerdir={}", lowerdir))
.arg(target)
.output()
.map_err(|e| format!("Failed to execute mount command: {}", e))?;
if !output.status.success() {
return Err(format!(
"Mount failed for target {}: {}",
target,
String::from_utf8_lossy(&output.stderr)
));
}
}
Ok(())
}
pub fn shell_update() -> Result<(), String> {
let shell = env::var("SHELL").unwrap_or_else(|_| "/bin/sh".to_string());
let output_hash = Command::new(&shell)
.arg("-c")
.arg("hash -r")
.output()
.map_err(|e| format!("Failed to execute hash -r with shell {}: {}", shell, e))?;
if !output_hash.status.success() {
return Err(format!(
"hash -r failed: {}",
String::from_utf8_lossy(&output_hash.stderr)
));
}
let output_ldconfig = Command::new("ldconfig")
.output()
.map_err(|e| format!("Failed to execute ldconfig: {}", e))?;
if !output_ldconfig.status.success() {
return Err(format!(
"ldconfig failed: {}",
String::from_utf8_lossy(&output_ldconfig.stderr)
));
}
Ok(())
}

View File

@@ -1,35 +0,0 @@
#!/bin/bash
if [ -z "$1" ]; then
echo "Usage: $0 <path_to_executable>"
exit 1
fi
test_file="$1"
iterations=100
times=()
for i in $(seq 1 $iterations); do
rm -rf /pkg/gnu/lib
sleep 0.1
start_time=$(date +%s%N)
$test_file
end_time=$(date +%s%N)
elapsed_time=$((end_time - start_time))
elapsed_time_seconds=$(echo "scale=6; $elapsed_time / 1000000000" | bc)
times+=($elapsed_time_seconds)
done
sorted_times=($(printf '%s\n' "${times[@]}" | sort -n))
mid=$((iterations / 2))
if (( iterations % 2 == 1 )); then
median=${sorted_times[$mid]}
else
median=$(echo "scale=6; (${sorted_times[$mid]} + ${sorted_times[$((mid - 1))]} ) / 2" | bc)
fi
echo "Test: $test_file"
echo "Median exec time: $median"