delete incorrect

2025-06-02 18:00:32 +03:00
parent 5918b2c4b2
commit 51ffa435fb
16 changed files with 666 additions and 76 deletions

View File

@@ -1,6 +1,6 @@
 [build]
-rustflags = [
-"-C", "target-feature=+crt-static",
-"-C", "link-arg=-static",
-"-C", "link-arg=-no-pie"
-]
+#rustflags = [
+#"-C", "target-feature=+crt-static",
+#"-C", "link-arg=-static",
+#"-C", "link-arg=-no-pie"
+#]

12
.gitignore vendored
View File

@@ -1,9 +1,13 @@
 *
 !.gitignore
-!src
-!src/**
 !Cargo.toml
-!tests
-!tests/**
 !.cargo
 !.cargo/**
+!install.sh
+!src
+!src/**
+!assets
+!assets/**
+!tests
+!tests/**

View File

@@ -4,6 +4,7 @@ version='0.1.0'
 edition='2024'
 [dependencies]
 clap = "4.5.39"
+rayon = "1.10.0"
 [profile.release]

1
assets/bin/sexpkg Symbolic link
View File

@@ -0,0 +1 @@
../../target/release/sexpkg

15
assets/etc/sexpkg.md Normal file
View File

@@ -0,0 +1,15 @@
Sexpkg's config file
=
``` sh *** Repository list and priority ***
gnu /pkg/gnu/sexpkg/var/gnu
musl /pkg/gnu/sexpkg/var/musl
```
``` sh *** Clean exclude ***
*
```
``` sh *** Clean include ***
```
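For orientation: this block is read by src/utils/parser.rs later in this commit. Each non-empty line is a repository name followed by its address, and the order of the lines is the priority order used when a package is installed without naming a repository. A minimal standalone sketch of that parse (values taken from the block above):

```rust
fn main() {
    // The repository block above, as get_repo_list()/get_repo_addr() read it.
    let block = "gnu /pkg/gnu/sexpkg/var/gnu\nmusl /pkg/gnu/sexpkg/var/musl";
    let mut repos = Vec::new();
    let mut gnu_addr = None;
    for line in block.lines() {
        let mut parts = line.split_whitespace();
        if let Some(name) = parts.next() {
            repos.push(name);
            if name == "gnu" {
                gnu_addr = parts.next();
            }
        }
    }
    assert_eq!(repos, ["gnu", "musl"]); // priority order = file order
    assert_eq!(gnu_addr, Some("/pkg/gnu/sexpkg/var/gnu"));
}
```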

18
assets/var/gnu/glibc.md Normal file
View File

@@ -0,0 +1,18 @@
glibc 2.41
=
https://ftp.gnu.org/gnu/libc/{name}-{version}.tar.xz
``` sh build.script
mkdir build; cd build &&
../configure \
--prefix=/pkg/gnu/glibc \
--with-headers=/pkg/gnu/linux/include \
--disable-multilib \
--enable-install-ldconfig &&
make -j$(nproc) && make install
```
``` sh dependencies
binutils
linux
```
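The {name} and {version} placeholders in the URL line are filled from the first line of the file ("glibc 2.41"). A small standalone sketch of that substitution, mirroring the replace calls in parser::get_url from src/utils/parser.rs:

```rust
fn main() {
    // First line of glibc.md: name = first word, version = second word.
    let (name, version) = ("glibc", "2.41");
    let url = "https://ftp.gnu.org/gnu/libc/{name}-{version}.tar.xz"
        .replace("{name}", name)
        .replace("{version}", version);
    assert_eq!(url, "https://ftp.gnu.org/gnu/libc/glibc-2.41.tar.xz");
}
```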

3
install.sh Executable file
View File

@@ -0,0 +1,3 @@
#!/bin/sh
cargo build --release
cargo install --path . --root /pkg/gnu/sexpkg/

117
src/commands/delete.rs Normal file
View File

@@ -0,0 +1,117 @@
use std::fs;
use std::io;
use std::path::Path;
use rayon::prelude::*;
use std::os::unix::fs::MetadataExt;
use super::get_var_path;
use crate::utils::parser;
pub fn delete(repo: &str, pkgname: &str) {
let base_dir = Path::new("/pkg").join(repo);
let pkg_dir = base_dir.join(pkgname);
if pkg_dir.exists() {
match fs::remove_dir_all(&pkg_dir) {
Ok(()) => { println!("removed: {}", pkg_dir.display()) }
Err(e) => { eprintln!("Can't remove {}: {}", pkg_dir.display(), e)}
}
} else {
eprintln!("{} not installed in {}", pkgname, repo)
}
let subdirs = ["lib", "include", "bin"];
for subdir in &subdirs {
let dir_path = base_dir.join(subdir);
if dir_path.exists() {
match remove_unused_files(&dir_path) {
Ok(()) => {}
Err(_) => {}
}
}
}
}
pub fn delete_recursive(repo: &str, pkgname: &str) {
let base_dir = Path::new("/pkg").join(repo);
let pkg_dir = base_dir.join(pkgname);
if pkg_dir.exists() {
let var_path = get_var_path();
let pkg_md_path = var_path.join(format!("{}/{}.md", repo, pkgname));
match parser::get_deps(&pkg_md_path) {
Ok(deps) => {
for dependency in deps.lines() {
let dependency = dependency.trim();
if !dependency.is_empty() {
let pkg_dir = Path::new("/pkg").join(repo).join(dependency);
if pkg_dir.exists() {
if !parser::get_use_status(repo, dependency) {
delete(repo, dependency)
}
}
}
}
}
Err(e) => {
eprintln!("Failed to parse dependencies: {}", e)
}
}
match fs::remove_dir_all(&pkg_dir) {
Ok(()) => { println!("removed: {}", pkg_dir.display()) }
Err(e) => { eprintln!("Can't remove {}: {}", pkg_dir.display(), e)}
}
} else {
eprintln!("{} not installed in {}", pkgname, repo)
}
let subdirs = ["lib", "include", "bin"];
for subdir in &subdirs {
let dir_path = base_dir.join(subdir);
if dir_path.exists() {
match remove_unused_files(&dir_path) {
Ok(()) => {}
Err(_) => {}
}
}
}
}
fn remove_unused_files(dir: &Path) -> Result<(), String> {
let entries = fs::read_dir(dir)
.map_err(|e| format!("Can't read {}: {}", dir.display(), e))?;
entries.par_bridge().for_each(|entry| {
if let Ok(entry) = entry {
let path = entry.path();
if path.is_file() {
if let Ok(nlink) = get_nlink(&path) {
if nlink == 1 {
if let Err(e) = fs::remove_file(&path) {
eprintln!("Can't remove {}: {}", path.display(), e)
}
}
}
} else if path.is_dir() {
if let Err(e) = remove_unused_files(&path) { eprintln!("Error processing directory {}: {}", path.display(), e) }
} else if path.is_symlink() {
if fs::read_link(&path).is_err() {
if let Err(e) = fs::remove_file(&path) { eprintln!("Can't remove symlink {}: {}", path.display(), e) }
}
}
}
});
Ok(())
}
fn get_nlink(path: &Path) -> io::Result<u64> {
Ok(fs::metadata(path)?.nlink())
}
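Both entry points finish with the same sweep of the shared bin/lib/include trees; delete_recursive additionally walks the package's dependency list from its .md file and removes any dependency that no other installed package still references. A small usage sketch (repo and package names are examples; these are the calls that src/main.rs, later in this commit, makes for the delete subcommand):

```rust
// Plain delete: remove /pkg/gnu/glibc, then prune orphaned hard links
// (nlink == 1) from /pkg/gnu/{bin,lib,include}.
commands::delete::delete("gnu", "glibc");
// Recursive delete: additionally remove dependencies such as binutils or
// linux if no other installed package's build-script.md still lists them.
commands::delete::delete_recursive("gnu", "glibc");
```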

165
src/commands/install.rs Normal file
View File

@@ -0,0 +1,165 @@
use std::fs;
use std::path::Path;
use std::process::Command;
use std::process;
use super::*;
use crate::utils::parser;
use crate::utils::hardcopy::hardcopy;
pub fn install(repo: &String, pkgname: &String) -> Result<(), bool> {
let var_path = get_var_path();
let pkg_md_path = var_path.join(format!("{}/{}.md", repo, pkgname));
if !pkg_md_path.exists() {
match parser::get_repo_addr(repo) {
Ok(repo_addr) => {
let rsync_command = format!(
"rsync --include='{}.md' --exclude='*' {} {}",
pkgname,
repo_addr,
pkg_md_path.to_str().unwrap()
);
let rsync_output = Command::new("sh")
.arg("-c")
.arg(rsync_command)
.output()
.expect("Failed to execute rsync");
if !rsync_output.status.success() {
eprintln!("broken repo: {}", repo);
return Err(false);
}
if !pkg_md_path.exists() {
eprintln!("not found {} in {} repo", pkgname, repo);
return Err(true);
}
}
Err(e) => {
eprintln!("Repository {} not found: {}", repo, e);
return Err(false);
}
}
}
let deps = match parser::get_deps(&pkg_md_path) {
Ok(deps) => deps,
Err(e) => {
eprintln!("Failed to parse dependencies {}: {}", &pkg_md_path.to_str().unwrap(), e);
return Err(false);
}
};
for dependency in deps.lines() {
let dependency = dependency.trim();
if !dependency.is_empty() {
let pkg_dir = Path::new("/pkg").join(repo).join(dependency);
if !pkg_dir.exists() {
match install(repo, &dependency.to_string()) {
Ok(()) => {}
Err(_) => {process::exit(1) }
}
}
}
}
let url = match parser::get_url(&pkg_md_path) {
Ok(url) => url,
Err(e) => {
eprintln!("Failed to parse URL: {}", e);
return Err(false);
}
};
let src = Path::new("/pkg/src");
fs::create_dir_all(&src).expect("Failed to create src directory");
let wget_command: String;
let src_dir: String;
if url.ends_with(".tar.gz") {
wget_command = format!("wget -O- {} | tar -xz -C {}", url, src.to_str().unwrap());
src_dir = format!(
"{}/{}",
src.to_str().unwrap(),
url
.rsplit('/')
.next()
.expect("Failed to extract archive name from URL")
.trim_end_matches(".tar.gz"))
} else if url.ends_with(".tar.xz") {
wget_command = format!("wget -O- {} | tar -xJ -C {}", url, src.to_str().unwrap());
src_dir = format!(
"{}/{}",
src.to_str().unwrap(),
url
.rsplit('/')
.next()
.expect("Failed to extract archive name from URL")
.trim_end_matches(".tar.xz"))
} else {
eprintln!("Unsupported archive format for URL: {}", url);
return Err(false);
}
println!("Downloading and extracting package from: {}", url);
let wget_output = Command::new("sh")
.arg("-c")
.arg(wget_command)
.output()
.expect("Failed to execute wget and tar");
if !wget_output.status.success() {
eprintln!("Failed to download and extract package: {}", String::from_utf8_lossy(&wget_output.stderr));
return Err(false);
}
// 3. Run the package build script
let build_script = match parser::get_build_script(&pkg_md_path) {
Ok(script) => script,
Err(e) => {
eprintln!("Failed to parse build script: {}", e);
return Err(false);
}
};
println!("{}", &src_dir);
let build_output = Command::new("sh")
.arg("-c")
.arg(build_script)
.current_dir(&src_dir)
.output()
.expect("Failed to execute build script");
if !build_output.status.success() {
eprintln!("Build failed: {}", String::from_utf8_lossy(&build_output.stderr));
return Err(false);
}
let build_script_dest = Path::new("/pkg")
.join(repo)
.join(pkgname)
.join("build-script.md");
fs::copy(&pkg_md_path, &build_script_dest).expect("Failed to copy build script");
let pkg_dir = Path::new("/pkg").join(repo).join(pkgname);
hardcopy(
&pkg_dir.join("bin"),
&Path::new("/pkg").join(repo).join("bin"),
).expect("Failed to copy bin directory");
hardcopy(
&pkg_dir.join("include"),
&Path::new("/pkg").join(repo).join("include"),
).expect("Failed to copy include directory");
hardcopy(
&pkg_dir.join("lib"),
&Path::new("/pkg").join(repo).join("lib"),
).expect("Failed to copy lib directory");
fs::remove_dir_all(&src_dir).expect("Failed to clean up src directory");
println!("Package {} installed successfully from repo {}", pkgname, repo);
Ok(())
}
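The source directory is derived from the archive name in the URL; a quick standalone check of that derivation for the glibc URL defined earlier in this commit (the /pkg/src prefix comes from the code above, and the build step relies on the tarball unpacking into a directory named after the archive):

```rust
fn main() {
    // Same string handling as install(): take the last path segment of the
    // URL, strip the archive suffix, and place the result under /pkg/src.
    let url = "https://ftp.gnu.org/gnu/libc/glibc-2.41.tar.xz";
    let archive = url.rsplit('/').next().unwrap(); // "glibc-2.41.tar.xz"
    let src_dir = format!("/pkg/src/{}", archive.trim_end_matches(".tar.xz"));
    assert_eq!(src_dir, "/pkg/src/glibc-2.41");
}
```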

View File

@@ -0,0 +1,37 @@
use std::process;
use crate::utils::parser;
use super::install::install;
pub fn install_all(pkgname: &String) {
let repos = match parser::get_repo_list() {
Ok(repos) => repos,
Err(e) => {
eprintln!("Failed to get repository list: {}", e);
return;
}
};
let mut success = false;
for repo in repos {
println!("Trying to install {} from repo {}...", pkgname, repo);
match install(&repo, pkgname) {
Ok(()) => {
success = true;
break;
}
Err(no_repo_package) => {
if no_repo_package {
continue;
} else {
process::exit(1)
}
}
}
}
if !success {
eprintln!("Package {} not found in any available repository", pkgname);
}
}
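The bool inside install()'s Err is what drives this loop: reading the two files together, Err(true) means the package's .md was simply not found in that repository (try the next one in priority order), while Err(false) means a hard failure (broken repo, download, or build error) and the process exits. A hypothetical, more self-describing spelling of the same convention:

```rust
// Not the actual type used by install(); it returns Result<(), bool>.
// This enum only names the two cases for readability.
enum InstallError {
    NotInThisRepo, // Err(true): install_all moves on to the next repository
    Fatal,         // Err(false): install_all calls process::exit(1)
}
```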

16
src/commands/mod.rs Normal file
View File

@@ -0,0 +1,16 @@
pub mod install;
pub mod install_all;
pub mod delete;
use std::path::PathBuf;
pub fn get_var_path() -> PathBuf {
let exe_path = std::env::current_exe().expect("Failed to get executable path");
exe_path
.parent()
.unwrap()
.join("../var")
.canonicalize()
.expect("Failed to canonicalize var path")
}
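get_var_path resolves the var directory relative to the running binary, so the tool can be relocated together with its data. Assuming the install location produced by install.sh (cargo install --root /pkg/gnu/sexpkg/ puts the binary in /pkg/gnu/sexpkg/bin), the resolution lands exactly on the directory the repository addresses in assets/etc/sexpkg.md point into; a small sketch of the intermediate steps:

```rust
use std::path::Path;

fn main() {
    // Assumed executable path, based on install.sh in this commit.
    let exe = Path::new("/pkg/gnu/sexpkg/bin/sexpkg");
    let var = exe.parent().unwrap().join("../var");
    assert_eq!(var.as_path(), Path::new("/pkg/gnu/sexpkg/bin/../var"));
    // canonicalize() then collapses the "bin/.." component to yield
    // /pkg/gnu/sexpkg/var (and errors out if that directory does not exist).
}
```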

View File

@@ -1,38 +1,70 @@
-use rayon::prelude::*;
-use std::fs;
-use std::io;
-use std::path::Path;
-use std::os::unix;
-fn copy_recursive_hardlink_optimized(source: &Path, destination: &Path) -> io::Result<()> {
-let metadata = fs::symlink_metadata(source)?;
-if metadata.file_type().is_dir() {
-fs::create_dir_all(destination)?;
-let entries: Vec<_> = fs::read_dir(source)?.collect::<io::Result<Vec<_>>>()?;
-entries.par_iter().try_for_each(|entry| {
-let path = entry.path();
-if let Some(file_name) = path.file_name() {
-let dest_path = destination.join(file_name);
-copy_recursive_hardlink_optimized(&path, &dest_path)
-} else {
-Ok(())
-}
-})?;
-} else if metadata.file_type().is_file() {
-fs::hard_link(source, destination)?;
-} else if metadata.file_type().is_symlink() {
-let target = fs::read_link(source)?;
-unix::fs::symlink(target, destination)?;
-}
-Ok(())
-}
-fn main() -> io::Result<()> {
-let source = Path::new("/usr/lib");
-let destination = Path::new("/pkg/gnu/lib");
-copy_recursive_hardlink_optimized(source, destination)?;
-Ok(())
-}
+mod commands;
+mod utils;
+fn main() {
+let matches = clap::Command::new("pkg")
+.version("1.0")
+.about("Package manager for installing software from repositories")
+.subcommand(
+clap::Command::new("install")
+.about("Install a package")
+.arg(
+clap::Arg::new("args")
+.help("Repository and package name (optional repo)")
+.required(true)
+.num_args(1..=2)
+.value_names(["repo", "pkgname"]),
+),
+)
+.subcommand(
+clap::Command::new("delete")
+.about("Delete a package from a repository")
+.arg(
+clap::Arg::new("repo")
+.help("Repository name")
+.required(true),
+)
+.arg(
+clap::Arg::new("pkgname")
+.help("Package name")
+.required(true),
+)
+.arg(
+clap::Arg::new("recursive")
+.short('R')
+.long("recursive")
+.help("Recursively delete the package")
+.action(clap::ArgAction::SetTrue),
+),
+)
+.get_matches();
+if let Some(install_matches) = matches.subcommand_matches("install") {
+let args: Vec<&String> = install_matches.get_many::<String>("args").unwrap().collect();
+match args.len() {
+1 => {
+let pkgname = args[0];
+commands::install_all::install_all(pkgname);
+}
+2 => {
+let repo = args[0];
+let pkgname = args[1];
+commands::install::install(repo, pkgname).unwrap();
+}
+_ => unreachable!(),
+}
+} else if let Some(delete_matches) = matches.subcommand_matches("delete") {
+let repo = delete_matches.get_one::<String>("repo").unwrap();
+let pkgname = delete_matches.get_one::<String>("pkgname").unwrap();
+let recursive = delete_matches.get_flag("recursive");
+if recursive {
+commands::delete::delete_recursive(repo, pkgname);
+} else {
+commands::delete::delete(repo, pkgname);
+}
+} else {
+println!("No command provided. Use `pkg --help` for usage information.");
+}
+}
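For reference, the install subcommand accepts one or two positional arguments; with only a package name, the repositories from assets/etc/sexpkg.md are tried in priority order. A sketch of how the two forms dispatch (package and repo names are examples):

```rust
// `install glibc`      -> commands::install_all::install_all("glibc")
//                         (tries "gnu", then "musl", in the order listed in sexpkg.md)
// `install gnu glibc`  -> commands::install::install("gnu", "glibc")
```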

31
src/utils/hardcopy.rs Normal file
View File

@@ -0,0 +1,31 @@
use rayon::prelude::*;
use std::fs;
use std::io;
use std::path::Path;
use std::os::unix;
pub fn hardcopy(source: &Path, destination: &Path) -> io::Result<()> {
let metadata = fs::symlink_metadata(source)?;
if metadata.file_type().is_file() {
fs::hard_link(source, destination)?;
} else if metadata.file_type().is_dir() {
fs::create_dir_all(destination)?;
let entries: Vec<_> = fs::read_dir(source)?.collect::<io::Result<Vec<_>>>()?;
entries.par_iter().try_for_each(|entry| {
let path = entry.path();
if let Some(file_name) = path.file_name() {
let dest_path = destination.join(file_name);
hardcopy(&path, &dest_path)
} else {
Ok(())
}
})?;
} else if metadata.file_type().is_symlink() {
let target = fs::read_link(source)?;
unix::fs::symlink(target, destination)?;
}
Ok(())
}
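hardcopy is how install.rs publishes a package's bin, include, and lib trees into the shared per-repository trees: regular files are hard-linked rather than copied, so publishing costs no extra space and the link count doubles as the reference count that delete.rs later checks. A small standalone demonstration of that property (temporary paths, not the real /pkg layout):

```rust
use std::fs;
use std::os::unix::fs::MetadataExt;

fn main() -> std::io::Result<()> {
    let dir = std::env::temp_dir().join("hardcopy-demo");
    let _ = fs::remove_dir_all(&dir); // start clean if a previous run left files
    fs::create_dir_all(&dir)?;
    let package_copy = dir.join("libdemo.so");
    let shared_copy = dir.join("shared-libdemo.so");
    fs::write(&package_copy, b"demo")?;
    fs::hard_link(&package_copy, &shared_copy)?; // same inode, two names
    assert_eq!(fs::metadata(&shared_copy)?.nlink(), 2);
    fs::remove_file(&package_copy)?; // like removing the package directory
    assert_eq!(fs::metadata(&shared_copy)?.nlink(), 1); // orphan, per delete.rs
    Ok(())
}
```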

2
src/utils/mod.rs Normal file
View File

@@ -0,0 +1,2 @@
pub mod hardcopy;
pub mod parser;

183
src/utils/parser.rs Normal file
View File

@@ -0,0 +1,183 @@
use std::fs;
use std::env;
use std::io::{self, BufRead};
use std::path::Path;
pub fn get_name<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let first_line = read_first_line(file_path)?;
Ok(first_line.split_whitespace().next().unwrap_or("").to_string())
}
pub fn get_version<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let first_line = read_first_line(file_path)?;
Ok(first_line
.split_whitespace()
.nth(1)
.unwrap_or("")
.to_string())
}
pub fn get_url<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let lines = read_lines(&file_path)?;
let third_line = lines.get(2)
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "File has less than 3 lines"))?
.trim()
.to_string();
let name = get_name(&file_path)?;
let version = get_version(&file_path)?;
let url = third_line
.replace("{name}", &name)
.replace("{version}", &version);
Ok(url)
}
pub fn get_deps<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
extract_block(file_path, "``` sh dependencies", "```")
}
pub fn get_build_script<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
extract_block(file_path, "``` sh build.sctipt", "```")
}
pub fn get_repo_list() -> io::Result<Vec<String>> {
let exe_path = env::current_exe()?;
let file_path = exe_path
.parent()
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Failed to get executable directory"))?
.join("../etc/sexpkg.md");
let block = extract_block(file_path, "``` sh *** Repository list and priority ***", "```")?;
let mut repo_list = Vec::new();
for line in block.lines() {
let trimmed_line = line.trim();
if !trimmed_line.is_empty() {
let parts: Vec<&str> = trimmed_line.split_whitespace().collect();
if let Some(repo) = parts.first() {
repo_list.push(repo.to_string());
}
}
}
Ok(repo_list)
}
pub fn get_repo_addr(repo: &str) -> io::Result<String> {
let exe_path = env::current_exe()?;
let file_path = exe_path
.parent()
.ok_or_else(|| io::Error::new(io::ErrorKind::Other, "Failed to get executable directory"))?
.join("../etc/sexpkg.md");
let block = extract_block(file_path, "``` sh *** Repository list and priority ***", "```")?;
for line in block.lines() {
let trimmed_line = line.trim();
if !trimmed_line.is_empty() {
let parts: Vec<&str> = trimmed_line.split_whitespace().collect();
if parts.len() >= 2 && parts[0] == repo {
return Ok(parts[1].to_string());
}
}
}
Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Repository '{}' not found in the repository list", repo),
))
}
pub fn get_use_status(repo: &str, dependency: &str) -> bool {
let base_path = format!("/pkg/{}", repo);
let path = Path::new(&base_path);
if !path.exists() || !path.is_dir() {
return false;
}
let mut match_count = 0;
if let Ok(entries) = fs::read_dir(path) {
for entry in entries.flatten() {
let subdir_path = entry.path();
if subdir_path.is_dir() {
let script_path = subdir_path.join("build-script.md");
if let Ok(lines) = read_lines(&script_path) {
for line in lines.iter() {
if line.trim() == format!("={}", dependency) {
match_count += 1;
if match_count > 1 {
return true
}
break;
}
}
}
}
}
}
return false
}
fn read_first_line<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let file = fs::File::open(file_path)?;
let reader = io::BufReader::new(file);
if let Some(line) = reader.lines().next() {
line
} else {
Err(io::Error::new(io::ErrorKind::InvalidData, "File is empty"))
}
}
fn read_lines<P: AsRef<Path>>(file_path: P) -> io::Result<Vec<String>> {
let file = fs::File::open(file_path)?;
let reader = io::BufReader::new(file);
reader.lines().collect()
}
fn extract_block<P: AsRef<Path>>(
file_path: P,
start_marker: &str,
end_marker: &str,
) -> io::Result<String> {
let lines = read_lines(file_path)?;
let mut block_started = false;
let mut result = Vec::new();
for line in lines {
if line.trim() == start_marker {
block_started = true;
continue;
}
if block_started {
if line.trim() == end_marker {
break;
}
result.push(line.trim().to_string());
}
}
if result.is_empty() {
Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Block between '{}' and '{}' not found", start_marker, end_marker),
))
} else {
Ok(result.join("\n"))
}
}

View File

@@ -1,35 +0,0 @@
#!/bin/bash
if [ -z "$1" ]; then
echo "Usage: $0 <path_to_executable>"
exit 1
fi
test_file="$1"
iterations=100
times=()
for i in $(seq 1 $iterations); do
rm -rf /pkg/gnu/lib
sleep 0.1
start_time=$(date +%s%N)
$test_file
end_time=$(date +%s%N)
elapsed_time=$((end_time - start_time))
elapsed_time_seconds=$(echo "scale=6; $elapsed_time / 1000000000" | bc)
times+=($elapsed_time_seconds)
done
sorted_times=($(printf '%s\n' "${times[@]}" | sort -n))
mid=$((iterations / 2))
if (( iterations % 2 == 1 )); then
median=${sorted_times[$mid]}
else
median=$(echo "scale=6; (${sorted_times[$mid]} + ${sorted_times[$((mid - 1))]} ) / 2" | bc)
fi
echo "Test: $test_file"
echo "Median exec time: $median"