before index-conflict changes

2025-11-18 19:46:36 +03:00
parent 28ba2135ec
commit 30540602bd
66 changed files with 1403 additions and 879 deletions

View File

@ -9,16 +9,38 @@ gnu /pkg/gnu/aeropkg/var/gnu
musl /pkg/gnu/aeropkg/var/musl
```
> # Env
> Block name `env *** env ***`
> Global process environment
>
> Block name `env *** env [repo] [stages] ***`
> Install subcommand environment
> Repo and stages are optional
> Valid stages: download, patch, build, config.
> Params:
> save_source=true|false (default: false)
> pgo=generate|use (default: none)
> disable=true|false (default: false)
> # Trim options
``` env *** env ***
SHELL=/bin/sh
```
``` env *** env gnu ***
save_source=true
```
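> A combined repo-and-stage block, shown for illustration (the `gnu build` scope and the `pgo` value are only an example):
``` env *** env gnu build ***
pgo=generate
```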
> # Hooks
> Block format `sh *** hook [repo] [stages] ***`
> Execute a shell script for a specific repository
>
> Repo and stages are optional
> Valid stages: download, patch, build, config.
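> An illustrative hook block (the `gnu build` scope and the echoed message are placeholders; `repo` and `pkgname` come from the hook environment):
``` sh *** hook gnu build ***
echo "build hook for $pkgname in $repo"
```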
> # Trim
> Block format `cfg *** Trim rules ***`
> Trimming removes files that have not been used within the specified period
>
> Usage:
> pkg trim \<repo\> \<date\> \[time\]
> date: DD.MM.YYYY
> time: HH:mm:ss
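> Example: `pkg trim gnu 01.01.2025 00:00:00`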
>
> Configuration format
> Relative paths are resolved against /pkg/<repo>; wildcards are supported
> Add ! to exclude a path
@ -26,37 +48,3 @@ musl /pkg/gnu/aeropkg/var/musl
!**
!aeropkg
```
> # Env
> Build stages environment
>
> Example:
> env
> env \<repo\>
> env \<stage\>
> env \<repo\> \<stage\>
> stage can be: download, patch, build, config
``` cfg *** Env <repo> <stage> ***
src_remove=true|false (default true)
pgo=generate|use (default none)
disable=true|false (default false)
```
> # Hooks
> Execute a shell script for a specific repository
>
> Example:
> Hook \<repo\> \<stage\>
> Hook \<stage\>
> stage can be: download, patch, build, config
>
> hook env
> pkgname - package name
> repo - repository name
``` sh *** Hook <repo> <stage> ***
echo "hooked"
```

23
assets/var/gnu/7z.md Normal file
View File

@ -0,0 +1,23 @@
7z 25.01
=
https://github.com/ip7z/7zip/archive/refs/tags/25.01.tar.gz
``` sh *** build ***
cd CPP/7zip/Bundles/Alone2
make -f makefile.gcc
mkdir -p /pkg/gnu/7z/bin
cp _o/7zz /pkg/gnu/7z/bin/
ln /pkg/gnu/7z/bin/7zz /pkg/gnu/7z/bin/7z
```
``` sh *** config ***
cat > /pkg/gnu/7z/bin/unzip <<'EOF'
#!/bin/bash
if [ "$1" = "-n" ]; then
shift
fi
7z x -y "$@" >/dev/null 2>&1
[ $? -le 1 ] && exit 0 || exit 1
EOF
chmod +x /pkg/gnu/7z/bin/unzip
```

10
assets/var/gnu/ell.md Normal file
View File

@ -0,0 +1,10 @@
ell
=
git://git.kernel.org/pub/scm/libs/ell/ell.git
``` sh *** build ***
libtoolize && ./bootstrap &&
./configure --prefix=/pkg/gnu/ell &&
make -j$(nproc) && make install
```

7
assets/var/gnu/fzf.md Normal file
View File

@ -0,0 +1,7 @@
fzf 0.65.2
=
https://github.com/junegunn/fzf/archive/refs/tags/v0.65.2.tar.gz
``` sh *** build ***
make install PREFIX=/pkg/gnu/fzf FZF_VERSION=0.65.2 FZF_REVISION=tarball
```

View File

@ -9,14 +9,14 @@ mkdir build; cd build &&
--disable-multilib \
--enable-install-ldconfig \
--prefix=/pkg/gnu/glibc &&
make -j$(nproc) && make install
make -j$(nproc) && make install &&
mkdir -p /pkg/gnu/glibc/{share/i18n/charmaps,lib/locale,etc/ld.so.conf.d} &&
cp localedata/charmaps/* /pkg/gnu/glibc/share/i18n/charmaps/
```
``` sh *** config ***
mkdir -p /pkg/gnu/glibc/{share/i18n/charmaps,lib/locale,etc/ld.so.conf.d} &&
echo "include /pkg/gnu/glibc/etc/ld.so.conf.d/*.conf" > /pkg/gnu/glibc/etc/ld.so.conf &&
echo "/pkg/gnu/lib" > /pkg/gnu/glibc/etc/ld.so.conf.d/lib.conf &&
cp localedata/charmaps/* /pkg/gnu/glibc/share/i18n/charmaps/ &&
/pkg/gnu/glibc/bin/localedef -i en_US -f UTF-8 en_US.UTF-8 &&
sed -i 's|/pkg/gnu/glibc/lib64/ld-linux-x86-64.so.2|/pkg/gnu/glibc/lib/ld-linux-x86-64.so.2|g' /pkg/gnu/glibc/bin/ldd
```

15
assets/var/gnu/go-1.4.md Normal file
View File

@ -0,0 +1,15 @@
go 1.4.3
=
https://go.dev/dl/go1.4.3.src.tar.gz
``` sh *** build ***
cd src
export CC="gcc -std=gnu99"
export CGO_ENABLED=0
export GOROOT_FINAL=/pkg/gnu/go-1.4
./make.bash
mkdir -p /pkg/gnu/go-1.4/
cp -r ../bin /pkg/gnu/go-1.4
cp -r ../lib /pkg/gnu/go-1.4
cp -r ../include /pkg/gnu/go-1.4
```

16
assets/var/gnu/go.md Normal file
View File

@ -0,0 +1,16 @@
go 1.25.1
=
https://go.dev/dl/go1.25.1.linux-amd64.tar.gz
``` sh *** build ***
export GOROOT_BOOTSTRAP=/pkg/src/go/.go-bootstrap
export PKG_HOME=/pkg/gnu/go
export GOROOT_FINAL=/pkg/gnu/go
mkdir -p $GOROOT_BOOTSTRAP
cp -rPl ./* $GOROOT_BOOTSTRAP
cd src
./make.bash
cd ..
mkdir -p $PKG_HOME
cp -rPl ./* $PKG_HOME
```

View File

@ -1,12 +1,19 @@
iwd
=
https://git.kernel.org/pub/scm/network/wireless/iwd.git
git://git.kernel.org/pub/scm/network/wireless/iwd.git
``` sh *** build ***
libtoolize && ./bootstrap && ./configure --prefix=/pkg/gnu/iwd && make -j$(nproc) && make install
libtoolize && ./bootstrap &&
./configure \
--disable-systemd-service \
--disable-manual-pages \
--prefix=/pkg/gnu/iwd &&
make -j$(nproc) && make install
```
``` cfg *** build deps ***
dbus
readline
ncurses
ell
```

View File

@ -0,0 +1,13 @@
libeconf 0.8.0
=
https://github.com/openSUSE/libeconf/archive/refs/tags/v0.8.0.tar.gz
``` sh *** build ***
mkdir build
cd build
meson setup \
--prefix=/pkg/gnu/libeconf \
--libdir=lib \
--buildtype=release \
.. &&
ninja && ninja install # assumed: compile/install step, missing from the original script
```

10
assets/var/gnu/libuv.md Normal file
View File

@ -0,0 +1,10 @@
libuv 1.51
=
https://github.com/libuv/libuv/archive/refs/tags/v1.51.0.tar.gz
``` sh *** build ***
./autogen.sh &&
./configure --prefix=/pkg/gnu/libuv --disable-static &&
make -j$(nproc) &&
make install
```

View File

@ -1,6 +1,6 @@
linux 6.14.11
linux 6.17.8
=
https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-6.14.11.tar.xz
https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-6.17.8.tar.xz
``` sh *** build ***
make ARCH=x86 INSTALL_HDR_PATH=/pkg/gnu/linux headers_install

12
assets/var/gnu/lpeg.md Normal file
View File

@ -0,0 +1,12 @@
lpeg 1.1.0
=
https://www.inf.puc-rio.br/~roberto/lpeg/lpeg-1.1.0.tar.gz
``` sh *** build ***
make \
LUADIR=/pkg/gnu/lpeg/include \
LUA_LIBDIR=/pkg/gnu/lpeg/lib \
LUA_INC=-I/pkg/gnu/lpeg/include \
CC=gcc \
CFLAGS="-O2 -Wall -shared -fPIC"
```

8
assets/var/gnu/lua.md Normal file
View File

@ -0,0 +1,8 @@
lua 5.4.8
=
https://www.lua.org/ftp/lua-5.4.8.tar.gz
``` sh *** build ***
make linux INSTALL_TOP=/pkg/gnu/lua
make install INSTALL_TOP=/pkg/gnu/lua
```

7
assets/var/gnu/luajit.md Normal file
View File

@ -0,0 +1,7 @@
luajit 2.1
=
https://luajit.org/git/luajit.git
``` sh *** build ***
make install PREFIX=/pkg/gnu/luajit
```

View File

@ -0,0 +1,13 @@
luarocks 3.12.2
=
https://github.com/luarocks/luarocks/archive/refs/tags/v3.12.2.tar.gz
``` sh *** build ***
./configure --prefix=/pkg/gnu/luarocks
make -j$(nproc)
make install
```
``` cfg *** run deps ***
7z
```

18
assets/var/gnu/luv.md Normal file
View File

@ -0,0 +1,18 @@
luv 1.51
=
https://github.com/luvit/luv/releases/download/1.51.0-1/luv-1.51.0-1.tar.gz
``` sh *** build ***
mkdir -p build; cd build
cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/pkg/gnu/luv \
-DLUA_BUILD_TYPE=System \
-DBUILD_SHARED_LIBS=ON
make -j$(nproc)
make install
```
``` cfg *** build deps ***
libuv
```

View File

@ -3,7 +3,30 @@ ncurses 6.5
https://ftp.gnu.org/gnu/ncurses/ncurses-6.5.tar.gz
``` sh *** build ***
CFG_FLAGS="--prefix=/pkg/gnu/ncurses --without-debug --enable-pc-files --with-pkg-config-libdir=/pkg/gnu/ncurses/lib/pkgconfig"
./configure $CFG_FLAGS && make -j$(nproc) && make install && make distclean &&
./configure $CFG_FLAGS --disable-widec && make -j$(nproc) && make install
./configure \
--prefix=/pkg/gnu/ncurses \
--without-cxx-binding \
--without-debug \
--with-termlib \
--with-shared \
--enable-pc-files \
--with-pkg-config-libdir=/pkg/gnu/ncurses/lib/pkgconfig &&
make -j$(nproc) && make install && make clean &&
./configure \
--prefix=/pkg/gnu/ncurses \
--without-cxx-binding \
--without-debug \
--without-normal \
--with-termlib \
--with-shared \
--enable-pc-files \
--with-pkg-config-libdir=/pkg/gnu/ncurses/lib/pkgconfig \
--disable-widec &&
make -j$(nproc) && make install
```
``` cfg *** build deps ***
pkg-config
```
--enable-getcap \

25
assets/var/gnu/neovim.md Normal file
View File

@ -0,0 +1,25 @@
neovim nightly
=
https://github.com/neovim/neovim/archive/refs/tags/nightly.tar.gz
``` sh *** build ***
luarocks install lpeg
pkg link gnu luarocks
mkdir -p build; cd build
cmake .. \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/pkg/gnu/neovim
make -j$(nproc)
make install
```
``` cfg *** build deps ***
luv
tree-sitter
unibilium
utf8proc
```
``` cfg *** run deps ***
luajit
```

View File

@ -5,3 +5,16 @@ https://github.com/openssl/openssl/releases/download/openssl-3.5.0/openssl-3.5.0
``` sh *** build ***
./Configure --prefix=/pkg/gnu/openssl --libdir=lib linux-x86_64 shared zlib-dynamic && make -j$(nproc) && make install
```
``` sh *** config ***
wget https://curl.se/ca/cacert.pem -O /pkg/gnu/openssl/ssl/cacert.pem --no-check-certificate &&
csplit -f "/pkg/gnu/openssl/ssl/certs/cert-" -b "%03d.pem" "/pkg/gnu/openssl/ssl/cacert.pem" '/-----BEGIN CERTIFICATE-----/' '{*}' &>/dev/null &&
for cert in "/pkg/gnu/openssl/ssl/certs/"cert-*.pem; do
hash=$(openssl x509 -hash -noout -in "$cert")
mv "$cert" "/pkg/gnu/openssl/ssl/certs/$hash.$(find "/pkg/gnu/openssl/ssl/certs/" -maxdepth 1 -type f -name "$hash.*" | wc -l)"
done
```
``` cfg *** run deps ***
wget
```

View File

@ -3,5 +3,28 @@ readline 8.3
https://ftp.gnu.org/gnu/readline/readline-8.3.tar.gz
``` sh *** build ***
./configure --prefix=/pkg/gnu/readline && make -j$(nproc) && make install
export LIBS="-ltinfo"
./configure \
--prefix=/pkg/gnu/readline \
--with-curses \
--with-shared-termcap-library=tinfo &&
make -j$(nproc) && make install
sed -i 's/Requires\.private: termcap/Requires.private: tinfo/' /pkg/gnu/readline/lib/pkgconfig/readline.pc
```
``` cfg *** build deps ***
ncurses
```
Modern programs
Almost all modern programs (including bash, gdb, iwd, python, etc.) use readline → ncurses → terminfo.
Only very old or exotic systems (for example, embedded setups without ncurses) still rely on termcap.
If you add
export CPPFLAGS="-I/pkg/gnu/ncurses/include"
export LDFLAGS="-L/pkg/gnu/ncurses/lib -ltinfo"
export LIBS="-ltinfo"
then the generated .pc file configuration breaks, though exactly why is still unclear;
if you do not add them, tinfo is not picked up as a dependency.
Something more specific probably still needs to be worked out here.
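A quick sanity check (a sketch, assuming the prefix used above; the pkgconfig path is an assumption) for what readline actually linked against and what the patched .pc file reports:
``` sh
# list the shared library's runtime dependencies
ldd /pkg/gnu/readline/lib/libreadline.so | grep -E 'tinfo|ncurses'
# show the link flags advertised by the .pc file
PKG_CONFIG_PATH=/pkg/gnu/readline/lib/pkgconfig pkg-config --libs --static readline
```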

View File

@ -0,0 +1,13 @@
s6-linux-utils 2.6.3
=
https://skarnet.org/software/s6-linux-utils/s6-linux-utils-2.6.3.0.tar.gz
``` sh *** build ***
./configure --prefix=/pkg/gnu/s6-linux-utils --with-sysdeps=/pkg/gnu/skalibs/lib/skalibs/sysdeps &&
make -j$(nproc) && make install
```
``` cfg *** build deps ***
skalibs
```

View File

@ -1,4 +1,5 @@
s6 2.13.2
=
https://skarnet.org/software/s6/s6-2.13.2.0.tar.gz
``` sh *** build ***
@ -6,6 +7,11 @@ https://skarnet.org/software/s6/s6-2.13.2.0.tar.gz
make -j$(nproc) && make install && mkdir -p /pkg/gnu/s6/etc/service
```
``` sh *** config ***
echo '#!/pkg/gnu/bin/execlineb -P\n\n/pkg/gnu/bin/export PATH /bin:/pkg/musl/bin:/pkg/musl/sbin:/pkg/gnu/bin:/pkg/gnu/sbin\n/pkg/gnu/bin/exec s6-svscan /pkg/gnu/s6/etc/service/current' > /pkg/gnu/s6/bin/s6-init
```
``` cfg *** build deps ***
skalibs
aero-utils
```

View File

@ -6,6 +6,11 @@ https://github.com/shadow-maint/shadow/releases/download/4.17.4/shadow-4.17.4.ta
./configure --prefix=/pkg/gnu/shadow && make -j$(nproc) && make install
```
``` sh *** config ***
chmod u+s /pkg/gnu/bin/su
echo '/bin/bash\n/bin/zsh' > /etc/shells
```
``` cfg *** build deps ***
linux-pam
libbsd

View File

@ -0,0 +1,8 @@
tree-sitter 0.25.10
=
https://github.com/tree-sitter/tree-sitter/archive/refs/tags/v0.25.10.tar.gz
``` sh *** build ***
make -j$(nproc)
make install PREFIX=/pkg/gnu/tree-sitter
```

View File

@ -0,0 +1,8 @@
unibilium 2.0.0
=
https://github.com/mauke/unibilium/archive/refs/tags/v2.0.0.tar.gz
``` sh *** build ***
make -j$(nproc)
make install PREFIX=/pkg/gnu/unibilium
```

View File

@ -0,0 +1,12 @@
utf8proc 2.11.0
=
https://github.com/JuliaStrings/utf8proc/archive/refs/tags/v2.11.0.tar.gz
``` sh *** patch ***
sed -i "s|prefix=/usr/local|prefix=/pkg/gnu/utf8proc|" Makefile
```
``` sh *** build ***
make -j$(nproc)
make install
```

View File

@ -5,3 +5,7 @@ https://mirrors.edge.kernel.org/pub/linux/utils/util-linux/v2.41/util-linux-2.41
``` sh *** build ***
./configure --prefix=/pkg/gnu/util-linux && make -j$(nproc) && make install
```
``` cfg *** build deps ***
libeconf
```

View File

@ -7,19 +7,11 @@ https://ftp.gnu.org/gnu/wget/wget-1.25.0.tar.gz
make -j$(nproc) && make install
```
``` sh *** config ***
wget https://curl.se/ca/cacert.pem -O /pkg/gnu/openssl/ssl/cacert.pem --no-check-certificate &&
csplit -f "/pkg/gnu/openssl/ssl/certs/cert-" -b "%03d.pem" "/pkg/gnu/openssl/ssl/cacert.pem" '/-----BEGIN CERTIFICATE-----/' '{*}' &>/dev/null &&
for cert in "/pkg/gnu/openssl/ssl/certs/"cert-*.pem; do
hash=$(openssl x509 -hash -noout -in "$cert")
mv "$cert" "/pkg/gnu/openssl/ssl/certs/$hash.$(find "/pkg/gnu/openssl/ssl/certs/" -maxdepth 1 -type f -name "$hash.*" | wc -l)"
done
```
``` cfg *** build deps ***
pcre2
zlib
libxcrypt
openssl
```
``` cfg *** run deps ***

View File

@ -0,0 +1,3 @@
linux-6.17 6.17.7
=

View File

@ -2,15 +2,22 @@
if [ -z "$1" ]; then
echo "Ошибка: Укажите путь установки как аргумент."
echo "Пример использования: $0 /pkg/gnu/aeropkg/"
echo "Пример использования: $0 /pkg/gnu/aeropkg/ /pkg"
exit 1
fi
if [ -z "$2" ]; then
echo "Ошибка: Укажите базовую директорию как аргумент."
echo "Пример использования: $0 /pkg/gnu/aeropkg/ /pkg"
exit 1
fi
INSTALL_PATH="$1"
BASE_PATH="$2"
SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
echo "Установка в '$INSTALL_PATH'..."
AEROPKG_HOME=$INSTALL_PATH cargo install --path . --root "$INSTALL_PATH"
AEROPKG_BASE=$BASE_PATH AEROPKG_HOME=$INSTALL_PATH cargo install --path . --root "$INSTALL_PATH"
patchelf --set-interpreter /pkg/gnu/glibc/lib/ld-linux-x86-64.so.2 /pkg/gnu/aeropkg/bin/pkg
if [ $? -ne 0 ]; then
@ -26,4 +33,5 @@ else
echo "Предупреждение: Директория '$ASSETS_DIR' не найдена. Пропуск копирования."
fi
echo "Проект успешно установлен в '$INSTALL_PATH'."
echo "Проект успешно установлен в '$INSTALL_PATH'"
echo "Базовая директория - '$BASE_PATH'"

View File

@ -1,15 +1,14 @@
use std::fs;
use std::path::{Path,PathBuf};
use crate::utils::parser;
pub fn delete(repo: &String, pkgname: &String) {
super::disable::disable(&repo, &pkgname).unwrap();
fs::remove_dir_all(PathBuf::from("/pkg").join(&repo).join(&pkgname)).unwrap();
fs::remove_dir_all(super::get_aeropkg_base().join(&repo).join(&pkgname)).unwrap();
}
pub fn delete_recursive(repo: &String, pkgname: &String) {
let pkg_dir = Path::new("/pkg").join(repo).join(pkgname);
let pkg_dir = super::get_aeropkg_base().join(repo).join(pkgname);
if pkg_dir.exists() {
let var_path = super::get_var_path();
@ -20,9 +19,9 @@ pub fn delete_recursive(repo: &String, pkgname: &String) {
for dependency in deps.lines() {
let dependency = dependency.trim();
if !dependency.is_empty() {
let pkg_dir = Path::new("/pkg").join(repo).join(dependency);
let pkg_dir = super::get_aeropkg_base().join(repo).join(dependency);
if pkg_dir.exists() {
if !parser::get_use_status(repo, dependency) {
if !parser::pkginfo::get_use_status(repo, dependency) {
delete(repo, &dependency.to_string())
}
}

View File

@ -1,11 +1,12 @@
use std::path::PathBuf;
use std::fs;
use crate::utils::deletecopy::deletecopy;
use crate::utils::fs::deletecopy::deletecopy;
use crate::utils::shell::*;
pub fn disable(repo: &String, pkgname: &String) -> Result<(), String> {
let source = PathBuf::from("/pkg").join(repo).join(pkgname);
let _ = fs::File::create(&source.join("disabled"));
let source = crate::commands::get_aeropkg_base().join(repo).join(pkgname);
if source.join("disabled").exists() { return Ok(()) };
let destination = source.parent()
.ok_or("Failed to get parent directory for path")?
.to_path_buf();
@ -31,5 +32,6 @@ pub fn disable(repo: &String, pkgname: &String) -> Result<(), String> {
mount_overlay(&destination)?;
shell_update()?;
fs::File::create(&source.join("disabled")).ok();
Ok(())
}

View File

@ -1,9 +1,8 @@
use std::path::PathBuf;
use std::fs;
use super::link::link;
pub fn enable(repo: &String, pkgname: &String) -> Result<(), String> {
let source = PathBuf::from("/pkg").join(repo).join(pkgname);
let source = crate::commands::get_aeropkg_base().join(repo).join(pkgname);
let _ = fs::remove_file(&source.join("disabled"));
link(&repo, &pkgname)?;

View File

@ -1,123 +1,27 @@
use std::fs;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::path::Path;
use std::process;
use std::os::unix::fs::MetadataExt;
use crate::utils::parser;
use crate::commands::link::link;
use crate::utils::parser::{self, pkginfo};
use super::run::download::download;
use super::run::patch::patch;
use super::run::build::build;
use super::run::config::config;
pub fn install(repo: &String, pkgname: &String) -> Result<(), bool> {
let var_path = super::get_var_path();
let pkg_md_path = var_path.join(format!("{}/{}.md", &repo, &pkgname));
let builded_pkg_md_path = PathBuf::from("/pkg").join(&repo).join(&pkgname).join("build-script.md");
if !pkg_md_path.exists() {
upload_from_repo(&repo, &pkgname, &pkg_md_path)?;
}
if builded_pkg_md_path.exists() {
if fs::metadata(&builded_pkg_md_path).unwrap().ino() == fs::metadata(&pkg_md_path).unwrap().ino() {
println!("package {} already installed in {} repo", &pkgname, &repo);
return Ok(())
}
}
let pkg_md_path = crate::commands::get_var_path().join(format!("{}/{}.md", &repo, &pkgname));
check_build_dependency(&repo, &pkg_md_path)?;
check_run_dependency(&pkg_md_path)?;
download(&pkgname, &pkg_md_path)?;
let src_dir = PathBuf::from("/pkg/src").join(&pkgname);
patch(&pkgname, &src_dir, &pkg_md_path)?;
build(&repo, &pkgname, &src_dir, &pkg_md_path)?;
link(&repo, &pkgname).expect("Failed link package");
hook(&repo, &pkgname);
config(&src_dir, &pkg_md_path)?;
let src_remove_flag = match std::env::var("src_remove") {
Ok(value) => value != "false",
Err(_) => true,
};
if src_remove_flag {
if let Err(e) = fs::remove_dir_all(&src_dir) {
eprintln!("Failed to remove source directory: {}", e);
return Err(false);
}
}
download(&repo, &pkgname)?;
patch(&repo, &pkgname)?;
build(&repo, &pkgname)?;
config(&repo, &pkgname)?;
println!("Package {} installed successfully from repo {}", pkgname, repo);
Ok(())
}
pub fn install_all(pkgname: &String) {
let repos = match parser::get_repo_list() {
Ok(repos) => repos,
Err(e) => {
eprintln!("Failed to get repository list: {}", e);
return;
}
};
let mut success = false;
for repo in repos {
println!("Trying to install {} from repo {}...", pkgname, repo);
match install(&repo, pkgname) {
Ok(()) => {
success = true;
break;
}
Err(no_repo_package) => {
if no_repo_package {
continue;
} else {
process::exit(1)
}
}
}
}
if !success {
eprintln!("Package {} not found in any available repository", pkgname);
}
}
fn upload_from_repo(repo: &String, pkgname: &String, pkg_md_path: &Path) -> Result<(), bool> {
match parser::get_repo_addr(repo) {
Ok(repo_addr) => {
let rsync_command = format!(
"rsync --include='{}.md' --exclude='*' {} {}",
pkgname,
repo_addr,
pkg_md_path.to_str().unwrap()
);
let rsync_output = Command::new("sh")
.arg("-c")
.arg(rsync_command)
.output()
.expect("Failed to execute rsync");
if !rsync_output.status.success() {
eprintln!("broken repo: {}", repo);
return Err(false);
}
if !pkg_md_path.exists() {
eprintln!("not found {} in {} repo", pkgname, repo);
return Err(true);
}
Ok(())
}
Err(e) => {
eprintln!("Repository {} not found: {}", repo, e);
return Err(true);
}
}
}
fn check_build_dependency(repo: &String, pkg_md_path: &Path) -> Result<(), bool> {
let deps = match parser::get_build_deps(&pkg_md_path) {
Ok(deps) => deps,
@ -126,7 +30,7 @@ fn check_build_dependency(repo: &String, pkg_md_path: &Path) -> Result<(), bool>
for dependency in deps.lines() {
if !dependency.trim().is_empty() {
if !Path::new("/pkg").join(repo).join(dependency).exists() {
if !super::get_aeropkg_base().join(repo).join(dependency).exists() {
match install(repo, &dependency.to_string()) {
Ok(()) => {}
Err(_) => {process::exit(1) }
@ -144,290 +48,23 @@ fn check_run_dependency(pkg_md_path: &Path) -> Result<(), bool> {
Err(_) => { return Ok(()) }
};
let repo_list = match parser::get_repo_list() {
Ok(repos) => repos,
Err(e) => {
eprintln!("Failed to get repository list: {}", e);
return Err(false)
}
};
for dependency in deps.split_whitespace() {
let repo_list = parser::repoinfo::get_repo_list();
for pkgname in deps.split_whitespace() {
let mut found = false;
for repo_name in &repo_list {
let path = format!("/pkg/{}/{}/", repo_name, dependency);
if Path::new(&path).exists() {
let path = super::get_aeropkg_base().join(repo_name).join(pkgname);
if path.exists() {
found = true;
break;
}
}
if !found {
install_all(&dependency.to_string());
install(&pkginfo::get_priority_repo(pkgname.to_string()), &pkgname.to_string());
}
}
Ok(())
}
fn download(pkgname: &String, pkg_md_path: &Path) -> Result<(), bool> {
let url = match parser::get_url(pkg_md_path) {
Ok(url) => url,
Err(e) => {
eprintln!("Failed to parse URL: {}", e);
return Err(false);
}
};
let src = PathBuf::from("/pkg/src").join(pkgname);
if src.exists() {
let src_url = fs::read_to_string(src.join("aeropkg.download-url")).unwrap_or("".to_string());
if url == src_url {
return Ok(())
} else {
println!("url:\n{}\nend url", url);
println!("src url:\n{}\nend src url", src_url);
fs::remove_dir_all(&src).unwrap();
}
}
if let Err(e) = fs::create_dir_all(&src) {
eprintln!("Failed to create directory {}: {}", &src.display(), e);
return Err(false);
}
if !url.ends_with(".git") {
let compress_flag = if url.ends_with(".bz2") {
"--bzip2"
} else if url.ends_with(".xz") {
"--xz"
} else if url.ends_with(".lz") {
"--lzip"
} else if url.ends_with(".lzma") {
"--lzma"
} else if url.ends_with(".lzo") {
"--lzop"
} else if url.ends_with(".zst") {
"--zstd"
} else if url.ends_with(".gz") {
"--gzip"
} else {
eprintln!("Unsupported compression format for URL: {}", url);
return Err(false);
};
let wget_output = Command::new("wget")
.arg("-O-")
.arg("-q")
.arg("--show-progress")
.arg(&url)
.stdout(Stdio::piped())
.spawn();
let tar_input = match wget_output {
Ok(child) => child.stdout.unwrap(),
Err(e) => {
eprintln!("Failed to execute wget: {}", e);
return Err(false);
}
};
let tar_status = Command::new("tar")
.arg("-x")
.arg(compress_flag)
.arg("-C")
.arg(&src)
.stdin(tar_input)
.status();
if tar_status.is_err() || !tar_status.unwrap().success() {
eprintln!("Failed to extract archive from URL: {}", url);
return Err(false);
}
let entries = fs::read_dir(&src).unwrap();
let dirs: Vec<_> = entries
.filter_map(|entry| entry.ok())
.filter(|entry| entry.file_type().map_or(false, |ft| ft.is_dir()))
.collect();
if dirs.len() == 1 {
let single_dir = dirs[0].path();
for entry in fs::read_dir(&single_dir).unwrap() {
let entry = entry.unwrap();
let dest = src.join(entry.file_name());
fs::rename(entry.path(), dest).unwrap();
}
fs::remove_dir(single_dir).unwrap();
}
} else {
let git_status = Command::new("git")
.arg("clone")
.arg(&url)
.arg(&src)
.status();
if git_status.is_err() || !git_status.unwrap().success() {
eprintln!("Failed to clone git repository from URL: {}", url);
return Err(false);
}
}
fs::write(src.join("aeropkg.download-url"), &url).unwrap();
Ok(())
}
fn patch(
pkgname: &String,
src_dir: &Path,
pkg_md_path: &Path,
) -> Result<(), bool> {
let patch_script = parser::get_patch_script(pkg_md_path).unwrap_or("".to_string());
if src_dir.join("aeropkg.applied-patch").exists() {
let src_patch = fs::read_to_string(src_dir.join("aeropkg.applied-patch")).unwrap_or("".to_string());
if patch_script == src_patch {
return Ok(())
} else {
println!("patch:\n{}\nend patch", patch_script);
println!("src patch:\n{}\nend src patch", src_patch);
fs::remove_dir_all(src_dir).unwrap();
download(&pkgname, &pkg_md_path)?;
}
}
if patch_script == "" { return Ok(()) }
let output = Command::new("zsh")
.arg("-c")
.arg(&patch_script)
.current_dir(src_dir)
.output();
if let Err(e) = output {
eprintln!("Failed to execute patch script: {}", e);
return Err(false);
}
let output = output.unwrap();
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
eprintln!("Script failed with error: {}", stderr);
return Err(false);
}
fs::write(src_dir.join("aeropkg.applied-patch"), &patch_script).unwrap();
Ok(())
}
fn build(
repo: &String,
pkgname: &String,
src_dir: &Path,
pkg_md_path: &Path,
) -> Result<(), bool> {
let build_script = match parser::get_build_script(pkg_md_path) {
Ok(script) => script,
Err(error) => {
eprintln!("Failed to parse build script: {}", error);
return Err(false);
}
};
if src_dir.join("aeropkg.applied-build").exists() {
let src_build = fs::read_to_string(src_dir.join("aeropkg.applied-build")).unwrap_or("".to_string());
if build_script == src_build {
return Ok(())
} else {
println!("build:\n{}\nend build", build_script);
println!("src build:\n{}\nend src build", src_build);
fs::remove_dir_all(src_dir).unwrap();
download(&pkgname, &pkg_md_path)?;
patch(&pkgname, &src_dir, &pkg_md_path)?;
}
}
let output = Command::new("zsh")
.arg("-c")
.arg(&build_script)
.current_dir(src_dir)
.output();
if let Err(e) = output {
eprintln!("Failed to execute build script: {}", e);
return Err(false);
}
let output = output.unwrap();
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
eprintln!("Script failed with error:\n{}", stderr);
std::process::exit(1);
}
let dest_dir = PathBuf::from("/pkg").join(repo).join(pkgname);
if let Err(e) = fs::create_dir_all(&dest_dir) {
eprintln!("Failed to create destination directory: {}", e);
return Err(false);
}
let dest_path = dest_dir.join("build-script.md");
fs::remove_file(&dest_path).expect("");
if let Err(e) = fs::hard_link(pkg_md_path, &dest_path) {
eprintln!("Failed to copy build script to destination: {}", e);
return Err(false);
}
fs::write(src_dir.join("aeropkg.build-script"), &build_script).unwrap();
Ok(())
}
fn config(
src_dir: &Path,
pkg_md_path: &Path
) -> Result<(), bool> {
let config_script = match parser::get_config_script(pkg_md_path) {
Ok(script) => script,
Err(_) => { return Ok(()) }
};
let output = Command::new("zsh")
.arg("-c")
.arg(&config_script)
.current_dir(src_dir)
.output();
if let Err(e) = output {
eprintln!("Failed to execute config script: {}", e);
return Err(false);
}
let output = output.unwrap();
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
eprintln!("Script failed with error: {}", stderr);
return Err(false);
}
Ok(())
}
fn hook(repo: &String, pkgname: &String) {
let pkg_dir = PathBuf::from("/pkg").join(&repo).join(&pkgname);
let lib_dir = &pkg_dir.join("lib");
let lib64_dir = &pkg_dir.join("lib64");
let lib32_dir = &pkg_dir.join("lib32");
crate::utils::mv::mv(&lib64_dir, &lib_dir).unwrap();
crate::utils::mv::mv(&lib32_dir, &lib_dir).unwrap();
fs::remove_dir_all(&lib64_dir).unwrap();
fs::remove_dir_all(&lib32_dir).unwrap();
}

View File

@ -1,10 +1,9 @@
use std::path::PathBuf;
use crate::utils::hardcopy::hardcopy_handler;
use crate::utils::fs::hardcopy::hardcopy_handler;
use crate::utils::shell::*;
pub fn link(repo: &String, pkgname: &String) -> Result<(), String> {
let source = PathBuf::from("/pkg").join(repo).join(pkgname);
if source.join("disabled").exists() { return Ok(()) }
let source = crate::commands::get_aeropkg_base().join(repo).join(pkgname);
if source.join("disabled").exists() { println!("Can't link disabled package: {}", &pkgname); return Ok(()) }
let destination = source.parent()
.ok_or("Failed to get parent directory for path")?
@ -12,6 +11,7 @@ pub fn link(repo: &String, pkgname: &String) -> Result<(), String> {
let dirs_to_copy = vec![
("bin"),
("sbin"),
("lib"),
("libexec"),
("include"),

View File

@ -4,6 +4,7 @@ pub mod link;
pub mod disable;
pub mod enable;
pub mod trim;
pub mod run;
use std::path::PathBuf;
@ -16,3 +17,10 @@ pub fn get_etc_path() -> PathBuf {
PathBuf::from(env!("AEROPKG_HOME")).join("etc")
}
pub fn get_aeropkg_home() -> PathBuf {
PathBuf::from(env!("AEROPKG_HOME"))
}
pub fn get_aeropkg_base() -> PathBuf {
PathBuf::from(env!("AEROPKG_BASE"))
}

93
src/commands/run/build.rs Normal file
View File

@ -0,0 +1,93 @@
use std::fs;
use std::os::unix::fs::MetadataExt;
use crate::utils::parser::{self, env::get_install_env};
use crate::utils::shell::{run_install_script,run_install_script_hook};
use crate::commands::link::link;
use super::download::download;
use super::patch::patch;
pub fn build(
repo: &String,
pkgname: &String,
) -> Result<(), bool> {
let src_dir = &crate::commands::get_aeropkg_base().join("src").join(&pkgname);
let pkg_md_path = &crate::commands::get_var_path().join(format!("{}/{}.md", &repo, &pkgname));
let builded_pkg_md_path = crate::commands::get_aeropkg_base().join(&repo).join(&pkgname).join("build-script.md");
if builded_pkg_md_path.exists() {
if fs::metadata(&builded_pkg_md_path).unwrap().ino() == fs::metadata(&pkg_md_path).unwrap().ino() {
println!("package {} already installed in {} repo", &pkgname, &repo);
return Ok(())
}
}
let build_script = match parser::get_build_script(pkg_md_path) {
Ok(script) => script,
Err(error) => {
eprintln!("Failed to parse build script: {}", error);
return Err(false);
}
};
if src_dir.join("aeropkg.applied-build").exists() {
let src_build = fs::read_to_string(src_dir.join("aeropkg.applied-build")).unwrap_or("".to_string());
if build_script == src_build {
return Ok(())
} else {
println!("build:\n{}\nend build", build_script);
println!("src build:\n{}\nend src build", src_build);
fs::remove_dir_all(src_dir).unwrap();
download(&repo, &pkgname)?;
patch(&repo, &pkgname)?;
}
}
let full_env = get_install_env(repo, pkgname, pkg_md_path, "build");
run_install_script(&build_script, src_dir, &full_env)?;
run_install_script_hook(repo, "build", &full_env)?;
let dest_dir = crate::commands::get_aeropkg_base().join(repo).join(pkgname);
if let Err(e) = fs::create_dir_all(&dest_dir) {
eprintln!("Failed to create destination directory: {}", e);
return Err(false);
}
let dest_path = dest_dir.join("build-script.md");
fs::remove_file(&dest_path).ok();
if let Err(e) = fs::hard_link(pkg_md_path, &dest_path) {
eprintln!("Failed to copy build script to destination: {}", e);
return Err(false);
}
fs::write(src_dir.join("aeropkg.build-script"), &build_script).unwrap();
let link_flag = full_env.get("disable").map_or(true, |v| v != "true");
if !link_flag {
let _ = fs::File::create(crate::commands::get_aeropkg_base().join(repo).join(pkgname).join("disabled"));
}
link(&repo, &pkgname).expect("Failed link package");
let save_source_flag = full_env.get("save_source").map_or(false, |v| v == "true");
if !save_source_flag {
if let Err(e) = fs::remove_dir_all(&src_dir) {
eprintln!("Failed to remove source directory: {}", e);
return Err(false);
}
}
hook(&repo, &pkgname);
Ok(())
}
fn hook(repo: &String, pkgname: &String) {
let pkg_dir = crate::commands::get_aeropkg_base().join(&repo).join(&pkgname);
let lib_dir = &pkg_dir.join("lib");
let lib64_dir = &pkg_dir.join("lib64");
let lib32_dir = &pkg_dir.join("lib32");
crate::utils::fs::mv::mv(&lib64_dir, &lib_dir).ok();
crate::utils::fs::mv::mv(&lib32_dir, &lib_dir).ok();
fs::remove_dir_all(&lib64_dir).ok();
fs::remove_dir_all(&lib32_dir).ok();
}

View File

@ -0,0 +1,25 @@
use crate::commands::link::link;
use crate::utils::parser::{self, env::get_install_env};
use crate::utils::shell::{run_install_script,run_install_script_hook};
pub fn config(
repo: &String,
pkgname: &String
) -> Result<(), bool> {
let src_dir = &crate::commands::get_aeropkg_base().join("src").join(&pkgname);
let pkg_md_path = &crate::commands::get_var_path().join(format!("{}/{}.md", &repo, &pkgname));
let config_script = match parser::get_config_script(pkg_md_path) {
Ok(script) => script,
Err(_) => { return Ok(()) }
};
let full_env = get_install_env(repo, pkgname, pkg_md_path, "config");
run_install_script(&config_script, src_dir, &full_env)?;
run_install_script_hook(repo, "config", &full_env)?;
link(&repo, &pkgname).expect("Failed link package");
Ok(())
}

View File

@ -0,0 +1,15 @@
use crate::utils::parser::{self, env::get_custom_env};
use crate::utils::shell::run_install_script;
pub fn custom(scriptname: &String, repo: &String, pkgname: &String) -> Result<(), bool> {
let src_dir = &crate::commands::get_aeropkg_base().join("src").join(&pkgname);
let pkg_md_path = &crate::commands::get_var_path().join(format!("{}/{}.md", &repo, &pkgname));
let config_script = parser::get_custom_script(pkg_md_path, scriptname);
let env = get_custom_env(repo, pkgname, pkg_md_path);
run_install_script(&config_script, src_dir, &env)?;
Ok(())
}

View File

@ -0,0 +1,149 @@
use std::fs;
use std::path::Path;
use std::process::{Command, Stdio};
use crate::utils::parser::{self, env::get_install_env};
use crate::utils::shell::run_install_script_hook;
pub fn download(repo: &String, pkgname: &String) -> Result<(), bool> {
let pkg_md_path = &crate::commands::get_var_path().join(format!("{}/{}.md", &repo, &pkgname));
if !pkg_md_path.exists() { upload_from_repo(&repo, &pkgname, &pkg_md_path)? }
let full_env = get_install_env(repo, pkgname, pkg_md_path, "download");
let url = match parser::pkginfo::get_url(pkg_md_path) {
Ok(url) => url,
Err(e) => {
eprintln!("Failed to parse URL: {}", e);
return Err(false);
}
};
let src = crate::commands::get_aeropkg_base().join("src").join(pkgname);
if src.exists() {
let src_url = fs::read_to_string(src.join("aeropkg.download-url")).unwrap_or("".to_string());
if url == src_url {
return Ok(())
} else {
fs::remove_dir_all(&src).unwrap();
}
}
if !url.ends_with(".git") {
if let Err(e) = fs::create_dir_all(&src) {
eprintln!("Failed to create directory {}: {}", &src.display(), e);
return Err(false);
}
let compress_flag = if url.ends_with(".bz2") {
"--bzip2"
} else if url.ends_with(".xz") {
"--xz"
} else if url.ends_with(".lz") {
"--lzip"
} else if url.ends_with(".lzma") {
"--lzma"
} else if url.ends_with(".lzo") {
"--lzop"
} else if url.ends_with(".zst") {
"--zstd"
} else if url.ends_with(".gz") {
"--gzip"
} else {
eprintln!("Unsupported compression format for URL: {}", url);
return Err(false);
};
let wget_output = Command::new("wget")
.arg("-O-")
.arg("-q")
.arg("--show-progress")
.arg(&url)
.envs(&full_env)
.stdout(Stdio::piped())
.spawn();
let tar_input = match wget_output {
Ok(child) => child.stdout.unwrap(),
Err(e) => {
eprintln!("Failed to execute wget: {}", e);
return Err(false);
}
};
let tar_status = Command::new("tar")
.arg("-x")
.arg(compress_flag)
.arg("-C")
.arg(&src)
.envs(&full_env)
.stdin(tar_input)
.status();
if tar_status.is_err() || !tar_status.unwrap().success() {
eprintln!("Failed to extract archive from URL: {}", url);
return Err(false);
}
let entries = fs::read_dir(&src).unwrap();
let dirs: Vec<_> = entries
.filter_map(|entry| entry.ok())
.filter(|entry| entry.file_type().map_or(false, |ft| ft.is_dir()))
.collect();
if dirs.len() == 1 {
let single_dir = dirs[0].path();
for entry in fs::read_dir(&single_dir).unwrap() {
let entry = entry.unwrap();
let dest = src.join(entry.file_name());
fs::rename(entry.path(), dest).unwrap();
}
fs::remove_dir(single_dir).unwrap();
}
} else {
let git_status = Command::new("git")
.arg("clone")
.arg(&url)
.arg(&src)
.status();
if git_status.is_err() || !git_status.unwrap().success() {
eprintln!("Failed to clone git repository from URL: {}", url);
return Err(false);
}
}
fs::write(src.join("aeropkg.download-url"), &url).unwrap();
run_install_script_hook(repo, "download", &full_env)?;
Ok(())
}
fn upload_from_repo(repo: &String, pkgname: &String, pkg_md_path: &Path) -> Result<(), bool> {
let repo_addr = parser::repoinfo::get_repo_addr(repo);
let rsync_command = format!(
"rsync --include='{}.md' --exclude='*' {} {}",
pkgname,
repo_addr,
pkg_md_path.to_str().unwrap()
);
let rsync_output = Command::new("sh")
.arg("-c")
.arg(rsync_command)
.output()
.expect("Failed to execute rsync");
if !rsync_output.status.success() {
eprintln!("broken repo: {}", repo);
return Err(false);
}
if !pkg_md_path.exists() {
eprintln!("not found {} in {} repo", pkgname, repo);
return Err(true);
}
Ok(())
}

6
src/commands/run/mod.rs Normal file
View File

@ -0,0 +1,6 @@
pub mod run;
pub mod download;
pub mod patch;
pub mod build;
pub mod config;
pub mod custom;

32
src/commands/run/patch.rs Normal file
View File

@ -0,0 +1,32 @@
use std::fs;
use crate::utils::parser::{self, env::get_install_env};
use crate::utils::shell::{run_install_script,run_install_script_hook};
use super::download::download;
pub fn patch(
repo: &String,
pkgname: &String,
) -> Result<(), bool> {
let src_dir = &crate::commands::get_aeropkg_base().join("src").join(&pkgname);
let pkg_md_path = &crate::commands::get_var_path().join(format!("{}/{}.md", &repo, &pkgname));
let patch_script = &parser::get_patch_script(pkg_md_path).unwrap_or("".to_string());
if src_dir.join("aeropkg.applied-patch").exists() {
let src_patch = &fs::read_to_string(src_dir.join("aeropkg.applied-patch")).unwrap_or("".to_string());
if patch_script == src_patch {
return Ok(())
} else {
fs::remove_dir_all(src_dir).unwrap();
download(&repo, &pkgname)?;
}
}
if patch_script == "" { return Ok(()) }
let full_env = get_install_env(repo, pkgname, pkg_md_path, "patch");
run_install_script(patch_script, src_dir, &full_env)?;
run_install_script_hook(repo, "patch", &full_env)?;
fs::write(src_dir.join("aeropkg.applied-patch"), &patch_script).unwrap();
Ok(())
}

14
src/commands/run/run.rs Normal file
View File

@ -0,0 +1,14 @@
use super::download::download;
use super::patch::patch;
use super::build::build;
use super::config::config;
pub fn run(script: &String, repo: &String, pkgname: &String) -> Result<(), bool> {
if script == "download" { download(&repo, &pkgname)?; return Ok(()) }
if script == "patch" { patch(&repo, &pkgname)?; return Ok(()) }
if script == "build" { build(&repo, &pkgname)?; return Ok(()) }
if script == "config" { config(&repo, &pkgname)?; return Ok(()) }
Ok(())
}

View File

@ -1,4 +1,4 @@
use std::path::{Path, PathBuf};
use std::path::Path;
use std::fs;
use std::os::unix::fs::MetadataExt;
use glob::Pattern;
@ -42,7 +42,7 @@ pub fn trim_handler(repo: &String, trim_date: i64) {
let rules = crate::utils::parser::get_trim_rules(&cfg_path).unwrap();
let pkg_dir = PathBuf::from("/pkg").join(repo);
let pkg_dir = super::get_aeropkg_base().join(repo);
trim(repo, &pkg_dir, trim_date, &rules).unwrap();
}
@ -62,7 +62,7 @@ fn rules_check(repo: &String, path: &Path, rules: &String) -> bool {
let wildcard_string = if rule.starts_with('/') {
rule.to_string()
} else {
format!("/pkg/{}/{}", repo, rule)
super::get_aeropkg_base().join(repo).join(rule).to_string_lossy().into_owned()
};
let path_str = path.to_str().unwrap_or("");

View File

@ -1,179 +1,33 @@
use clap::Command;
mod commands;
mod utils;
use chrono::NaiveDateTime;
fn main() {
let matches = clap::Command::new("pkg")
for (key, value) in utils::parser::env::get_global_env() {
unsafe { std::env::set_var(key, value) }
}
let matches = Command::new("pkg")
.version("1.0")
.about("Package manager for installing software from repositories")
.subcommand(
clap::Command::new("install")
.about("Install a package")
.arg(
clap::Arg::new("args")
.help("Repository and package name (optional repo)")
.required(true)
.num_args(1..=2)
.value_names(["repo", "pkgname"]),
),
)
.subcommand(
clap::Command::new("delete")
.about("Delete a package from a repository")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true),
)
.arg(
clap::Arg::new("recursive")
.short('R')
.long("recursive")
.help("Recursively delete the package")
.action(clap::ArgAction::SetTrue),
),
)
.subcommand(
clap::Command::new("link")
.about("Create package links and mount overlays")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true)
.index(2),
),
)
.subcommand(
clap::Command::new("trim")
.about("Remove unused files within a specified period")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("date")
.help("DD.MM.YYYY")
.required(true)
.index(2),
)
.arg(
clap::Arg::new("time")
.help("HH:mm:ss")
.required(false)
.index(3),
),
)
.subcommand(
clap::Command::new("disable")
.about("Disable package")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true)
.index(2),
),
)
.subcommand(
clap::Command::new("enable")
.about("Enable package")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true)
.index(2),
),
)
.subcommand(utils::command_handler::install::command())
.subcommand(utils::command_handler::run::command())
.subcommand(utils::command_handler::link::command())
.subcommand(utils::command_handler::delete::command())
.subcommand(utils::command_handler::disable::command())
.subcommand(utils::command_handler::enable::command())
.subcommand(utils::command_handler::trim::command())
.get_matches();
if let Some(install_matches) = matches.subcommand_matches("install") {
let args: Vec<&String> = install_matches.get_many::<String>("args").unwrap().collect();
match args.len() {
1 => {
let pkgname = args[0];
commands::install::install_all(pkgname);
}
2 => {
let repo = args[0];
let pkgname = args[1];
if let Err(_) = commands::install::install(repo, pkgname) { std::process::exit(1) }
}
_ => unreachable!(),
}
} else if let Some(delete_matches) = matches.subcommand_matches("delete") {
let repo = delete_matches.get_one::<String>("repo").unwrap();
let pkgname = delete_matches.get_one::<String>("pkgname").unwrap();
let recursive = delete_matches.get_flag("recursive");
if recursive {
commands::delete::delete_recursive(repo, pkgname);
} else {
commands::delete::delete(repo, pkgname);
}
} else if let Some(link_matches) = matches.subcommand_matches("link") {
let repo = link_matches.get_one::<String>("repo").unwrap();
let pkgname = link_matches.get_one::<String>("pkgname").unwrap();
match commands::link::link(&repo, &pkgname) {
Ok(_) => println!("link completed successfully."),
Err(e) => eprintln!("Error during link: {}", e),
}
} else if let Some(trim_matches) = matches.subcommand_matches("trim") {
let repo = trim_matches.get_one::<String>("repo").unwrap();
let date = trim_matches.get_one::<String>("date").unwrap();
let time = trim_matches.get_one::<String>("time").map(|s| s.as_str()).unwrap_or("00:00:00");
let datetime_str = format!("{} {}", date, time);
let datetime = NaiveDateTime::parse_from_str(&datetime_str, "%d.%m.%Y %H:%M:%S")
.expect("Invalid date or time format. Expected format: DD.MM.YYYY HH:mm:ss");
let trim_date = datetime.and_utc().timestamp();
commands::trim::trim_handler(&repo, trim_date);
} else if let Some(disable_matches) = matches.subcommand_matches("disable") {
let repo = disable_matches.get_one::<String>("repo").unwrap();
let pkgname = disable_matches.get_one::<String>("pkgname").unwrap();
match commands::disable::disable(&repo, &pkgname) {
Ok(_) => println!("disable completed successfully."),
Err(e) => eprintln!("Error during disable: {}", e),
}
} else if let Some(enable_matches) = matches.subcommand_matches("enable") {
let repo = enable_matches.get_one::<String>("repo").unwrap();
let pkgname = enable_matches.get_one::<String>("pkgname").unwrap();
match commands::enable::enable(&repo, &pkgname) {
Ok(_) => println!("enable completed successfully."),
Err(e) => eprintln!("Error during enable: {}", e),
}
} else {
println!("No command provided. Use `pkg --help` for usage information.");
match matches.subcommand() {
Some(("install", sub_m)) => utils::command_handler::install::install(sub_m),
Some(("run", sub_m)) => utils::command_handler::run::run(sub_m),
Some(("link", sub_m)) => utils::command_handler::link::link(sub_m),
Some(("delete", sub_m)) => utils::command_handler::delete::delete(sub_m),
Some(("disable", sub_m)) => utils::command_handler::disable::disable(sub_m),
Some(("enable", sub_m)) => utils::command_handler::enable::enable(sub_m),
Some(("trim", sub_m)) => utils::command_handler::trim::trim(sub_m),
_ => { println!("No command provided. Use `pkg --help` for usage information.") }
}
}

View File

@ -0,0 +1,36 @@
use crate::commands;
pub fn delete(matches: &clap::ArgMatches) {
let repo = matches.get_one::<String>("repo").unwrap();
let pkgname = matches.get_one::<String>("pkgname").unwrap();
let recursive = matches.get_flag("recursive");
if recursive {
commands::delete::delete_recursive(repo, pkgname);
} else {
commands::delete::delete(repo, pkgname);
}
}
pub fn command() -> clap::Command {
clap::Command::new("delete")
.about("Delete a package from a repository")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true),
)
.arg(
clap::Arg::new("recursive")
.short('R')
.long("recursive")
.help("Recursively delete the package")
.action(clap::ArgAction::SetTrue),
)
}

View File

@ -0,0 +1,29 @@
use crate::commands;
pub fn disable(matches: &clap::ArgMatches) {
let repo = matches.get_one::<String>("repo").unwrap();
let pkgname = matches.get_one::<String>("pkgname").unwrap();
match commands::disable::disable(&repo, &pkgname) {
Ok(_) => println!("disable completed successfully."),
Err(e) => eprintln!("Error during disable: {}", e),
}
}
pub fn command() -> clap::Command {
clap::Command::new("disable")
.about("Disable package")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true)
.index(2),
)
}

View File

@ -0,0 +1,29 @@
use crate::commands;
pub fn enable(matches: &clap::ArgMatches) {
let repo = matches.get_one::<String>("repo").unwrap();
let pkgname = matches.get_one::<String>("pkgname").unwrap();
match commands::enable::enable(&repo, &pkgname) {
Ok(_) => println!("enable completed successfully."),
Err(e) => eprintln!("Error during enable: {}", e),
}
}
pub fn command() -> clap::Command {
clap::Command::new("enable")
.about("Enable package")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true)
.index(2),
)
}

View File

@ -0,0 +1,33 @@
use crate::commands;
use crate::utils::parser::pkginfo;
pub fn install(matches: &clap::ArgMatches) {
let args: Vec<&String> = matches.get_many::<String>("args").unwrap().collect();
match args.len() {
1 => {
let pkgname = args[0];
commands::install::install(&pkginfo::get_priority_repo(pkgname.to_string()), &pkgname.to_string());
}
2 => {
let repo = args[0];
let pkgname = args[1];
if let Err(_) = commands::install::install(repo, pkgname) { std::process::exit(1) }
}
_ => unreachable!(),
}
}
pub fn command() -> clap::Command {
clap::Command::new("install")
.about("Install a package")
.arg(
clap::Arg::new("args")
.help("Repository and package name (optional repo)")
.required(true)
.num_args(1..=2)
.value_names(["repo", "pkgname"]),
)
}

View File

@ -0,0 +1,29 @@
use crate::commands;
pub fn link(matches: &clap::ArgMatches) {
let repo = matches.get_one::<String>("repo").unwrap();
let pkgname = matches.get_one::<String>("pkgname").unwrap();
match commands::link::link(&repo, &pkgname) {
Ok(_) => println!("link completed successfully."),
Err(e) => eprintln!("Error during link: {}", e),
}
}
pub fn command() -> clap::Command {
clap::Command::new("link")
.about("Create package links and mount overlays")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("pkgname")
.help("Package name")
.required(true)
.index(2),
)
}

View File

@ -0,0 +1,7 @@
pub mod install;
pub mod delete;
pub mod disable;
pub mod enable;
pub mod link;
pub mod run;
pub mod trim;

View File

@ -0,0 +1,43 @@
use crate::commands;
use crate::utils::parser::pkginfo;
// pkg run <script> [repo] <pkgname>
// pkg run kernel_change gnu linux-6.17
// pkg run kernel_change linux-6.17
//
// Should pkg take md_path from the already installed package?
// Or should it check both?
pub fn run(matches: &clap::ArgMatches) {
let args: Vec<&String> = matches.get_many::<String>("args").unwrap().collect();
match args.len() {
2 => {
let scriptname = args[0];
let pkgname = args[1];
if let Err(_) = commands::run::custom::custom(scriptname, &pkginfo::get_priority_repo(pkgname.to_string()), pkgname) { std::process::exit(1) }
}
3 => {
let scriptname = args[0];
let repo = args[1];
let pkgname = args[2];
if let Err(_) = commands::run::custom::custom(scriptname, repo, pkgname) { std::process::exit(1) }
}
_ => unreachable!(),
}
}
pub fn command() -> clap::Command {
clap::Command::new("run")
.about("Install a package")
.arg(
clap::Arg::new("args")
.help("Repository and package name (optional repo)")
.required(true)
.num_args(1..=2)
.value_names(["repo", "pkgname"]),
)
}

View File

@ -0,0 +1,40 @@
use crate::commands;
use chrono::NaiveDateTime;
pub fn trim(matches: &clap::ArgMatches) {
let repo = matches.get_one::<String>("repo").unwrap();
let date = matches.get_one::<String>("date").unwrap();
let time = matches.get_one::<String>("time").map(|s| s.as_str()).unwrap_or("00:00:00");
let datetime_str = format!("{} {}", date, time);
let datetime = NaiveDateTime::parse_from_str(&datetime_str, "%d.%m.%Y %H:%M:%S")
.expect("Invalid date or time format. Expected format: DD.MM.YYYY HH:mm:ss");
let trim_date = datetime.and_utc().timestamp();
commands::trim::trim_handler(&repo, trim_date);
}
pub fn command() -> clap::Command {
clap::Command::new("trim")
.about("Remove unused files within a specified period")
.arg(
clap::Arg::new("repo")
.help("Repository name")
.required(true)
.index(1),
)
.arg(
clap::Arg::new("date")
.help("DD.MM.YYYY")
.required(true)
.index(2),
)
.arg(
clap::Arg::new("time")
.help("HH:mm:ss")
.required(false)
.index(3),
)
}

View File

View File

@ -11,7 +11,7 @@ pub fn deletecopy(source: &Path, destination: &Path) -> io::Result<()> {
if metadata.file_type().is_file() {
if let Ok(dest_metadata) = fs::metadata(destination) {
if dest_metadata.ino() == metadata.ino() {
fs::remove_file(destination)?;
fs::remove_file(destination).ok();
return Ok(());
}
}
@ -26,7 +26,7 @@ pub fn deletecopy(source: &Path, destination: &Path) -> io::Result<()> {
}
})?;
} else if metadata.file_type().is_symlink() {
fs::remove_file(destination)?;
fs::remove_file(destination).ok();
}
Ok(())

View File

@ -26,7 +26,7 @@ fn hardcopy(
}
}
match crate::utils::parser::get_index_conflict(destination) {
match crate::utils::parser::pkginfo::get_index_conflict(destination) {
Ok(index_source) => {
if index_source == source {
fs::remove_file(destination)?;
@ -172,7 +172,7 @@ fn append_index_block(source: &Path, destination: &Path) -> io::Result<()> {
let source_components: Vec<_> = source.iter().collect();
let base_system_folder = source_components[4].to_str().unwrap();
let index_conflict_path = Path::new("/pkg/gnu/aeropkg/etc/index-conflict.md");
let index_conflict_path = crate::commands::get_etc_path().join("index-conflict.md");
let content = fs::read_to_string(&index_conflict_path)?;
let start_marker = format!("``` cfg *** {} ***", base_system_folder);

3
src/utils/fs/mod.rs Normal file
View File

@ -0,0 +1,3 @@
pub mod hardcopy;
pub mod deletecopy;
pub mod mv;

View File

View File

@ -1,5 +1,5 @@
pub mod hardcopy;
pub mod fs;
pub mod parser;
pub mod deletecopy;
pub mod shell;
pub mod mv;
pub mod command_handler;

View File

@ -1,240 +0,0 @@
use std::fs;
use std::io::{self, BufRead};
use std::path::{Path,PathBuf};
pub fn get_name<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let first_line = read_first_line(file_path)?;
Ok(first_line.split_whitespace().next().unwrap_or("").to_string())
}
pub fn get_version<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let first_line = read_first_line(file_path)?;
Ok(first_line
.split_whitespace()
.nth(1)
.unwrap_or("")
.to_string())
}
pub fn get_url<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let lines = read_lines(&file_path)?;
let third_line = lines.get(2)
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "File has less than 3 lines"))?
.trim()
.to_string();
let name = get_name(&file_path)?;
let version = get_version(&file_path)?;
let url = third_line
.replace("{name}", &name)
.replace("{version}", &version);
Ok(url)
}
pub fn get_build_deps<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
extract_block(file_path, "``` cfg *** build deps ***", "```")
}
pub fn get_run_deps<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
extract_block(file_path, "``` cfg *** run deps ***", "```")
}
pub fn get_build_script<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
extract_block(file_path, "``` sh *** build ***", "```")
}
pub fn get_config_script<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
extract_block(file_path, "``` sh *** config ***", "```")
}
pub fn get_patch_script<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
extract_block(file_path, "``` sh *** config ***", "```")
}
pub fn get_trim_rules<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
extract_block(file_path, "``` cfg *** Trim rules ***", "```")
}
pub fn get_repo_list() -> io::Result<Vec<String>> {
let file_path = crate::commands::get_etc_path().join("aeropkg.md");
let block = extract_block(file_path, "``` cfg *** Repository list and priority ***", "```")?;
let mut repo_list = Vec::new();
for line in block.lines() {
let trimmed_line = line.trim();
if !trimmed_line.is_empty() {
let parts: Vec<&str> = trimmed_line.split_whitespace().collect();
if let Some(repo) = parts.first() {
repo_list.push(repo.to_string());
}
}
}
Ok(repo_list)
}
pub fn get_repo_addr(repo: &str) -> io::Result<String> {
let file_path = crate::commands::get_etc_path().join("aeropkg.md");
let block = extract_block(file_path, "``` cfg *** Repository list and priority ***", "```")?;
for line in block.lines() {
let trimmed_line = line.trim();
if !trimmed_line.is_empty() {
let parts: Vec<&str> = trimmed_line.split_whitespace().collect();
if parts.len() >= 2 && parts[0] == repo {
return Ok(parts[1].to_string());
}
}
}
Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Repository '{}' not found in the repository list", repo),
))
}
pub fn get_use_status(repo: &str, dependency: &str) -> bool {
let base_path = format!("/pkg/{}", repo);
let path = Path::new(&base_path);
if !path.exists() || !path.is_dir() {
return false;
}
let mut match_count = 0;
if let Ok(entries) = fs::read_dir(path) {
for entry in entries.flatten() {
let subdir_path = entry.path();
if subdir_path.is_dir() {
let script_path = subdir_path.join("build-script.md");
if let Ok(lines) = read_lines(&script_path) {
for line in lines.iter() {
if line.trim() == format!("={}", dependency) {
match_count += 1;
if match_count > 1 {
return true
}
break;
}
}
}
}
}
}
return false
}
pub fn get_index_conflict<P: AsRef<Path>>(destination: P) -> io::Result<PathBuf> {
let destination_path = destination.as_ref();
let parts: Vec<&str> = destination_path
.iter()
.map(|component| component.to_str().unwrap_or(""))
.collect();
if parts.len() < 4 {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"Invalid destination path format",
));
}
let system_struct_folder = parts[3]; // bin, sbin, include, lib, share
let etc = Path::new("/pkg/gnu/aeropkg/etc");
let cfg_path = etc.join("index-conflict.md");
let start_marker = format!("``` cfg *** {} ***", system_struct_folder);
let end_marker = "```";
let block_content = extract_block(&cfg_path, &start_marker, end_marker)?;
let destination_str = destination_path.to_str().ok_or_else(|| {
io::Error::new(io::ErrorKind::InvalidInput, "Failed to convert destination path to string")
})?;
for line in block_content.lines() {
let trimmed_line = line.trim();
if trimmed_line.starts_with(destination_str) {
let mut words = trimmed_line.split_whitespace();
if let Some(_) = words.next() {
if let Some(path_source) = words.next() {
return Ok(PathBuf::from(path_source));
}
}
}
}
Err(io::Error::new(
io::ErrorKind::NotFound,
format!(
"No matching line found for destination: {}",
destination_path.display()
),
))
}
fn read_first_line<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let file = fs::File::open(file_path)?;
let reader = io::BufReader::new(file);
if let Some(line) = reader.lines().next() {
line
} else {
Err(io::Error::new(io::ErrorKind::InvalidData, "File is empty"))
}
}
fn read_lines<P: AsRef<Path>>(file_path: P) -> io::Result<Vec<String>> {
let file = fs::File::open(file_path)?;
let reader = io::BufReader::new(file);
reader.lines().collect()
}
fn extract_block<P: AsRef<Path>>(
file_path: P,
start_marker: &str,
end_marker: &str,
) -> io::Result<String> {
let lines = read_lines(file_path)?;
let mut block_started = false;
let mut result = Vec::new();
for line in lines {
if line.trim() == start_marker {
block_started = true;
continue;
}
if block_started {
if line.trim() == end_marker {
break;
}
result.push(line.trim().to_string());
}
}
if result.is_empty() {
Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Block between '{}' and '{}' not found", start_marker, end_marker),
))
} else {
Ok(result.join("\n"))
}
}

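For reference, `get_index_conflict` scans the named `cfg` block in `index-conflict.md` line by line: the first word of each line is a destination prefix, the second is the source path to return. A self-contained sketch of that matching, using hypothetical paths:

``` rust
fn demo_conflict_lookup() {
    // Hypothetical block body in the format get_index_conflict expects:
    // "<destination> <source>" per line; the first line whose text starts
    // with the destination path wins, and its second word is returned.
    let block = "\
/pkg/gnu/bin/ld /pkg/gnu/binutils/bin/ld
/pkg/gnu/bin/cc /pkg/gnu/gcc/bin/gcc";
    let destination = "/pkg/gnu/bin/cc";
    let source = block
        .lines()
        .map(str::trim)
        .find(|line| line.starts_with(destination))
        .and_then(|line| line.split_whitespace().nth(1));
    assert_eq!(source, Some("/pkg/gnu/gcc/bin/gcc"));
}
```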
86
src/utils/parser/env.rs Normal file
View File

@ -0,0 +1,86 @@
use std::io;
use std::env;
use std::path::Path;
use std::collections::HashMap;
use super::*;
/// Environment for an install stage. Later `extend` calls overwrite earlier
/// keys, so precedence (lowest to highest) is: repo/pkgname, global env block,
/// package env block, repo block, stage block, repo+stage block, and finally
/// the current process environment.
pub fn get_install_env(repo: &String, pkgname: &String, pkg_md_path: &Path, stage: &str) -> HashMap<String, String> {
    let mut full_env = HashMap::new();
    full_env.insert("repo".to_string(), repo.clone());
    full_env.insert("pkgname".to_string(), pkgname.clone());
    if let Ok(global_env) = get_global_env_string() { full_env.extend(global_env) }
    if let Ok(pkg_env) = get_pkg_env(pkg_md_path) { full_env.extend(pkg_env) }
    if let Ok(repo_env) = get_repo_env(repo) { full_env.extend(repo_env) }
    if let Ok(stage_env) = get_stage_env(stage) { full_env.extend(stage_env) }
    if let Ok(repo_stage_env) = get_repo_and_stage_env(repo, stage) { full_env.extend(repo_stage_env) }
    full_env.extend(env::vars());
    full_env
}
/// Environment for custom (non-stage) scripts: same layering as above, minus
/// the stage-specific blocks.
pub fn get_custom_env(repo: &String, pkgname: &String, pkg_md_path: &Path) -> HashMap<String, String> {
    let mut full_env = HashMap::new();
    full_env.insert("repo".to_string(), repo.clone());
    full_env.insert("pkgname".to_string(), pkgname.clone());
    if let Ok(global_env) = get_global_env_string() { full_env.extend(global_env) }
    if let Ok(pkg_env) = get_pkg_env(pkg_md_path) { full_env.extend(pkg_env) }
    if let Ok(repo_env) = get_repo_env(repo) { full_env.extend(repo_env) }
    full_env.extend(env::vars());
    full_env
}
/// Only the global `env *** env ***` block; no repo or package context.
pub fn get_global_env() -> HashMap<String, String> {
    let mut full_env = HashMap::new();
    if let Ok(global_env) = get_global_env_string() { full_env.extend(global_env) }
    full_env
}
fn get_global_env_string() -> io::Result<HashMap<String, String>> {
    let cfg_path = crate::commands::get_etc_path().join("aeropkg.md");
    let content = extract_block(&cfg_path, "``` env *** env ***", "```")?;
    Ok(parse_env_vars(&content))
}
fn parse_env_vars(content: &str) -> HashMap<String, String> {
content
.lines()
.filter_map(|line| {
let line = line.trim();
if line.is_empty() || line.starts_with('#') {
return None;
}
let parts: Vec<&str> = line.splitn(2, '=').collect();
if parts.len() != 2 {
return None;
}
Some((parts[0].to_string(), parts[1].to_string()))
})
.collect()
}
pub fn get_pkg_env(file_path: &Path) -> io::Result<HashMap<String, String>> {
let content = extract_block(file_path, "``` env *** env ***", "```")?;
Ok(parse_env_vars(&content))
}
pub fn get_repo_env(repo: &str) -> io::Result<HashMap<String, String>> {
let cfg_path = crate::commands::get_etc_path().join("aeropkg.md");
let content = extract_block(&cfg_path, &format!("``` env *** env {} ***", repo), "```")?;
Ok(parse_env_vars(&content))
}
pub fn get_stage_env(stage: &str) -> io::Result<HashMap<String, String>> {
let cfg_path = crate::commands::get_etc_path().join("aeropkg.md");
let content = extract_block(&cfg_path, &format!("``` env *** env {} ***", stage), "```")?;
Ok(parse_env_vars(&content))
}
pub fn get_repo_and_stage_env(repo: &str, stage: &str) -> io::Result<HashMap<String, String>> {
let cfg_path = crate::commands::get_etc_path().join("aeropkg.md");
let content = extract_block(&cfg_path, &format!("``` env *** env {} {} ***", repo, stage), "```")?;
Ok(parse_env_vars(&content))
}

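A sketch of a call site for `get_install_env`; the repo, package name, and path are placeholders, and the comment summarizes the precedence produced by the successive `extend` calls above:

``` rust
use std::path::Path;

fn demo_build_env() {
    let repo = "gnu".to_string();
    let pkgname = "fzf".to_string();
    let pkg_md = Path::new("/pkg/gnu/aeropkg/var/gnu/fzf.md");
    // Later extend() calls overwrite earlier keys, so the effective precedence
    // (lowest to highest) is: repo/pkgname, global block, package block,
    // repo block, stage block, repo+stage block, then the process environment.
    let env = crate::utils::parser::env::get_install_env(&repo, &pkgname, pkg_md, "build");
    for (key, value) in &env {
        println!("{}={}", key, value);
    }
}
```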
77
src/utils/parser/mod.rs Normal file
View File

@ -0,0 +1,77 @@
pub mod env;
pub mod pkginfo;
pub mod repoinfo;
use std::fs;
use std::io::{self, BufRead};
use std::path::Path;
pub fn get_stage_hook(stage: &str) -> io::Result<String> {
let cfg_path = &crate::commands::get_etc_path().join("aeropkg.md");
extract_block(cfg_path, &format!("``` sh *** hook {} ***", &stage), "```")
}
pub fn get_repo_and_stage_hook(repo: &str, stage: &str) -> io::Result<String> {
let cfg_path = &crate::commands::get_etc_path().join("aeropkg.md");
extract_block(cfg_path, &format!("``` sh *** hook {} {} ***", &repo, &stage), "```")
}
pub fn get_trim_rules(file_path: &Path) -> io::Result<String> { extract_block(file_path, "``` cfg *** Trim rules ***", "```") }
pub fn get_build_deps(file_path: &Path) -> io::Result<String> { extract_block(file_path, "``` cfg *** build deps ***", "```") }
pub fn get_run_deps(file_path: &Path) -> io::Result<String> { extract_block(file_path, "``` cfg *** run deps ***", "```") }
pub fn get_build_script(file_path: &Path) -> io::Result<String> { extract_block(file_path, "``` sh *** build ***", "```") }
pub fn get_config_script(file_path: &Path) -> io::Result<String> { extract_block(file_path, "``` sh *** config ***", "```") }
pub fn get_patch_script(file_path: &Path) -> io::Result<String> { extract_block(file_path, "``` sh *** patch ***", "```") }
pub fn get_custom_script(file_path: &Path, scriptname: &String) -> String { extract_block(file_path, &format!("``` sh *** {} ***", scriptname), "```").expect(&format!("Can't get custom script: {}", &scriptname)) }
fn extract_block(
file_path: &Path,
start_marker: &str,
end_marker: &str,
) -> io::Result<String> {
let lines = read_lines(file_path)?;
let mut block_started = false;
let mut result = Vec::new();
for line in lines {
if line.trim() == start_marker {
block_started = true;
continue;
}
if block_started {
if line.trim() == end_marker {
break;
}
result.push(line.trim().to_string());
}
}
if result.is_empty() {
Err(io::Error::new(
io::ErrorKind::InvalidData,
format!("Block between '{}' and '{}' not found", start_marker, end_marker),
))
} else {
Ok(result.join("\n"))
}
}
fn read_first_line<P: AsRef<Path>>(file_path: P) -> io::Result<String> {
let file = fs::File::open(file_path)?;
let reader = io::BufReader::new(file);
if let Some(line) = reader.lines().next() {
line
} else {
Err(io::Error::new(io::ErrorKind::InvalidData, "File is empty"))
}
}
fn read_lines<P: AsRef<Path>>(file_path: P) -> io::Result<Vec<String>> {
let file = fs::File::open(file_path)?;
let reader = io::BufReader::new(file);
reader.lines().collect()
}

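A usage sketch for the block getters above; the package file path is a placeholder and error handling is kept minimal:

``` rust
use std::path::Path;

fn demo_scripts() -> std::io::Result<()> {
    let pkg_md = Path::new("/pkg/gnu/aeropkg/var/gnu/ell.md");
    // Each getter returns the body of one fenced block, located by its
    // marker line (e.g. "``` sh *** build ***"), joined into a single string.
    let build = crate::utils::parser::get_build_script(pkg_md)?;
    let config = crate::utils::parser::get_config_script(pkg_md).unwrap_or_default();
    println!("build script:\n{}", build);
    println!("config script:\n{}", config);
    Ok(())
}
```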
133
src/utils/parser/pkginfo.rs Normal file
View File

@ -0,0 +1,133 @@
use std::fs;
use std::io;
use std::path::{Path,PathBuf};
use super::*;
pub fn get_name<P: AsRef<Path>>(pkg_file_path: P) -> io::Result<String> {
let first_line = read_first_line(pkg_file_path)?;
Ok(first_line.split_whitespace().next().unwrap_or("").to_string())
}
pub fn get_version<P: AsRef<Path>>(pkg_file_path: P) -> io::Result<String> {
let first_line = read_first_line(pkg_file_path)?;
Ok(first_line
.split_whitespace()
.nth(1)
.unwrap_or("")
.to_string())
}
pub fn get_url(pkg_file_path: &Path) -> io::Result<String> {
let lines = read_lines(&pkg_file_path)?;
let third_line = lines.get(2)
.ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, "File has less than 3 lines"))?
.trim()
.to_string();
let name = get_name(&pkg_file_path)?;
let version = get_version(&pkg_file_path)?;
let url = third_line
.replace("{name}", &name)
.replace("{version}", &version);
Ok(url)
}
/// True when `pkgname` is listed as a dependency (a line of the form
/// `=<pkgname>`) in the build-script.md of more than one package in the repository.
pub fn get_use_status(repo: &str, pkgname: &str) -> bool {
    let base_path = crate::commands::get_aeropkg_base().join(repo);
    if !base_path.is_dir() {
        return false;
    }
    let mut match_count = 0;
    if let Ok(entries) = fs::read_dir(&base_path) {
        for entry in entries.flatten() {
            let subdir_path = entry.path();
            if subdir_path.is_dir() {
                let script_path = subdir_path.join("build-script.md");
                if let Ok(lines) = read_lines(&script_path) {
                    for line in lines.iter() {
                        if line.trim() == format!("={}", pkgname) {
                            match_count += 1;
                            if match_count > 1 {
                                return true;
                            }
                            break;
                        }
                    }
                }
            }
        }
    }
    false
}
pub fn get_priority_repo(pkgname: String) -> String {
let repo_list = repoinfo::get_repo_list();
let var_path = crate::commands::get_var_path();
for repo in repo_list {
let pkg_path = var_path.join(&repo).join(format!("{}.md", &pkgname));
if pkg_path.exists() {
return repo;
}
}
panic!("Package {} not found in any available repository", pkgname);
}
pub fn get_index_conflict<P: AsRef<Path>>(pkg_file_path: P) -> io::Result<PathBuf> {
    let pkg_file_path = pkg_file_path.as_ref();
    let parts: Vec<&str> = pkg_file_path
        .iter()
        .map(|component| component.to_str().unwrap_or(""))
        .collect();
    if parts.len() < 4 {
        return Err(io::Error::new(
            io::ErrorKind::InvalidInput,
            "Invalid package file path format",
        ));
    }
    let system_struct_folder = parts[3]; // bin, sbin, include, lib, share
    let etc = crate::commands::get_etc_path();
    let cfg_path = etc.join("index-conflict.md");
    let start_marker = format!("``` cfg *** {} ***", system_struct_folder);
    let block_content = extract_block(&cfg_path, &start_marker, "```")?;
    let pkg_file_path_str = pkg_file_path.to_str().ok_or_else(|| {
        io::Error::new(io::ErrorKind::InvalidInput, "Failed to convert package file path to string")
    })?;
    for line in block_content.lines() {
        let trimmed_line = line.trim();
        if trimmed_line.starts_with(pkg_file_path_str) {
            // Line format: "<destination> <source>"; return the source path.
            if let Some(path_source) = trimmed_line.split_whitespace().nth(1) {
                return Ok(PathBuf::from(path_source));
            }
        }
    }
    Err(io::Error::new(
        io::ErrorKind::NotFound,
        format!(
            "No matching line found for package file path: {}",
            pkg_file_path.display()
        ),
    ))
}
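`get_url` fills `{name}` and `{version}` placeholders in the URL line from the package file's first line; a self-contained sketch of that substitution with hypothetical values:

``` rust
fn demo_url_substitution() {
    // Mirrors the replace() calls in get_url, on made-up values.
    let name = "foo";
    let version = "1.2.3";
    let url_template = "https://example.org/{name}-{version}.tar.gz";
    let url = url_template
        .replace("{name}", name)
        .replace("{version}", version);
    assert_eq!(url, "https://example.org/foo-1.2.3.tar.gz");
}
```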

38
src/utils/parser/repoinfo.rs Normal file
View File

@ -0,0 +1,38 @@
use super::*;
pub fn get_repo_addr(repo: &str) -> String {
let file_path = &crate::commands::get_etc_path().join("aeropkg.md");
let block = extract_block(file_path, "``` cfg *** Repository list and priority ***", "```").expect("Can't parse repo list block");
for line in block.lines() {
let trimmed_line = line.trim();
if !trimmed_line.is_empty() {
let parts: Vec<&str> = trimmed_line.split_whitespace().collect();
if parts.len() >= 2 && parts[0] == repo {
return parts[1].to_string()
}
}
}
panic!("Repository '{}' not found in the repository list", repo);
}
pub fn get_repo_list() -> Vec<String> {
let file_path = &crate::commands::get_etc_path().join("aeropkg.md");
let block = extract_block(file_path, "``` cfg *** Repository list and priority ***", "```").expect("Can't parse repo list block");
let mut repo_list = Vec::new();
for line in block.lines() {
let trimmed_line = line.trim();
if !trimmed_line.is_empty() {
let parts: Vec<&str> = trimmed_line.split_whitespace().collect();
if let Some(repo) = parts.first() {
repo_list.push(repo.to_string());
}
}
}
repo_list
}

View File

@ -1,6 +1,9 @@
use std::path::Path;
use std::process::Command;
use std::env;
use std::collections::HashMap;
use crate::utils::parser;
pub fn mount_overlay(path_repo: &Path) -> Result<(), String> {
let lowerdirs = vec![
@ -71,3 +74,58 @@ pub fn shell_update() -> Result<(), String> {
Ok(())
}
pub fn run_install_script_hook(repo: &str, stage: &str, full_env: &HashMap<String, String>) -> Result<(), bool> {
    // Prefer a repo-specific hook; fall back to the stage-wide hook if none exists.
    let hook_script = match parser::get_repo_and_stage_hook(repo, stage) {
        Ok(script) => Ok(script),
        Err(_) => parser::get_stage_hook(stage),
    };
    let shell = env::var("SHELL").unwrap_or_else(|_| "/bin/sh".to_string());
    if let Ok(script) = hook_script {
        let output = match Command::new(&shell)
            .arg("-c")
            .arg(&script)
            .envs(full_env)
            .output()
        {
            Ok(output) => output,
            Err(e) => {
                eprintln!("Failed to execute hook script: {}", e);
                return Err(false);
            }
        };
        if !output.status.success() {
            let stderr = String::from_utf8_lossy(&output.stderr);
            eprintln!("Hook script failed:\n``` sh\n{}\n```\nError: {}", script, stderr);
            return Err(false);
        }
    }
    Ok(())
}
pub fn run_install_script(script: &str, work_dir: &Path, full_env: &HashMap<String, String>) -> Result<(), bool> {
    let shell = env::var("SHELL").unwrap_or_else(|_| "/bin/sh".to_string());
    let output = match Command::new(&shell)
        .arg("-c")
        .arg(script)
        .current_dir(work_dir)
        .envs(full_env)
        .output()
    {
        Ok(output) => output,
        Err(e) => {
            eprintln!("Failed to execute shell script: {}", e);
            return Err(false);
        }
    };
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        eprintln!("Failed to execute script:\n``` sh\n{}\n```\nError: {}", script, stderr);
        return Err(false);
    }
    Ok(())
}
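A rough sketch of how one stage could tie these helpers together; the call sequence is illustrative, not taken from the installer code:

``` rust
use std::path::Path;
use crate::utils::parser;

fn demo_build_stage(repo: &String, pkgname: &String, pkg_md: &Path, work_dir: &Path) -> Result<(), bool> {
    let env = parser::env::get_install_env(repo, pkgname, pkg_md, "build");
    // Repo- or stage-level hook first, then the package's own build block.
    run_install_script_hook(repo, "build", &env)?;
    let script = parser::get_build_script(pkg_md).map_err(|_| false)?;
    run_install_script(&script, work_dir, &env)
}
```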