#!/bin/sh -ef # @DOCSTRING@ # shellcheck source=/dev/null # # This is the Carbs Packaging Toolchain written for Carbs Linux. # It was originally forked from the kiss package manager by # Dylan Araps. # # Currently maintained by Cem Keylan. version() { out "Carbs Packaging Tools, version @VERSION@" \ @LICENSE@ exit 0 } out() { # Print a message as is. printf '%s\n' "$@" } log() { # Print a message prettily. # # All messages are printed to stderr to allow the user to hide build # output which is the only thing printed to stdout. case $# in 1) printf '%b->%b %s\n' "$colory" "$colre" "$1" ;; 2) printf '%b->%b %b%s%b %s\n' "$colory" "$colre" "$colorb" "$1" "$colre" "$2" ;; 3) printf '%b%s%b %b%s%b %s\n' "$colory" "${3:-->}" "$colre" "$colorb" "$1" "$colre" "$2" ;; *) return 1 esac >&2 } warn() { # Print a warning message log "$1" "$2" "${3:-WARNING}" } outv() { # Call `out()` when CPT_VERBOSE is set. [ "$CPT_VERBOSE" = 1 ] || return 0 out "$@" } logv() { # Call `log()` when CPT_VERBOSE is set. [ "$CPT_VERBOSE" = 1 ] || return 0 log "$@" } warnv() { # Call `warn()` when CPT_VERBOSE is set. [ "$CPT_VERBOSE" = 1 ] || return 0 warn "$@" } execv() { # Redirect the output to /dev/null unless CPT_VERBOSE is set. if [ "$CPT_VERBOSE" = 1 ]; then "$@"; else "$@" >/dev/null 2>&1; fi } die() { # Print a message and exit with '1' (error). log "$1" "$2" "!>" exit 1 } colors_enabled() { case ${CPT_COLOR:=auto} in auto) [ -t 1 ] ;; 1|always) return 0 ;; 0|never) return 1 ;; *) die "Unknown color value: '$CPT_COLOR'" esac } trap_set() { # Function to set the trap value. case ${1:-cleanup} in cleanup) trap pkg_clean EXIT trap 'pkg_clean; exit 1' INT ;; handle-int) trap pkg_clean INT ;; block) trap '' INT ;; unset) trap - EXIT INT ;; esac } sepchar() ( # Seperate every character on the given string without resorting to external # processes. [ "$1" ] || return 0; str=$1; set -- while [ "$str" ]; do str_tmp=$str for i in $(_seq $(( ${#str} - 1 ))); do str_tmp=${str_tmp%?} done set -- "$@" "$str_tmp" str=${str#$str_tmp} done printf '%s\n' "$@" ) _re() { # Check that the string supplied in $2 conforms to the regular expression # of $1. printf %s "${2:?}" | grep -Eq "$1" } _seq() ( # Pure shell counter meant to be used in 'for' loops. i=0 buf='' while [ "$(( i += 1 ))" -le "$1" ]; do buf="$buf $i " done printf '%s' "$buf" ) _multiply_char() ( buf= for i in $(_seq "$2"); do buf="$buf$1" done out "$buf" ) _stat() ( _user=; eval set -- "$(ls -ld "$1")" id -u "${_user:=$3}" >/dev/null 2>&1 || _user=root printf '%s' "$_user" ) _readlinkf() ( # Public domain POSIX sh readlink function by Koichi Nakashima [ "${1:-}" ] || return 1 max_symlinks=40 CDPATH='' # to avoid changing to an unexpected directory target=$1 [ -e "${target%/}" ] || target=${1%"${1##*[!/]}"} # trim trailing slashes [ -d "${target:-/}" ] && target="$target/" cd -P . 2>/dev/null || return 1 while [ "$max_symlinks" -ge 0 ] && max_symlinks=$((max_symlinks - 1)); do if [ ! "$target" = "${target%/*}" ]; then case $target in /*) cd -P "${target%/*}/" 2>/dev/null || break ;; *) cd -P "./${target%/*}" 2>/dev/null || break ;; esac target=${target##*/} fi if [ ! -L "$target" ]; then target="${PWD%/}${target:+/}${target}" printf '%s\n' "${target:-/}" return 0 fi # `ls -dl` format: "%s %u %s %s %u %s %s -> %s\n", # , , , , # , , , # https://pubs.opengroup.org/onlinepubs/9699919799/utilities/ls.html link=$(ls -dl -- "$target" 2>/dev/null) || break target=${link#*" $target -> "} done return 1 ) _get_digest() { # Get digest algorithm from the given file. 
It looks for a header on the # file declaring the digest algorithm. Currently only BLAKE3 is supported. # If the file does not include a header, the function will assume that it is # using sha256 as a digest algorithm. If the given file doesn't exist it will # return 1. [ -r "$1" ] || return 1 read -r chk < "$1" case $chk in %BLAKE3) chk=b3sum ;; %*) die "Unknown digest algorithm: '${chk#\%}'" ;; *) chk=sh256 esac out "$chk" } # This is the public domain getoptions shell library. It also forms a usage # function. # URL: https://github.com/ko1nksm/getoptions (v2.5.0) # License: Creative Commons Zero v1.0 Universal # shellcheck disable=2016,2086 getoptions() { _error='' _on=1 _off='' _export='' _plus='' _mode='' _alt='' _rest='' _flags='' _nflags='' _opts='' _help='' _abbr='' _cmds='' _init=@empty IFS=' ' _0() { echo "$@"; } for i in 1 2 3 4 5; do eval "_$i() { _$((${i-}-1)) \" \$@\"; }"; done quote() { q="$2'" r='' while [ "$q" ]; do r="$r${q%%\'*}'\''" && q=${q#*\'}; done q="'${r%????}'" && q=${q#\'\'} && q=${q%\'\'} eval "$1=\${q:-\"''\"}" } code() { [ "${1#:}" = "$1" ] && c=3 || c=4 eval "[ ! \${$c:+x} ] || $2 \"\$$c\"" } kv() { eval "${2-}${1%%:*}=\${1#*:}"; } loop() { [ $# -gt 1 ] && [ "$2" != -- ]; } invoke() { eval '"_$@"'; } prehook() { invoke "$@"; } for i in setup flag param option disp msg; do eval "$i() { prehook $i \"\$@\"; }" done args() { on=$_on off=$_off export=$_export init=$_init _hasarg=$1 && shift while loop "$@" && shift; do case $1 in -?) [ "$_hasarg" ] && _opts="$_opts${1#-}" || _flags="$_flags${1#-}" ;; +?) _plus=1 _nflags="$_nflags${1#+}" ;; [!-+]*) kv "$1" esac done } defvar() { case $init in @none) : ;; @export) code "$1" _0 "export $1" ;; @empty) code "$1" _0 "${export:+export }$1=''" ;; @unset) code "$1" _0 "unset $1 ||:" "unset OPTARG ||:; ${1#:}" ;; *) case $init in @*) eval "init=\"=\${${init#@}}\""; esac case $init in [!=]*) _0 "$init"; return 0; esac quote init "${init#=}" code "$1" _0 "${export:+export }$1=$init" "OPTARG=$init; ${1#:}" esac } _setup() { [ "${1#-}" ] && _rest=$1 while loop "$@" && shift; do kv "$1" _; done } _flag() { args '' "$@"; defvar "$@"; } _param() { args 1 "$@"; defvar "$@"; } _option() { args 1 "$@"; defvar "$@"; } _disp() { args '' "$@"; } _msg() { args '' _ "$@"; } cmd() { _mode=@ _cmds="$_cmds${_cmds:+|}'$1'"; } "$@" cmd() { :; } _0 "${_rest:?}=''" _0 "$2() {" _1 'OPTIND=$(($#+1))' _1 'while OPTARG= && [ $# -gt 0 ]; do' [ "$_abbr" ] && getoptions_abbr "$@" args() { sw='' validate='' pattern='' counter='' on=$_on off=$_off export=$_export while loop "$@" && shift; do case $1 in --\{no-\}*) i=${1#--?no-?}; sw="$sw${sw:+|}'--$i'|'--no-$i'" ;; [-+]? | --*) sw="$sw${sw:+|}'$1'" ;; *) kv "$1" esac done quote on "$on" quote off "$off" } setup() { :; } _flag() { args "$@" [ "$counter" ] && on=1 off=-1 v="\$((\${$1:-0}+\$OPTARG))" || v='' _3 "$sw)" _4 '[ "${OPTARG:-}" ] && OPTARG=${OPTARG#*\=} && set "noarg" "$1" && break' _4 "eval '[ \${OPTARG+x} ] &&:' && OPTARG=$on || OPTARG=$off" valid "$1" "${v:-\$OPTARG}" _4 ';;' } _param() { args "$@" _3 "$sw)" _4 '[ $# -le 1 ] && set "required" "$1" && break' _4 'OPTARG=$2' valid "$1" '$OPTARG' _4 'shift ;;' } _option() { args "$@" _3 "$sw)" _4 'set -- "$1" "$@"' _4 '[ ${OPTARG+x} ] && {' _5 'case $1 in --no-*) set "noarg" "${1%%\=*}"; break; esac' _5 '[ "${OPTARG:-}" ] && { shift; OPTARG=$2; } ||' "OPTARG=$on" _4 "} || OPTARG=$off" valid "$1" '$OPTARG' _4 'shift ;;' } valid() { set -- "$validate" "$pattern" "$1" "$2" [ "$1" ] && _4 "$1 || { set -- ${1%% *}:\$? 
\"\$1\" $1; break; }" [ "$2" ] && { _4 "case \$OPTARG in $2) ;;" _5 '*) set "pattern:'"$2"'" "$1"; break' _4 "esac" } code "$3" _4 "${export:+export }$3=\"$4\"" "${3#:}" } _disp() { args "$@" _3 "$sw)" code "$1" _4 "echo \"\${$1}\"" "${1#:}" _4 'exit 0 ;;' } _msg() { :; } [ "$_alt" ] && _2 'case $1 in -[!-]?*) set -- "-$@"; esac' _2 'case $1 in' _wa() { _4 "eval 'set -- $1' \${1+'\"\$@\"'}"; } _op() { _3 "$1) OPTARG=\$1; shift" _wa '"${OPTARG%"${OPTARG#??}"}" '"$2"'"${OPTARG#??}"' _4 "$3" } _3 '--?*=*) OPTARG=$1; shift' _wa '"${OPTARG%%\=*}" "${OPTARG#*\=}"' _4 ';;' _3 '--no-*) unset OPTARG ;;' [ "$_alt" ] || { [ "$_opts" ] && _op "-[$_opts]?*" '' ';;' [ ! "$_flags" ] || _op "-[$_flags]?*" - 'OPTARG= ;;' } [ "$_plus" ] && { [ "$_nflags" ] && _op "+[$_nflags]?*" + 'unset OPTARG ;;' _3 '+*) unset OPTARG ;;' } _2 'esac' _2 'case $1 in' "$@" rest() { _4 'while [ $# -gt 0 ]; do' _5 "$_rest=\"\${$_rest}" '\"\${$(($OPTIND-$#))}\""' _5 'shift' _4 'done' _4 'break ;;' } _3 '--)' [ "$_mode" = @ ] || _4 'shift' rest _3 "[-${_plus:++}]?*)" case $_mode in [=#]) rest ;; *) _4 'set "unknown" "$1"; break ;;'; esac _3 '*)' case $_mode in @) _4 "case \$1 in ${_cmds:-*}) ;;" _5 '*) set "notcmd" "$1"; break' _4 'esac' rest ;; [+#]) rest ;; *) _4 "$_rest=\"\${$_rest}" '\"\${$(($OPTIND-$#))}\""' esac _2 'esac' _2 'shift' _1 'done' _1 '[ $# -eq 0 ] && { OPTIND=1; unset OPTARG; return 0; }' _1 'case $1 in' _2 'unknown) set "Unrecognized option: $2" "$@" ;;' _2 'noarg) set "Does not allow an argument: $2" "$@" ;;' _2 'required) set "Requires an argument: $2" "$@" ;;' _2 'pattern:*) set "Does not match the pattern (${1#*:}): $2" "$@" ;;' _2 'notcmd) set "Not a command: $2" "$@" ;;' _2 '*) set "Validation error ($1): $2" "$@"' _1 'esac' [ "$_error" ] && _1 "$_error" '"$@" >&2 || exit $?' _1 'echo "$1" >&2' _1 'exit 1' _0 '}' [ ! "$_help" ] || eval "shift 2; getoptions_help $1 $_help" ${3+'"$@"'} } # URL: https://github.com/ko1nksm/getoptions (v2.5.0) # License: Creative Commons Zero v1.0 Universal getoptions_help() { _width='30,12' _plus='' _leading=' ' pad() { p=$2; while [ ${#p} -lt "$3" ]; do p="$p "; done; eval "$1=\$p"; } kv() { eval "${2-}${1%%:*}=\${1#*:}"; } sw() { pad sw "$sw${sw:+, }" "$1"; sw="$sw$2"; } args() { _type=$1 var=${2%% *} sw='' label='' hidden='' && shift 2 while [ $# -gt 0 ] && i=$1 && shift && [ "$i" != -- ]; do case $i in --*) sw $((${_plus:+4}+4)) "$i" ;; -?) sw 0 "$i" ;; +?) [ ! "$_plus" ] || sw 4 "$i" ;; *) [ "$_type" = setup ] && kv "$i" _; kv "$i" esac done [ "$hidden" ] && return 0 || len=${_width%,*} [ "$label" ] || case $_type in setup | msg) label='' len=0 ;; flag | disp) label="$sw " ;; param) label="$sw $var " ;; option) label="${sw}[=$var] " esac [ "$_type" = cmd ] && label=${label:-$var } len=${_width#*,} pad label "${label:+$_leading}$label" "$len" [ ${#label} -le "$len" ] && [ $# -gt 0 ] && label="$label$1" && shift echo "$label" pad label '' "$len" for i; do echo "$label$i"; done } for i in setup flag param option disp 'msg -' cmd; do eval "${i% *}() { args $i \"\$@\"; }" done echo "$2() {" echo "cat<<'GETOPTIONSHERE'" "$@" echo "GETOPTIONSHERE" echo "}" } # 2086: # The lack of quotes are intentional. We do this so `getoptions()` do not try # to parse the empty string. # 2120: # The library does not call this function with any positional arguments, but # that does not mean that other programs will not do it, so this can also be # safely ignored. # shellcheck disable=2086,2120 global_options() { # These are options that are supported by most utilities. 
If the optional # argument 'silent' is given, the usage will not print these options, but # the arguments will still be accepted. Alternatively, if the 'compact' # argument is given, the function only prints the '--help' and '--version' # flags. Sometimes it doesn't make sense to pollute the screen with options # that will be rarely ever used. _h=hidden:1 case $1 in silent) _c=$_h ;; compact) _c='' ;; *) msg -- '' 'Global Options:'; _c='' _h='' esac flag CPT_FORCE -f --force $_h init:@export -- "Force operation" flag CPT_PROMPT -y --no-prompt on:0 off:0 $_h init:@export -- "Do not prompt for confirmation" param CPT_ROOT --root $_h init:@export -- "Use an alternate root directory" param CPT_COLOR --color $_h init:@export -- "Colorize the output [default:auto]" disp :usage -h --help $_c -- "Show this help message" disp :version -v --version $_c -- "Print version information" flag CPT_VERBOSE --verbose $_h init:@export -- "Be more verbose" } contains() { # Check if a "string list" contains a word. case " $1 " in *" $2 "*) return 0; esac; return 1 } regesc() { # Escape special regular expression characters as # defined in POSIX BRE. '$.*[\^' printf '%s\n' "$1" | sed 's|\\|\\\\|g;s|\[|\\[|g;s|\$|\\$|g;s|\.|\\.|g;s|\*|\\*|g;s|\^|\\^|g' } pkg_download() { # $1: URL # $2: Output (Optional) set -- "$1" "${2:-${1##*/}}" case ${dl_prog##*/} in aria2c|axel) set -- -o "$2" "$1" ;; curl) set -- -fLo "$2" "$1" ;; wget|wget2) set -- -O "$2" "$1" ;; esac "$dl_prog" "$@" || { rm -f "$2" return 1 } } prompt() { # If a CPT_NOPROMPT variable is set, continue. # This can be useful for installation scripts and # bootstrapping. [ "$CPT_PROMPT" = 0 ] && return 0 # Ask the user for some input. [ "$1" ] && log "$1" log "Continue?: Press Enter to continue or Ctrl+C to abort here" # POSIX 'read' has none of the "nice" options like '-n', '-p' # etc etc. This is the most basic usage of 'read'. # '_' is used as 'dash' errors when no variable is given to 'read'. read -r _ || return 1 } as_root() { # Simple function to run a command as root using either 'sudo', # 'doas' or 'su'. Hurrah for choice. [ "$uid" = 0 ] || log "Using '${su:-su}' (to become ${user:=root})" # We are exporting package manager variables, so that we still have the # same repository paths / access to the same cache directories etc. set -- HOME="$HOME" \ USER="$user" \ XDG_CACHE_HOME="$XDG_CACHE_HOME" \ CPT_CACHE="$CPT_CACHE" \ CPT_CHOICE="$CPT_CHOICE" \ CPT_COMPRESS="$CPT_COMPRESS" \ CPT_DEBUG="$CPT_DEBUG" \ CPT_FETCH="$CPT_FETCH" \ CPT_FORCE="$CPT_FORCE" \ CPT_HOOK="$CPT_HOOK" \ CPT_KEEPLOG="$CPT_KEEPLOG" \ CPT_PATH="$CPT_PATH" \ CPT_PID="$CPT_PID" \ CPT_PROMPT="$CPT_PROMPT" \ CPT_REPO_CACHE="$CPT_REPO_CACHE" \ CPT_ROOT="$CPT_ROOT" \ CPT_TMPDIR="$CPT_TMPDIR" \ CPT_VERBOSE="$CPT_VERBOSE" \ "$@" case ${su##*/} in sls) warn "'sls' is deprecated, use 'ssu' instead." "$su" -u "$user" -- env "$@" ;; ssu|sudo|doas) "$su" -u "$user" -- env "$@" ;; su) su -c "env $* <&3" "$user" 3<&0 /dev/null || sha256 -r "$1" 2>/dev/null || openssl dgst -r -sha256 "$1" || die "No sha256 program could be run." ;} | while read -r hash _; do printf '%s %s\n' "$hash" "$1"; done } pkg_owner() { set +f [ "$3" ] || set -- "$1" "$2" "$sys_db"/*/manifest pkg_owner=$(grep "$@") pkg_owner=${pkg_owner%/*} pkg_owner=${pkg_owner##*/} set -f -- "$pkg_owner"; unset pkg_owner [ "$1" ] && printf '%s\n' "$1" } pkg_isbuilt() ( # Check if a package is built or not. 
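    #
    # Illustrative example (hypothetical package): for 'zlib' with a version
    # file reading "1.3 1", this matches a tarball named
    # "$bin_dir/zlib#1.3-1.tar.gz" (the extension depends on CPT_COMPRESS).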
read -r ver rel < "$(pkg_find "$1")/version" set +f for tarball in "$bin_dir/$1#$ver-$rel.tar."*; do [ -f "$tarball" ] && return 0 done return 1 ) pkg_lint() { # Check that each mandatory file in the package entry exists. log "$1" "Checking repository files" repo_dir=$(pkg_find "$1") cd "$repo_dir" || die "'$repo_dir' not accessible" [ -f sources ] || warnv "$1" "Sources file not found" [ -x build ] || die "$1" "Build file not found or not executable" [ -s version ] || die "$1" "Version file not found or empty" read -r _ release 2>/dev/null < version || die "Version file not found" [ "$release" ] || die "Release field not found in version file" # If we have a second argument, we are generating the checksums file, # so we don't need to check whether there is one. [ -z "$2" ] || return 0 # Check for a checksums file only if there is a sources file. [ -f sources ] || return 0 [ -f checksums ] || die "$pkg" "Checksums are missing" } pkg_find() { # Use a SEARCH_PATH variable so that we can get the sys_db into # the same variable as CPT_PATH. This makes it easier when we are # searching for executables instead of CPT_PATH. : "${SEARCH_PATH:=$CPT_PATH:$sys_db}" # Figure out which repository a package belongs to by # searching for directories matching the package name # in $CPT_PATH/*. query=$1 match=$2 type=$3 IFS=:; set -- # Word splitting is intentional here. # shellcheck disable=2086 for path in $SEARCH_PATH ; do set +f for path2 in "$path/"$query; do test "${type:--d}" "$path2" && set -f -- "$@" "$path2" done # Break early if we only want a single match for a slight increase # in speed. We don't need to search for the entire path. [ "$1" ] && [ -z "$match" ] && break done IFS=$old_ifs # A package may also not be found due to a repository not being # readable by the current user. Either way, we need to die here. [ "$1" ] || die "Package '$query' not in any repository" # Show all search results if called from 'cpt search', else # print only the first match. [ "$match" ] && printf '%s\n' "$@" || printf '%s\n' "$1" } pkg_list() { # List installed packages. As the format is files and # directories, this just involves a simple for loop and # file read. # Change directories to the database. This allows us to # avoid having to 'basename' each path. If this fails, # set '$1' to mimic a failed glob which indicates that # nothing is installed. cd "$sys_db" 2>/dev/null || set -- "$sys_db/"\* # Optional arguments can be passed to check for specific # packages. If no arguments are passed, list all. As we # loop over '$@', if there aren't any arguments we can # just set the directory contents to the argument list. [ "$1" ] || { set +f; set -f -- *; } # If the 'glob' above failed, exit early as there are no # packages installed. [ "$1" = "$sys_db/"\* ] && return 1 # Loop over each package and print its name and version. for pkg do [ -d "$pkg" ] || { log "$pkg" "not installed"; return 1; } read -r version 2>/dev/null < "$pkg/version" || version=null printf '%s\n' "$pkg $version" done } pkg_cache() { read -r version release 2>/dev/null < "$(pkg_find "$1")/version" # Initially assume that the package tarball is built with the CPT_COMPRESS # value. if [ -f "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS" ]; then tar_file="$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS" else set +f; set -f -- "$bin_dir/$1#$version-$release.tar."* tar_file=$1 fi [ -f "$tar_file" ] } pkg_sources() { # Download any remote package sources. The existence of local # files is also checked. 
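    #
    # Each line of a 'sources' file is read as "<source> [destination]".
    # A hypothetical, illustrative example:
    #
    #   https://example.org/pkg-1.0.tar.gz
    #   git+https://example.org/pkg.git@v1.0 vendor/pkg
    #   patches/fix-build.patch
    #
    # Remote archives are cached under "$src_dir/<pkg>", VCS sources are
    # handled by pkg_extract(), and bare paths are looked up relative to the
    # package's repository directory.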
    repo_dir=$(pkg_find "$1")

    # Support packages without sources. Simply do nothing.
    [ -f "$repo_dir/sources" ] || return 0

    log "$1" "Downloading sources"

    # Store each downloaded source in a directory named after the
    # package it belongs to. This avoids conflicts between two packages
    # having a source of the same name.
    mkdir -p "$src_dir/$1" && cd "$src_dir/$1"

    repo_dir=$(pkg_find "$1")

    while read -r src dest || [ "$src" ]; do
        # Remote repository or comment.
        if _re "$re_vcs_or_com" "$src"; then :

        # Remote source (cached).
        elif [ -f "${src##*/}" ]; then
            log "$1" "Found cached source '${src##*/}'"

        # Remote source.
        elif [ -z "${src##*://*}" ]; then
            log "$1" "Downloading $src"

            # We don't want our trap to exit immediately here if we receive an
            # interrupt, we handle this ourselves.
            trap_set handle-int

            # Download the source
            pkg_download "$src" || die "$1" "Failed to download $src"

            # Restore original trap value.
            trap_set cleanup

        # Local source.
        elif [ -f "$repo_dir/$src" ]; then
            log "$1" "Found local file '$src'"

        else
            die "$1" "No local file '$src'"
        fi
    done < "$repo_dir/sources"
}

pkg_extract() {
    # Extract all source archives to the build directory and copy over
    # any local repository files.
    repo_dir=$(pkg_find "$1")

    # Support packages without sources. Simply do nothing.
    [ -f "$repo_dir/sources" ] || return 0

    log "$1" "Extracting sources"

    while read -r src dest || [ "$src" ]; do
        mkdir -p "$mak_dir/$1/$dest" && cd "$mak_dir/$1/$dest"

        case $src in
            # VCS Repository
            git+*|hg+*|fossil+*)
                backend=${src%%+*}
                url=${src##${backend}+}
                com=${url##*[@#]}
                com=${com#${url%[@#]*}}
                log "$1" "Cloning ${url%[#@]*}"
                "pkg_vcs_clone_$backend" "${url%[#@]*}" "$com"
                ;;

            # Comment or blank line.
            \#*|'') continue ;;

            # Only 'tar', 'cpio', and 'zip' archives are currently supported for
            # extraction. Other filetypes are simply copied to '$mak_dir'
            # which allows for manual extraction.
            *://*.tar|*://*.tar.??|*://*.tar.???|*://*.tar.????|*://*.tgz|\
            *://*.txz|*://*.cpio|*://*.cpio.??|*://*.cpio.???|*://*.cpio.????)
                decompress "$src_dir/$1/${src##*/}" | pax -rs '|[^/]*|.|'
                ;;

            *://*.zip)
                unzip "$src_dir/$1/${src##*/}" || die "$1" "Couldn't extract ${src##*/}"
                ;;

            *)
                # Local file.
                if [ -f "$repo_dir/$src" ]; then
                    cp -f "$repo_dir/$src" .

                # Remote file.
                elif [ -f "$src_dir/$1/${src##*/}" ]; then
                    cp -f "$src_dir/$1/${src##*/}" .

                else
                    die "$1" "Local file $src not found"
                fi
                ;;
        esac
    done < "$repo_dir/sources"
}

pkg_depends() {
    # Resolve all dependencies and generate an ordered list.
    # This does a depth-first search. The deepest dependencies are
    # listed first and then the parents in reverse order.
    contains "$deps" "$1" || {
        # Filter out non-explicit, already installed dependencies.
        # Only filter installed if called from 'pkg_build()'.
        [ "$pkg_build" ] && [ -z "$2" ] && (pkg_list "$1" >/dev/null) && return

        while read -r dep type || [ "$dep" ]; do
            # Skip comments and empty lines.
            [ "${dep##\#*}" ] || continue

            # Skip test dependencies unless $CPT_TEST is set to 1.
            #
            # Skip make dependencies on the 'tree' operation for child packages
            # or when the 'first-nomake' argument is given.
            case $type in
                test) [ "$CPT_TEST" = 1 ] || continue ;;
                make) [ "$2" = tree ] && [ -z "${3#first-nomake}" ] && continue
            esac

            # Recurse through the dependencies of the child packages. Forward
            # the 'tree' operation.
            if [ "$2" = tree ]; then
                pkg_depends "$dep" tree
            else
                pkg_depends "$dep"
            fi
        done 2>/dev/null < "$(pkg_find "$1")/depends" ||:

        # After child dependencies are added to the list,
        # add the package which depends on them.
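        #
        # Illustrative example (hypothetical packages): resolving 'curl', which
        # depends on 'openssl', which in turn depends on 'zlib', yields the
        # order: zlib, openssl, curl. The package itself is only appended for
        # non-explicit calls, as seen below.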
[ "$2" = explicit ] || [ "$3" ] || deps="$deps $1 " } } pkg_order() { # Order a list of packages based on dependence and # take into account pre-built tarballs if this is # to be called from 'cpt i'. order=; redro=; deps= for pkg do case $pkg in *.tar.*) deps="$deps $pkg " ;; *) pkg_depends "$pkg" raw esac done # Filter the list, only keeping explicit packages. # The purpose of these two loops is to order the # argument list based on dependence. for pkg in $deps; do ! contains "$*" "$pkg" || { order="$order $pkg " redro=" $pkg $redro" } done deps= } pkg_strip() { # Strip package binaries and libraries. This saves space on the # system as well as on the tarballs we ship for installation. # Package has stripping disabled, stop here. [ -f "$mak_dir/$pkg/nostrip" ] && return log "$1" "Stripping binaries and libraries" find "$pkg_dir/$1" -type f | while read -r file; do case $(od -A o -t c -N 18 "$file") in # REL (object files (.o), static libraries (.a)). *177*E*L*F*0000020\ 001\ *|*\!*\<*a*r*c*h*\>*) strip -g -R .comment -R .note "$file" ;; # EXEC (static binaries). # DYN (shared libraries, dynamic binaries). # Shared libraries keep global symbols in a separate ELF section # called '.dynsym'. '--strip-all/-s' does not touch the dynamic # symbol entries which makes this safe to do. *177*E*L*F*0000020\ 00[23]\ *) strip -s -R .comment -R .note "$file" ;; esac done 2>/dev/null ||: } pkg_fix_deps_fullpath() { # Return the canonical path of libraries extracted by readelf. while read -r dep _ rslv _; do [ "$dep" = "$1" ] || continue printf '%s\n' "$rslv" done } pkg_fix_deps() { # Dynamically look for missing runtime dependencies by checking each binary # and library with either 'ldd' or 'readelf'. This catches any extra # libraries and or dependencies pulled in by the package's build suite. log "$1" "Checking for missing dependencies (using ${elf_prog##*/})" # Go to the directory containing the built package to # simplify path building. cd "$pkg_dir/$1/$pkg_db/$1" # Make a copy of the depends file if it exists to have a reference to 'diff' # against. if [ -f depends ]; then dep_file=$(_tmp_cp depends) else dep_file=/dev/null fi # Generate a list of all installed manifests. pkg_name=$1 set +f; set -f -- "$sys_db/"*/manifest # Get a list of binaries and libraries, false files # will be found, however it's faster to get 'ldd' to check # them anyway than to filter them out. find "$pkg_dir/$pkg_name/" -type f 2>/dev/null | while read -r file; do # We call ldd regardless here, because we also use it to retrieve the # fullpath of a library when using readelf. Best use we have here is # saving it in a buffer, so we don't use the dynamic loader everytime we # need to reference it. lddbuf=$(ldd -- "$file" 2>/dev/null) ||: case ${elf_prog:-ldd} in *readelf) "$elf_prog" -d "$file" 2>/dev/null ;; *) pirntf '%s\n' "$lddbuf" ;; esac | while read -r dep; do # Skip lines containing 'ldd'. [ "${dep##*ldd*}" ] || continue case $dep in *NEEDED*\[*\] | *'=>'*) ;; *) continue; esac # readelf output: # 0x0000 (NEEDED) Shared library: [libc.so] dep=${dep##*\[} dep=${dep%%\]*} # Retrieve the fullpath of the library from our ldd buffer. case $elf_prog in *readelf) line=$(printf '%s\n' "$lddbuf" | pkg_fix_deps_fullpath "$line") esac # ldd output: # libc.so => /lib/ld-musl-x86_64.so.1 dep=${dep#* => } dep=${dep% *} # Figure out which package owns the file. Skip file if it is owned # by the current package. 
            # This also handles cases where a '*-bin'
            # package exists on the system, so the package manager doesn't think
            # that the package we are building depends on the *-bin version of
            # itself, or any other renamed versions of the same software.
            pkg_owner -l "/${dep#/}\$" "$PWD/manifest" >/dev/null && continue

            pkg_owner -l "/${dep#/}\$" "$@" ||:
        done ||:
    done >> depends

    # Remove duplicate entries from the new depends file.
    # This removes duplicate lines looking *only* at the
    # first column.
    sort -uk1,1 -o depends depends 2>/dev/null ||:

    # Display a diff of the new dependencies against the old ones.
    execv diff -U 3 "$dep_file" depends 2>/dev/null ||:

    # Remove the depends file if it is empty.
    [ -s depends ] || rm -f depends
}

pkg_manifest() (
    # Generate the package's manifest file. This is a list of each file
    # and directory inside the package. The file is used when uninstalling
    # packages, checking for package conflicts and for general debugging.
    log "$1" "Generating manifest"

    # This function runs as a sub-shell to avoid having to 'cd' back to the
    # prior directory before being able to continue.
    cd "${2:-$pkg_dir}/$1"

    # find: Print all files and directories and append '/' to directories.
    # sort: Sort the output in *reverse*. Directories appear *after* their
    #       contents.
    # sed:  Remove the first character in each line (./dir -> /dir) and
    #       remove all lines which only contain '.'.
    find . -type d -exec printf '%s/\n' {} + -o -print |
        sort -r |
        sed '/^\.$/d;/^\.\/$/d;ss.ss' > "${2:-$pkg_dir}/$1/$pkg_db/$1/manifest"
)

pkg_etcsums() (
    # This function runs as a sub-shell to avoid having to 'cd' back to the
    # prior directory before being able to continue.
    cd "$pkg_dir/$1/etc" 2>/dev/null || return 0; cd ..

    # Generate checksums for each configuration file in the package's
    # /etc/ directory for use in "smart" handling of these files.
    log "$1" "Generating etcsums"

    # Try to get the digest algorithm from the installed etcsums file. This
    # makes sure that old packages continue to have the same digest algorithm
    # and that a bunch of '.new' files aren't installed. It's not foolproof at
    # all, but at least it keeps the /etc directory as clean as possible.
    digest=$(_get_digest "$sys_db/$1/etcsums") || digest=b3sum

    case $digest in b3sum) out "%BLAKE3"; esac > "$pkg_dir/$1/$pkg_db/$1/etcsums"

    find etc -type f | while read -r file; do
        "$digest" "$file"
    done >> "$pkg_dir/$1/$pkg_db/$1/etcsums"
)

pkg_tar() {
    # Create a tarball from the built package's files.
    # This tarball also contains the package's database entry.
    log "$1" "Creating tarball"

    # Read the version information to name the package.
    read -r version release < "$(pkg_find "$1")/version"

    # Create a tarball from the contents of the built package.
    cd "$pkg_dir/$1"
    pax -w . | compress > "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS"

    log "$1" "Successfully created tarball"

    run_hook post-package "$1" "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS"
}

pkg_build() {
    # Build packages and turn them into packaged tarballs. This function
    # also checks checksums, downloads sources and ensures all dependencies
    # are installed.
    pkg_build=1

    log "Resolving dependencies"

    for pkg do
        contains "$explicit" "$pkg" || {
            pkg_depends "$pkg" explicit

            # Mark packages passed on the command-line
            # separately from those detected as dependencies.
            explicit="$explicit $pkg "
        }
    done

    [ "$pkg_update" ] || explicit_build=$explicit

    # If an explicit package is a dependency of another explicit
    # package, remove it from the explicit list as it needs to be
    # installed as a dependency.
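    #
    # Illustrative example (hypothetical packages): 'cpt b foo bar' where
    # 'foo' lists 'bar' in its depends file drops 'bar' from the explicit
    # list; 'bar' is then built and installed as a dependency, and only
    # 'foo' is offered for installation at the end.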
    # shellcheck disable=2086
    for pkg do
        contains "$deps" "$pkg" && explicit=$(pop "$pkg" from $explicit)
    done

    # See [1] at top of script.
    # shellcheck disable=2046,2086
    set -- $deps $explicit

    log "Building: $*"

    # Only ask for confirmation if more than one package needs to be built.
    [ $# -gt 1 ] || [ "$pkg_update" ] && prompt

    log "Checking for pre-built dependencies"

    for pkg do pkg_lint "$pkg"; done

    # Install any pre-built dependencies if they exist in the binary
    # directory and are up to date.
    for pkg do
        ! contains "$explicit_build" "$pkg" && pkg_cache "$pkg" && {
            log "$pkg" "Found pre-built binary, installing"
            (CPT_FORCE=1 cpt-install "$tar_file")

            # Remove the now installed package from the build list.
            # See [1] at top of script.
            # shellcheck disable=2046,2086
            set -- $(pop "$pkg" from "$@")
        }
    done

    for pkg do pkg_sources "$pkg"; done

    pkg_verify "$@"

    # Finally build and create tarballs for all passed packages and
    # dependencies.
    for pkg do
        log "$pkg" "Building package ($((in = in + 1))/$#)"

        mkdir -p "$mak_dir/$pkg"

        pkg_extract "$pkg"

        repo_dir=$(pkg_find "$pkg")
        read -r build_version _ < "$repo_dir/version"

        # Copy the build file to the build directory so users can modify it
        # temporarily at runtime.
        cp -f "$repo_dir/build" "$mak_dir/$pkg/.build.cpt"

        # Install built packages to a directory under the package name
        # to avoid collisions with other packages.
        mkdir -p "$pkg_dir/$pkg/$pkg_db"

        # Move to the build directory.
        cd "$mak_dir/$pkg"

        log "$pkg" "Starting build"

        run_hook pre-build "$pkg" "$pkg_dir/$pkg"

        # Notify the user if the build script is changed during the pre-build
        # hook.
        diff -q "$repo_dir/build" .build.cpt ||
            log "$pkg" "Executing the modified build file"

        # Call the build script, log the output to the terminal
        # and to a file. There's no PIPEFAIL in POSIX shell so
        # we must resort to tricks like killing the script ourselves.
        {
            ./.build.cpt "$pkg_dir/$pkg" "$build_version" "$sys_arch" 2>&1 || {
                log "$pkg" "Build failed"
                log "$pkg" "Log stored to $log_dir/$pkg-$time-$pid"
                run_hook build-fail "$pkg" "$pkg_dir/$pkg"
                pkg_clean
                kill 0
            }
        } | tee "$log_dir/$pkg-$time-$pid"

        # Run the test script if it exists and the user wants to run tests. This
        # is turned off by default.
        [ -x "$repo_dir/test" ] && [ "$CPT_TEST" = 1 ] && {
            run_hook pre-test "$pkg" "$pkg_dir/$pkg"
            log "$pkg" "Running tests"

            "$repo_dir/test" "$pkg_dir/$pkg" "$build_version" "$sys_arch" 2>&1 || {
                log "$pkg" "Test failed"
                log "$pkg" "Log stored to $log_dir/$pkg-$time-$pid"
                run_hook test-fail "$pkg" "$pkg_dir/$pkg"
                pkg_clean
                kill 0
            }
        } | tee -a "$log_dir/$pkg-$time-$pid"

        # Delete the log file if the build succeeded to prevent
        # the directory from filling very quickly with useless logs.
        [ "$CPT_KEEPLOG" = 1 ] || rm -f "$log_dir/$pkg-$time-$pid"

        # Copy the repository files to the package directory.
        # This acts as the database entry.
        cp -LRf "$repo_dir" "$pkg_dir/$pkg/$pkg_db/"

        # Copy the modified build file to the package directory.
        pkg_build="$pkg_dir/$pkg/$pkg_db/$pkg/build"
        diff -U 3 "$pkg_build" .build.cpt > "$pkg_build.diff" &&
            rm -f "$pkg_build.diff"

        # We don't want the package manager to track 'dir' pages of the info
        # directory. We don't want every single package to create their own dir
        # files either.
        rm -f "$pkg_dir/$pkg/usr/share/info/dir"

        # We never ever want this. Let's end the endless conflicts
        # and remove it.
        find "$pkg_dir/$pkg" -name charset.alias -exec rm -f {} +

        # Remove libtool's '*.la' library files. This removes cross-build
        # system conflicts that may arise.
Build-systems change, libtool # is getting deprecated, we don't want a package that depends on # some package's '.la' files. find "$pkg_dir/$pkg" -name '*.la' -exec rm -f {} + log "$pkg" "Successfully built package" run_hook post-build "$pkg" "$pkg_dir/$pkg" # Create the manifest file early and make it empty. # This ensures that the manifest is added to the manifest. : > "$pkg_dir/$pkg/$pkg_db/$pkg/manifest" # If the package contains '/etc', add a file called # 'etcsums' to the manifest. See comment directly above. [ -d "$pkg_dir/$pkg/etc" ] && : > "$pkg_dir/$pkg/$pkg_db/$pkg/etcsums" pkg_strip "$pkg" pkg_manifest "$pkg" pkg_fix_deps "$pkg" pkg_manifest "$pkg" pkg_etcsums "$pkg" pkg_tar "$pkg" # Install only dependencies of passed packages. # Skip this check if this is a package update. contains "$explicit" "$pkg" && [ -z "$pkg_update" ] && continue log "$pkg" "Needed as a dependency or has an update, installing" (CPT_FORCE=1 cpt-install "$pkg") done # End here as this was a system update and all packages have been installed. [ "$pkg_update" ] && return log "Successfully built package(s)" # Turn the explicit packages into a 'list'. # See [1] at top of script. # shellcheck disable=2046,2086 set -- $explicit # Only ask for confirmation if more than one package needs to be installed. [ $# -gt 1 ] && prompt "Install built packages? [$*]" && { cpt-install "$@" return } log "Run 'cpt i $*' to install the package(s)" } pkg_checksums() { # Generate checksums for packages. repo_dir=$(pkg_find "$1") [ -f "$repo_dir/sources" ] || return 0 case ${2:-b3sum} in b3sum) out "%BLAKE3"; esac while read -r src _ || [ "$src" ]; do # Skip checksums if it's a comment, or a VCS repository. if _re "$re_vcs_or_com" "$src"; then continue # File is local to the package. elif [ -f "$repo_dir/$src" ]; then src_path=$repo_dir/${src%/*} # File is remote and was downloaded. elif [ -f "$src_dir/$1/${src##*/}" ]; then src_path=$src_dir/$1 # Die here if source for some reason, doesn't exist. else die "$1" "Couldn't find source '$src'" fi # An easy way to get 'b3sum' to print with the 'basename' # of files is to 'cd' to the file's directory beforehand. (cd "$src_path" && "${2:-b3sum}" "${src##*/}") || die "$1" "Failed to generate checksums" done < "$repo_dir/sources" } pkg_verify() { # Verify all package checksums. This is achieved by generating a new set of # checksums and then comparing those with the old set. vcmd="NR==FNR{a[\$1];next}/^git .*/{next}!((\$1)in a){exit 1}" for pkg; do repo_dir=$(pkg_find "$pkg") [ -f "$repo_dir/sources" ] || continue # Determine the type of digest algorithm from the checksums file to do # verification with. digest="$(_get_digest "$repo_dir/checksums")" pkg_checksums "$pkg" "$digest" | awk "$vcmd" - "$repo_dir/checksums" || { log "$pkg" "Checksum mismatch" # Instead of dying above, log it to the terminal. Also define a # variable so we *can* die after all checksum files have been # checked. mismatch="$mismatch$pkg " } done [ -z "$mismatch" ] || die "Checksum mismatch with: ${mismatch% }" } pkg_conflicts() { # Check to see if a package conflicts with another. log "$1" "Checking for package conflicts" # Filter the tarball's manifest and select only files # and any files they resolve to on the filesystem # (/bin/ls -> /usr/bin/ls). while read -r file; do case $file in */) continue; esac # Use $CPT_ROOT in filename so that we follow its symlinks. file=$CPT_ROOT/${file#/} # We will only follow the symlinks of the directories, so we reserve the # directory name in this 'dirname' value. 
If we cannot find it in the # system, we don't need to make this much more complex by trying so hard # to find it. Simply use the original directory name. dirname="$(_readlinkf "${file%/*}" 2>/dev/null)" || dirname="${file%/*}" # Combine the dirname and file values, and print them into the # temporary manifest to be parsed. printf '%s/%s\n' "${dirname#$CPT_ROOT}" "${file##*/}" done < "$tar_dir/$1/$pkg_db/$1/manifest" > "$CPT_TMPDIR/$pid/manifest" p_name=$1 # Generate a list of all installed package manifests # and remove the current package from the list. # shellcheck disable=2046,2086 set -- $(set +f; pop "$sys_db/$p_name/manifest" from "$sys_db"/*/manifest) [ -s "$CPT_TMPDIR/$pid/manifest" ] || return 0 # In rare cases where the system only has one package installed # and you are reinstalling that package, grep will try to read from # standard input if we continue here. # # Also, if we don't have any packages installed grep will give an # error. This will not cause the installation to fail, but we don't # need to check for conflicts if that's the case anyway. If we have # only zero packages or one package, just stop wasting time and continue # with the installation. [ "$1" ] && [ -f "$1" ] || return 0 # Store the list of found conflicts in a file as we will be using the # information multiple times. Storing it in the cache dir allows us # to be lazy as they'll be automatically removed on script end. sed '/\/$/d' "$@" | sort "$CPT_TMPDIR/$pid/manifest" - | uniq -d > "$CPT_TMPDIR/$pid/conflict" ||: # Enable alternatives automatically if it is safe to do so. # This checks to see that the package that is about to be installed # doesn't overwrite anything it shouldn't in '/var/db/cpt/installed'. "$grep" -q "/var/db/cpt/installed/" "$CPT_TMPDIR/$pid/conflict" || choice_auto=1 # Use 'grep' to list matching lines between the to # be installed package's manifest and the above filtered # list. if [ "$CPT_CHOICE" != 0 ] && [ "$choice_auto" = 1 ]; then # This is a novel way of offering an "alternatives" system. # It is entirely dynamic and all "choices" are created and # destroyed on the fly. # # When a conflict is found between two packages, the file # is moved to a directory called "choices" and its name # changed to store its parent package and its intended # location. # # The package's manifest is then updated to reflect this # new location. # # The 'cpt choices' command parses this directory and # offers you the CHOICE of *swapping* entries in this # directory for those on the filesystem. # # The choices command does the same thing we do here, # it rewrites manifests and moves files around to make # this work. # # Pretty nifty huh? while read -r con; do printf '%s\n' "Found conflict $con" # Create the "choices" directory inside of the tarball. # This directory will store the conflicting file. mkdir -p "$tar_dir/$p_name/${cho_dir:=var/db/cpt/choices}" # Construct the file name of the "db" entry of the # conflicting file. (pkg_name>usr>bin>ls) con_name=$(printf %s "$con" | sed 's|/|>|g') # Move the conflicting file to the choices directory # and name it according to the format above. mv -f "$tar_dir/$p_name/$con" \ "$tar_dir/$p_name/$cho_dir/$p_name$con_name" 2>/dev/null || { log "File must be in ${con%/*} and not a symlink to it" log "This usually occurs when a binary is installed to" log "/sbin instead of /usr/bin (example)" log "Before this package can be used as an alternative," log "this must be fixed in $p_name. 
Contact the maintainer" die "by checking 'git log' or by running 'cpt-maintainer'" } done < "$CPT_TMPDIR/$pid/conflict" # Rewrite the package's manifest to update its location # to its new spot (and name) in the choices directory. pkg_manifest "$p_name" "$tar_dir" 2>/dev/null elif [ -s "$CPT_TMPDIR/$pid/conflict" ]; then log "Package '$p_name' conflicts with another package" "" "!>" log "Run 'CPT_CHOICE=1 cpt i $p_name' to add conflicts" "" "!>" die "as alternatives." fi } pkg_swap() { # Swap between package alternatives. pkg_list "$1" >/dev/null alt=$(printf %s "$1$2" | sed 's|/|>|g') cd "$sys_db/../choices" [ -f "$alt" ] || [ -h "$alt" ] || die "Alternative '$1 $2' doesn't exist" if [ -f "$2" ]; then # Figure out which package owns the file we are going to swap for # another package's. # # Print the full path to the manifest file which contains # the match to our search. pkg_owns=$(pkg_owner -lFx "$2") || die "File '$2' exists on filesystem but isn't owned" log "Swapping '$2' from '$pkg_owns' to '$1'" # Convert the current owner to an alternative and rewrite # its manifest file to reflect this. We then resort this file # so no issues arise when removing packages. cp -Pf "$CPT_ROOT/$2" "$pkg_owns>${alt#*>}" sed "s#^$(regesc "$2")\$#${PWD#$CPT_ROOT}/$pkg_owns>${alt#*>}#" \ "../installed/$pkg_owns/manifest" | sort -r -o "../installed/$pkg_owns/manifest" fi # Convert the desired alternative to a real file and rewrite # the manifest file to reflect this. The reverse of above. mv -f "$alt" "$CPT_ROOT/$2" sed "s#^${PWD#$CPT_ROOT}/$(regesc "$alt")\$#$2#" "../installed/$1/manifest" | sort -r -o "../installed/$1/manifest" } pkg_etc() { [ -d "$tar_dir/$pkg_name/etc" ] || return 0 (cd "$tar_dir/$pkg_name" # Create all directories beforehand. find etc -type d | while read -r dir; do mkdir -p "$CPT_ROOT/$dir" done digest=$(_get_digest "$_etcsums") || digest=b3sum # Handle files in /etc/ based on a 3-way checksum check. find etc ! -type d | while read -r file; do { sum_new=$("$digest" "$file") sum_sys=$(cd "$CPT_ROOT/"; "$digest" "$file") sum_old=$("$grep" "$file$" "$_etcsums"); } 2>/dev/null ||: logv "$pkg_name" "Doing 3-way handshake for $file" outv "Previous: ${sum_old:-null}" outv "System: ${sum_sys:-null}" outv "New: ${sum_new:-null}" # Use a case statement to easily compare three strings at # the same time. Pretty nifty. case ${sum_old:-null}${sum_sys:-null}${sum_new} in # old = Y, sys = X, new = Y "${sum_new}${sum_sys}${sum_old}") logv "Skipping $file" continue ;; # old = X, sys = X, new = X # old = X, sys = Y, new = Y # old = X, sys = X, new = Y "${sum_old}${sum_old}${sum_old}"|\ "${sum_old:-null}${sum_sys}${sum_sys}"|\ "${sum_sys}${sum_old}"*) logv "Installing $file" new= ;; # All other cases. *) warn "$pkg_name" "saving /$file as /$file.new" "->" new=.new ;; esac cp -fPp "$file" "$CPT_ROOT/${file}${new}" chown root:root "$CPT_ROOT/${file}${new}" 2>/dev/null done) ||: } pkg_remove() { # Remove a package and all of its files. The '/etc' directory # is handled differently and configuration files are *not* # overwritten. pkg_list "$1" >/dev/null || return # Make sure that nothing depends on this package. [ "$CPT_FORCE" = 1 ] || { log "$1" "Checking for reverse dependencies" (cd "$sys_db"; set +f; grep -lFx -- "$1" */depends) && die "$1" "Can't remove package, others depend on it" } # Block being able to abort the script with 'Ctrl+C' during removal. # Removes all risk of the user aborting a package removal leaving # an incomplete package installed. 
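    # ('trap_set block' ignores INT while leaving the EXIT handler in place;
    # the 'trap_set cleanup' further below restores the handlers that run
    # pkg_clean on exit and interrupt.)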
trap_set block if [ -x "$sys_db/$1/pre-remove" ]; then log "$1" "Running pre-remove script" "$sys_db/$1/pre-remove" ||: fi # Create a temporary list of all directories, so we don't accidentally # remove anything from packages that create empty directories for a # purpose (such as baselayout). manifest_list="$(set +f; pop "$sys_db/$1/manifest" from "$sys_db/"*/manifest)" dirs="$(_tmp_name "directories")" # shellcheck disable=2086 [ "$manifest_list" ] && grep -h '/$' $manifest_list | sort -ur > "$dirs" run_hook pre-remove "$1" "$sys_db/$1" root while read -r file; do # The file is in '/etc' skip it. This prevents the package # manager from removing user edited configuration files. [ "${file##/etc/*}" ] || continue if [ -d "$CPT_ROOT/$file" ]; then "$grep" -Fxq "$file" "$dirs" 2>/dev/null && continue rmdir "$CPT_ROOT/$file" 2>/dev/null || continue else rm -f "$CPT_ROOT/$file" fi done < "$sys_db/$1/manifest" # Reset 'trap' to its original value. Removal is done so # we no longer need to block 'Ctrl+C'. trap_set cleanup run_hook post-remove "$1" "$CPT_ROOT/" root log "$1" "Removed successfully" } pkg_install() { # Install a built package tarball. # Install can also take the full path to a tarball. # We don't need to check the repository if this is the case. if [ -f "$1" ] && [ -z "${1%%*.tar*}" ] ; then tar_file=$1 pkg_name=${1##*/} pkg_name=${pkg_name%#*} else pkg_cache "$1" || die "package has not been built, run 'cpt b pkg'" pkg_name=$1 fi mkdir -p "$tar_dir/$pkg_name" cd "$tar_dir/$pkg_name" log "$pkg_name" "Extracting $tar_file" # Extract the tarball to catch any errors before installation begins. decompress "$tar_file" | pax -rpp [ -f "./$pkg_db/$pkg_name/manifest" ] || die "'${tar_file##*/}' is not a valid CPT package" # Ensure that the tarball's manifest is correct by checking that # each file and directory inside of it actually exists. [ "$CPT_FORCE" != 1 ] && log "$pkg_name" "Checking package manifest" && while read -r line; do # Skip symbolic links [ -h "./$line" ] || [ -e "./$line" ] || { log "File $line missing from tarball but mentioned in manifest" "" "!>" tarball_fail=1 } done < "$pkg_db/$pkg_name/manifest" [ "$tarball_fail" ] && { log "You can still install this package by setting CPT_FORCE variable" die "$pkg_name" "Missing files in manifest" } log "$pkg_name" "Checking that all dependencies are installed" # Make sure that all run-time dependencies are installed prior to # installing the package. [ -f "$pkg_db/$pkg_name/depends" ] && [ "$CPT_FORCE" != 1 ] && while read -r dep dep_type || [ "$dep" ]; do [ "${dep##\#*}" ] || continue [ "$dep_type" ] || pkg_list "$dep" >/dev/null || install_dep="$install_dep'$dep', " done < "$pkg_db/$pkg_name/depends" [ "$install_dep" ] && die "$1" "Package requires ${install_dep%, }" run_hook pre-install "$pkg_name" "$tar_dir/$pkg_name" root pkg_conflicts "$pkg_name" log "$pkg_name" "Installing package incrementally" # Block being able to abort the script with Ctrl+C during installation. # Removes all risk of the user aborting a package installation leaving # an incomplete package installed. trap_set block # If the package is already installed (and this is an upgrade) make a # backup of the manifest and etcsums files. _manifest=$(_tmp_cp "$sys_db/$pkg_name/manifest" 2>/dev/null) ||: _etcsums=$(_tmp_cp "$sys_db/$pkg_name/etcsums" 2>/dev/null) ||: # This is repeated multiple times. Better to make it a function. 
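    # pkg_rsync() copies the extracted tarball over "$CPT_ROOT/" while
    # excluding '/etc' (which is handled separately by pkg_etc()); its
    # optional argument is passed straight to rsync, e.g. '--info=progress2'
    # on the first pass below.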
pkg_rsync() { rsync "--chown=$USER:$USER" --chmod=Du-s,Dg-s,Do-s \ -WhHKa --no-compress --exclude /etc "${1:---}" \ "$tar_dir/$pkg_name/" "$CPT_ROOT/" } # Install the package by using 'rsync' and overwrite any existing files # (excluding '/etc/'). pkg_rsync --info=progress2 pkg_etc # Remove any leftover files if this is an upgrade. "$grep" -vFxf "$sys_db/$pkg_name/manifest" "$_manifest" 2>/dev/null | while read -r file; do file=$CPT_ROOT/$file # Skip deleting some leftover files. case $file in /etc/*) continue; esac # Remove files. if [ -f "$file" ] && [ ! -L "$file" ]; then rm -f "$file" # Remove file symlinks. elif [ -h "$file" ] && [ ! -d "$file" ]; then unlink "$file" ||: # Skip directory symlinks. elif [ -h "$file" ] && [ -d "$file" ]; then : # Remove directories if empty. elif [ -d "$file" ]; then rmdir "$file" 2>/dev/null ||: fi done ||: log "$pkg_name" "Verifying installation" { pkg_rsync; pkg_rsync; } ||: # Reset 'trap' to its original value. Installation is done so # we no longer need to block 'Ctrl+C'. trap_set cleanup if [ -x "$sys_db/$pkg_name/post-install" ]; then log "$pkg_name" "Running post-install script" "$sys_db/$pkg_name/post-install" ||: fi run_hook post-install "$pkg_name" "$sys_db/$pkg_name" root log "$pkg_name" "Installed successfully" } pkg_repository_update() { # Function to update the given package repository. cd "$1" repo_type=$(pkg_vcs_info) repo_root=${repo_type#$PWD:} repo_type=${repo_type##*:} repo_root=${repo_root%:*} contains "$repos" "$repo_root" || { repos="$repos $repo_root " cd "$repo_root" "pkg_vcs_pull_$repo_type" # Repositories can contain a "Message of the Day" file in order to # relay important information to their users. ! [ -r "$repo_root/MOTD" ] || { printf '%s\n%s\n%s\n\n' \ "$(_multiply_char '=' 60)" \ "Message of the Day [$PWD]" \ "$(_multiply_char '=' 60)" cat "$repo_root/MOTD" printf '\n%s\n' "$(_multiply_char '=' 60)" } } } pkg_vcs_clone_git() { # $1: Clone URL # $2: Branch or Commit Object git init git remote add origin "${1%[#@]*}" case $2 in @*) git fetch -t --depth=1 origin "${2#@}" || git fetch ;; *) git fetch --depth=1 origin "$2" || git fetch esac git checkout "${2:-FETCH_HEAD}" } pkg_vcs_clone_hg() { # $1: Clone URL # $2: Branch or Commit Object hg clone -u "${2:-tip}" "${1%[#@]*}" . } pkg_vcs_clone_fossil() { # $1: Clone URL # $2: Branch or Commit Object fossil open -f "${1%[#@]*}" "${2:-trunk}" } pkg_vcs_pull_fossil() { # Pull function for Fossil. log "$PWD" " " [ "$(fossil remote 2>/dev/null)" != off ] || { out "No remote, skipping." return 0 } # Ensure we have proper permissions to do the pull operation. if [ -w "$PWD" ] && [ "$uid" != 0 ]; then fossil pull fossil update else pkg_vcs_as_root "fossil pull && fossil update" fi } pkg_vcs_pull_git() { # Pull function for Git. if [ "$(git remote 2>/dev/null)" ]; then # Display a message if signing is enabled for this repository. case $(git config merge.verifySignatures) in true) log "$PWD" "[signed] " ;; *) log "$PWD" " " ;; esac # Ensure we have proper permissions to do the pull operation. if [ -w "$PWD" ] && [ "$uid" != 0 ]; then git fetch git merge git submodule update --remote --init -f else pkg_vcs_as_root \ "git fetch && git merge && git submodule update --remote --init -f" fi else log "$PWD" " " # Skip if there are no submodules [ -f .gitmodules ] || { out "No remote, skipping." 
            return 0
        }

        if [ -w "$PWD" ] && [ "$uid" != 0 ]; then
            git submodule update --remote --init -f
        else
            pkg_vcs_as_root "git submodule update --remote --init -f"
        fi
    fi
}

pkg_vcs_pull_hg() {
    # Pull function for Mercurial.
    log "$PWD" " "

    [ "$(hg showconfig paths 2>/dev/null)" ] || {
        out "No remote, skipping."
        return 0
    }

    if [ -w "$PWD" ] && [ "$uid" != 0 ]; then
        hg pull
        hg update
    else
        pkg_vcs_as_root "hg pull && hg update"
    fi
}

pkg_vcs_pull_rsync() {
    # Pull function for rsync repositories. The details of our rsync
    # repositories are explained in the user manual.
    log "$PWD" " "

    # Read remote repository address from the '.rsync' file.
    read -r remote < .rsync

    if [ -w "$PWD" ] && [ "$uid" != 0 ]; then
        rsync -acvzzC --include=core --delete "$remote/" "$PWD"
    else
        pkg_vcs_as_root "rsync -acvzzC --include=core --delete \"$remote/\" \"$PWD\""
    fi
}

pkg_vcs_pull_local() {
    # Local repository. We don't do a "pull" here, we just notify the user that
    # this is the case.
    log "$PWD" " "
    out "Not a remote repository, skipping."
}

pkg_vcs_as_root() (
    # Helper function for pkg_vcs_pull* functions used for proper
    # privilege escalation.
    [ "$uid" = 0 ] || log "$PWD" "Need root to update"

    # Find out the owner of the repository and spawn the operation as the user
    # below.
    #
    # This prevents the VCS from changing the original ownership of files and
    # directories in the rare case that the repository is owned by a third user.
    user=$(_stat "$PWD")

    [ "$user" = root ] || log "Dropping permissions to $user for pull"

    case ${su##*/} in su) set -- "'$1'"; esac

    # Spawn a subshell to run multiple commands as root at once. This makes
    # things easier on users who aren't using persist/timestamps for auth
    # caching.
    as_root sh -c "$@"
)

pkg_vcs_info() {
    # Find and return repository information for the current directory. It
    # returns the current directory, the repository root, and the repository
    # type in a colon-separated format.
    : "${repo_file:=$cac_dir/repository-cache}"

    set --
    if [ "$CPT_REPO_CACHE" != 0 ] && information=$(grep "^$PWD:" "$repo_file" 2>/dev/null); then
        # Repository information is already cached.
        printf '%s\n' "$information" | sed 1q
        return

    elif rootdir=$(git rev-parse --show-toplevel 2>/dev/null); then
        # Git repository
        backend=git

    elif rootdir=$(hg root 2>/dev/null); then
        # Mercurial repository
        backend=hg

    elif rootdir=$(fossil info 2>/dev/null | grep ^local-root:); then
        # Fossil repository
        backend=fossil

        # We want to remove the initial spacing before the root directory, and
        # the trailing slash on the root directory.
        rootdir=${rootdir#local-root: *}
        rootdir=${rootdir%/}

    elif [ -f .rsync ]; then
        backend=rsync
        rootdir=$PWD

        # If an .rsync_root file exists, we check that the repository root
        # exists. If it does, we change to that directory to do the fetch.
        # This way, we allow for partial repositories while making sure that
        # we can fetch the repository in a single operation.
        [ -f .rsync_root ] && {
            read -r rsync_root < .rsync_root
            [ -f "$rsync_root/.rsync" ] && rootdir=$(_readlinkf "$rsync_root")
        }

    else
        # Local repository
        backend=local
        rootdir=$PWD
    fi

    # We cache all of this information so that we don't have to spend much
    # time looking it up the next time around. If CPT_REPO_CACHE is set to 0,
    # we will not write this cache.
    [ "$CPT_REPO_CACHE" = 0 ] || set -- "$repo_file"
    printf '%s:%s:%s\n' "$PWD" "$rootdir" "$backend" | tee -a "$@"
}

pkg_fetch() {
    log "Updating repositories"

    run_hook pre-fetch

    # Create a list of all repositories.
    # See [1] at top of script.
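    #
    # Illustrative example (hypothetical layout): with
    # CPT_PATH=/repos/core:/repos/extra, pkg_repository_update() is called
    # once for each entry, and entries sharing the same repository root are
    # only pulled once.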
# shellcheck disable=2046,2086 { IFS=:; set -- $CPT_PATH; IFS=$old_ifs ;} # Update each repository in '$CPT_PATH'. It is assumed that # each repository is 'git' tracked. for repo; do pkg_repository_update "$repo"; done run_hook post-fetch } pkg_updates(){ # Check all installed packages for updates. So long as the installed # version and the version in the repositories differ, it's considered # an update. [ "$CPT_FETCH" = 0 ] || pkg_fetch # Be quiet if we are doing self update, no need to print the same # information twice. We add this basic function, because we will be using it # more than once. _not_update () { [ "$cpt_self_update" ] || "$@" ;} _not_update log "Checking for new package versions" set +f for pkg in "$sys_db/"*; do pkg_name=${pkg##*/} # Read version and release information from the installed packages # and repository. read -r db_ver db_rel < "$pkg/version" read -r re_ver re_rel < "$(pkg_find "$pkg_name")/version" # Compare installed packages to repository packages. [ "$db_ver-$db_rel" != "$re_ver-$re_rel" ] && { _not_update printf '%s\n' "$pkg_name $db_ver-$db_rel ==> $re_ver-$re_rel" outdated="$outdated$pkg_name " } done set -f # If the download option is specified only download the outdated packages # and exit. # shellcheck disable=2154 [ "$download_only" = 1 ] && { log "Only sources for the packages will be acquired" prompt for pkg in $outdated; do pkg_sources "$pkg" done exit 0 } [ "$outdated" ] || { log "Everything is up to date" return } _not_update log "Packages to update: ${outdated% }" contains "$outdated" cpt && { log "Detected package manager update" log "The package manager will be updated first" prompt pkg_build cpt cpt-install cpt log "Updated the package manager" log "Re-executing the package manager to continue the update" # We export this variable so that cpt knows it's running for the second # time. We make the new process promptless, and we avoid fetching # repositories. We are assuming that the user was already prompted once, # and that their repositories are up to date, or they have also passed # the '-y' or '-n' flags themselves which leads to the same outcome. export cpt_self_update=1 exec cpt-update -yn } # Tell 'pkg_build' to always prompt before build. pkg_update=1 # Build all packages requiring an update. # See [1] at top of script. # shellcheck disable=2046,2086 { pkg_order $outdated pkg_build $order } log "Updated all packages" } pkg_get_base() ( # Print the packages defined in the /etc/cpt-base file. # If an argument is given, it prints a space seperated list instead # of a list seperated by newlines. # cpt-base is an optional file, return with success if it doesn't exist. [ -f "$CPT_ROOT/etc/cpt-base" ] || return 0 # If there is an argument, change the format to use spaces instead of # newlines. format='%s\n' [ "$#" -eq 0 ] || format='%s '; set -- # Older versions of shellcheck warns us that the variable is changing on the # subshell. That is our purpose here, thank you very much. # shellcheck disable=SC2030 while read -r pkgname _; do # Ignore comments [ "${pkgname##\#*}" ] || continue # Store the package list in arguments set -- "$@" "$pkgname" # Retrieve the dependency tree of the package, so they are listed as # base packages too. This ensures that no packages are broken in a # "base reset", and the user has a working base. deps=$(pkg_gentree "$pkgname" xn) for dep in $deps; do contains "$*" "$dep" || set -- "$@" "$dep" done done < "$CPT_ROOT/etc/cpt-base" # Format variable is intentional. 
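    # Without an argument this prints one package per line; with any argument
    # it prints a single space-separated line (e.g. "musl busybox ..." for a
    # hypothetical base).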
# shellcheck disable=2059 printf "$format" "$@" ) pkg_gentree() ( # Generate an ordered dependency tree of a package. Useful for testing # whether the generated dependency tree is enough to actually building a # given package. A second argument can be given as a combination of # characters (similar to 'tar(1)' keys) which will be used as an option # parser. See the documentation for more information on the keys. deps='' reverse='' format='%s\n' make_deps=first for op in $(sepchar "$2"); do case "$op" in b) deps=$(pkg_get_base nonl) ;; f) make_deps='' ;; x) make_deps=first-nomake ;; r) reverse=1 ;; n) format='%s ' ;; *) die "pkg_gentree: Unknown key '$op'" esac done pkg_depends "$1" tree "$make_deps" # Unless 'f' is given, pop the package from the list so that we don't list # the package (for example if it's part of the base package list). Normally # this should happen only when 'b' is used, so getting the package when no # keys are supplied points to a circular dependency, which isn't supported # by the package manager. # # Word splitting is intentional. # shellcheck disable=2086 [ -z "${2##*f*}" ] || deps=$(pop "$1" from $deps) eval set -- "$deps" pkg_order "$@" if [ "$reverse" ]; then eval set -- "$redro"; else eval set -- "$order"; fi [ "$1" ] || return 0 # Format variable is intentional. # shellcheck disable=2059 printf "$format" "$@" ) pkg_query_meta() { # Query the 'meta' file of the given meta package. If there is no meta file, # or the key being queried is unavailable, the function will return with # error. Full path can be specified instead of package names. case $1 in */*) repo_dir=$1 ;; *) repo_dir=$(pkg_find "$1") esac [ -f "$repo_dir/meta" ] || return while IFS=': ' read -r key val; do case $key in "$2") printf '%s\n' "$val"; return 0 esac done < "$repo_dir/meta" return 1 } pkg_clean() { # Clean up on exit or error. This removes everything related # to the build. [ "$CPT_DEBUG" != 1 ] || return 0 # Block 'Ctrl+C' while cache is being cleaned. trap_set block # Remove temporary items. rm -rf -- "${CPT_TMPDIR:=$cac_dir/proc}/$pid" } _tmp_name() { # Name a temporary file/directory out "$tmp_dir/$1" } _tmp_cp() { # Copy given file to the temporary directory and return its name. If a # second argument is not given, use the basename of the copied file. _ret=${2:-${1##*/}} _ret=$(_tmp_name "$_ret") cp "$1" "$_ret" out "$_ret" } _tmp_create() { # Create given file to the temporary directory and return its name _ret=$(_tmp_name "$1") # False positive, we are not reading from the file. # shellcheck disable=2094 out "$_ret" 3>> "$_ret" } create_tmp() { # Create the required temporary directories and set the variables which # point to them. mak_dir=$tmp_dir/build pkg_dir=$tmp_dir/pkg tar_dir=$tmp_dir/export mkdir -p "$mak_dir" "$pkg_dir" "$tar_dir" } create_cache() { # DEPRECATED, use create_tmp() instead. # # If an argument is given, skip the creation of other cache directories. [ "$1" ] || create_tmp } # main() { set -ef # If a parser definition exists, let's run it ourselves. This makes sure we # get the variables as soon as possible. command -v parser_definition >/dev/null && { eval "$(getoptions parser_definition parse "$0")" parse "$@" eval set -- "$REST" } # The PID of the current shell process is used to isolate directories # to each specific CPT instance. This allows multiple package manager # instances to be run at once. Store the value in another variable so # that it doesn't change beneath us. 
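    # Illustrative example: two instances with (hypothetical) PIDs 1234 and
    # 1235 keep their temporary files under "$tdir/1234" and "$tdir/1235"
    # respectively, so they never clash.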
    pid=${CPT_PID:-$$}

    # A temporary directory can be specified apart from the cache directory in
    # order to build in a user specified directory. /tmp could be used in order
    # to build on RAM, useful on SSDs. The user can specify $CPT_TMPDIR for
    # this. We now also support the usage of $XDG_RUNTIME_DIR, so the directory
    # naming can be confusing to some. Here are possible $tdir names (by order
    # of preference):
    #
    # 1. $CPT_TMPDIR
    # 2. $XDG_RUNTIME_DIR/cpt
    # 3. $XDG_CACHE_HOME/cpt/proc
    # 4. $HOME/.cache/cpt/proc
    #
    # We create the main temporary directory here to avoid permission issues
    # that can arise from functions that call as_root(). However, the
    # $pid directories are special for each process and aren't created unless
    # `create_tmp()` is used.
    #
    # We used to assign and create the directories at the same time using a
    # shell hack, but it made the variables editable outside of the package
    # manager, and we don't actually want that. Variables that are lower case
    # aren't meant to be interacted with or set by the user.
    cac_dir=${CPT_CACHE:=${XDG_CACHE_HOME:-${HOME:?}/.cache}}/cpt
    src_dir=$cac_dir/sources
    log_dir=$cac_dir/logs
    bin_dir=$cac_dir/bin
    tdir=${CPT_TMPDIR:=${XDG_RUNTIME_DIR:-$cac_dir/proc}${XDG_RUNTIME_DIR:+/cpt}}
    tmp_dir=$tdir/$pid

    mkdir -p "$cac_dir" "$src_dir" "$log_dir" "$bin_dir" "$tdir"

    # Set the location to the repository and package database.
    pkg_db=var/db/cpt/installed

    # Force the C locale to speed up things like 'grep' which disable unicode
    # etc when this is set. We don't need unicode and a speed up is always
    # welcome.
    export LC_ALL=C LANG=C

    # Catch errors and ensure that build files and directories are cleaned
    # up before we die. This occurs on 'Ctrl+C' as well as success and error.
    trap_set cleanup

    # Prefer GNU grep if installed as it is much much faster than busybox's
    # implementation. Very much worth it if you value performance over
    # POSIX correctness (grep quoted to avoid shellcheck false-positive).
    grep=$(command -v ggrep) || grep='grep'

    # Figure out which 'sudo' command to use based on the user's choice or
    # what is available on the system.
    su=${CPT_SU:-$(command -v ssu || command -v doas || command -v sudo || command -v sls)} || su=su

    # Store the date and time of script invocation to be used as the name
    # of the log files the package manager creates during builds.
    time=$(date '+%Y-%m-%d-%H:%M')

    # Use readelf for fixing dependencies if it is available, falling back to
    # ldd. readelf shows only the actual dependencies and doesn't include
    # the libraries required by the dependencies.
    elf_prog=${CPT_ELF:="$(command -v readelf || command -v llvm-readelf || command -v eu-readelf)"} || elf_prog=ldd

    # Use one of the following programs to download package sources. Downloads
    # are made using the `pkg_download()` function.
    dl_prog=${CPT_DOWNLOADER:="$(command -v curl || command -v wget || command -v wget2 || command -v axel || command -v aria2c)"} || dl_prog=curl

    # Make note of the user's current ID to do root checks later on.
    # This is used enough to warrant a place here.
    uid=$(id -u)

    # Save IFS, so we can restore it back to what it was before.
    old_ifs=$IFS

    # Make sure that the CPT_ROOT doesn't end with a '/'. This might
    # break some operations.
    CPT_ROOT=${CPT_ROOT%"${CPT_ROOT##*[!/]}"}

    # Define an optional sys_arch variable in order to provide
    # information to build files with architectural information.
    sys_arch=$(uname -m 2>/dev/null) ||:

    # Define this variable but don't create its directory structure from
    # the get go. It will be created as needed by package installation.
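    #
    # With the default settings this resolves to '/var/db/cpt/installed',
    # prefixed by $CPT_ROOT when an alternate root is in use.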
sys_db=$CPT_ROOT/$pkg_db # Regular expression used in pkg_checksums() and pkg_sources() in order to # identify VCS and comments re_vcs_or_com='^(#|(fossil|git|hg)\+)' # This allows for automatic setup of a CPT chroot and will # do nothing on a normal system. mkdir -p "$CPT_ROOT/" 2>/dev/null ||: # Set the default compression to gzip, and warn the user if the value is # invalid. case ${CPT_COMPRESS:=gz} in bz2|gz|xz|zst|lz) ;; *) warn "'$CPT_COMPRESS' is not a valid CPT_COMPRESS value, falling back to 'gz'" CPT_COMPRESS=gz esac # Set colors if they are to be enabled. # shellcheck disable=2034 colors_enabled && colory="\033[1;33m" colorb="\033[1;34m" colre="\033[m" colbold="\033[1m" } # If the library is being called with its own name, run arguments. if [ "${0##*/}" = cpt-lib ]; then pd() { setup REST help:usage -- "usage: ${0##*/} [funcall...]" global_options } eval "$(getoptions pd parse "$0")" parse "$@" eval set -- "$REST" "$@" fi
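
# Illustrative usage when this file is installed in PATH as 'cpt-lib':
# the remaining arguments are run as a function call, e.g. (hypothetical):
#
#   cpt-lib pkg_find zlib    # print the repository path of a package
#   cpt-lib pkg_list         # list installed packages and their versions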