Diffstat (limited to 'src/cpt-lib.in')
-rw-r--r--  src/cpt-lib.in  659
1 file changed, 465 insertions, 194 deletions
diff --git a/src/cpt-lib.in b/src/cpt-lib.in
index c055331..1896920 100644
--- a/src/cpt-lib.in
+++ b/src/cpt-lib.in
@@ -9,7 +9,7 @@
# Currently maintained by Cem Keylan.
version() {
- out "Carbs Packaging Tools, version @VERSION@" \
+ out "Carbs Packaging Tools, version $cpt_version" \
@LICENSE@
exit 0
@@ -25,11 +25,12 @@ log() {
#
# All messages are printed to stderr to allow the user to hide build
# output which is the only thing printed to stdout.
- #
- # '${3:-->}': If the 3rd argument is missing, set prefix to '->'.
- # '${2:+colorb}': If the 2nd argument exists, set text style of '$1'.
- printf '%b%s %b%b%s%b %s\n' \
- "$colory" "${3:-->}" "$colre" "${2:+$colorb}" "$1" "$colre" "$2" >&2
+ case $# in
+ 1) printf '%b->%b %s\n' "$colory" "$colre" "$1" ;;
+ 2) printf '%b->%b %b%s%b %s\n' "$colory" "$colre" "$colorb" "$1" "$colre" "$2" ;;
+ 3) printf '%b%s%b %b%s%b %s\n' "$colory" "${3:-->}" "$colre" "$colorb" "$1" "$colre" "$2" ;;
+ *) return 1
+ esac >&2
}
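
For reference, the rewritten log() keeps the previous calling styles; with
colors disabled the three arities print to stderr roughly as follows
(hypothetical package name):

    log "Checking for updates"         # -> Checking for updates
    log zlib "Building package"        # -> zlib Building package
    log zlib "Checksum mismatch" "!>"  # !> zlib Checksum mismatch
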
warn() {
@@ -55,12 +56,135 @@ warnv() {
warn "$@"
}
+execv() {
+ # Redirect the output to /dev/null unless CPT_VERBOSE is set to 1.
+ if [ "$CPT_VERBOSE" = 1 ]; then "$@"; else "$@" >/dev/null 2>&1; fi
+}
+
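
A usage sketch: execv() wraps noisy commands so their output is only shown in
verbose mode, e.g. the call used later in pkg_fix_deps():

    execv diff -U 3 "$dep_file" depends   # output discarded unless CPT_VERBOSE=1
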
die() {
# Print a message and exit with '1' (error).
log "$1" "$2" "!>"
exit 1
}
+colors_enabled() {
+ case ${CPT_COLOR:=auto} in
+ auto) [ -t 1 ] ;;
+ 1|always) return 0 ;;
+ 0|never) return 1 ;;
+ *) die "Unknown color value: '$CPT_COLOR'"
+ esac
+}
+
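
A sketch of how the CPT_COLOR values behave (hypothetical invocations of a cpt
utility):

    CPT_COLOR=always cpt-list   # colorize even when piped
    CPT_COLOR=never  cpt-list   # never colorize
    cpt-list                    # 'auto': colorize only if stdout is a tty
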
+_dep_append() {
+ dep_graph=$(printf '%s\n%s %s\n' "$dep_graph" "$@" ;)
+}
+
+_tsort() {
+ # Return a linear reverse topological sort of the piped input, so we
+ # generate a proper build order. Returns 1 if a dependency cycle occurs.
+ #
+ # I was really excited when I saw POSIX specified a tsort(1) implementation,
+ # but the specification is quite vague: it doesn't specify cycles as a
+ # reason for error, and implementations differ on how they are handled. coreutils
+ # tsort(1) exits with an error, while OpenBSD tsort(1) doesn't. Both
+ # implementations are correct according to the specification.
+ #
+ # The script below was taken from <https://gist.github.com/apainintheneck/1803fb91dde3ba048ec51d44fa6065a4>
+ #
+ # The MIT License (MIT)
+ # Copyright (c) 2023 Kevin Robell
+ #
+ # Permission is hereby granted, free of charge, to any person obtaining a
+ # copy of this software and associated documentation files (the “Software”),
+ # to deal in the Software without restriction, including without limitation
+ # the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ # and/or sell copies of the Software, and to permit persons to whom the
+ # Software is furnished to do so, subject to the following conditions:
+ #
+ # The above copyright notice and this permission notice shall be included in
+ # all copies or substantial portions of the Software.
+ #
+ # THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+ # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ # DEALINGS IN THE SOFTWARE.
+ awk '{
+ for (i = 1; i <= NF; ++i) {
+ # Store each node.
+ nodes[$i] = 1
+ if (is_child) {
+ child = $i
+ # Skip nodes that point to themselves.
+ # This traditionally means that the node
+ # is disconnected from the rest of the graph.
+ if (parent != child) {
+ # Store from parent to child.
+ idx = ++child_count[parent]
+ child_graph[parent, idx] = child
+ # Store count from child to parent.
+ ++parent_count[child]
+ }
+ } else {
+ parent = $i
+ }
+ # Flip switch
+ is_child = !is_child
+ }
+ }
+ END {
+ # Print errors to the stderr
+ stderr = "/dev/stderr"
+
+ # Sanity Check
+ if (is_child) {
+ print("Error: odd number of input values: expected pairs of values") > stderr
+ exit(1)
+ }
+
+ #####
+ # Topological Sort
+ #####
+
+ # Remove unconnected nodes first.
+ for (node in nodes) {
+ if (parent_count[node] == 0 && child_count[node] == 0) {
+ delete nodes[node]
+ print(node)
+ }
+ }
+
+ # Remove the rest of the nodes starting with those without parents.
+ while (length(nodes) > 0) {
+ removed_node = 0
+ for (node in nodes) {
+ # Delete and print nodes without any remaining parents.
+ if (parent_count[node] == 0) {
+ delete nodes[node]
+ removed_node = 1
+ # Decrease child_count for each parent node.
+ for (i = child_count[node]; i > 0; --i) {
+ child = child_graph[node, i]
+ --parent_count[child]
+ }
+ print(node)
+ }
+ }
+
+ # If we haven't removed any nodes, it means that there
+ # are no nodes without any remaining parents, so we have
+ # a cycle.
+ if (!removed_node) {
+ print("Error: Cycle found") > stderr
+ exit(1)
+ }
+ }
+ }'
+}
+
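
As an illustration with made-up package names, the 'dependency package' pairs
appended by _dep_append() come out of _tsort with dependencies listed before
the packages that need them:

    printf '%s\n' 'zlib curl' 'libressl curl' 'curl wget' | _tsort
    # -> zlib, libressl, curl, wget (order of independent nodes may vary)
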
trap_set() {
# Function to set the trap value.
case ${1:-cleanup} in
@@ -76,20 +200,16 @@ trap_set() {
esac
}
-sepchar() (
+sepchar() {
 # Separate every character of the given string without resorting to external
 # processes.
[ "$1" ] || return 0; str=$1; set --
while [ "$str" ]; do
- str_tmp=$str
- for i in $(_seq $(( ${#str} - 1 ))); do
- str_tmp=${str_tmp%?}
- done
- set -- "$@" "$str_tmp"
- str=${str#$str_tmp}
+ set -- "$@" "${str%"${str#?}"}"
+ str=${str#?}
done
printf '%s\n' "$@"
-)
+}
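
The rewritten loop peels off the first character with ${str%"${str#?}"} instead
of counting with _seq; for example:

    sepchar abc   # prints 'a', 'b' and 'c' on separate lines
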
_re() {
# Check that the string supplied in $2 conforms to the regular expression
@@ -120,7 +240,7 @@ _stat() (
printf '%s' "$_user"
)
-_readlinkf() (
+_readlinkf() {
# Public domain POSIX sh readlink function by Koichi Nakashima
[ "${1:-}" ] || return 1
max_symlinks=40
@@ -154,7 +274,7 @@ _readlinkf() (
target=${link#*" $target -> "}
done
return 1
-)
+}
_get_digest() {
# Get digest algorithm from the given file. It looks for a header on the
@@ -176,7 +296,7 @@ _get_digest() {
# function.
# URL: https://github.com/ko1nksm/getoptions (v2.5.0)
# License: Creative Commons Zero v1.0 Universal
-# shellcheck disable=2016,2086
+# shellcheck disable=2016,2086,2317
getoptions() {
_error='' _on=1 _off='' _export='' _plus='' _mode='' _alt='' _rest=''
_flags='' _nflags='' _opts='' _help='' _abbr='' _cmds='' _init=@empty IFS=' '
@@ -371,6 +491,7 @@ getoptions() {
}
# URL: https://github.com/ko1nksm/getoptions (v2.5.0)
# License: Creative Commons Zero v1.0 Universal
+# shellcheck disable=2317
getoptions_help() {
_width='30,12' _plus='' _leading=' '
@@ -415,14 +536,34 @@ getoptions_help() {
echo "}"
}
+# 2086:
+# The lack of quotes is intentional. We do this so `getoptions()` does not try
+# to parse the empty string.
+# 2120:
+# The library does not call this function with any positional arguments, but
+# that does not mean that other programs will not do it, so this can also be
+# safely ignored.
+# shellcheck disable=2086,2120
global_options() {
- msg -- '' 'Global Options:'
- flag CPT_FORCE -f --force init:@export -- "Force operation"
- flag CPT_PROMPT -y --no-prompt on:0 off:0 init:@export -- "Do not prompt for confirmation"
- param CPT_ROOT --root init:@export -- "Use an alternate root directory"
- disp :usage -h --help -- "Show this help message"
- disp :version -v --version -- "Print version information"
- flag CPT_VERBOSE --verbose init:@export -- "Be more verbose"
+ # These are options that are supported by most utilities. If the optional
+ # argument 'silent' is given, the usage will not print these options, but
+ # the arguments will still be accepted. Alternatively, if the 'compact'
+ # argument is given, the function only prints the '--help' and '--version'
+ # flags. Sometimes it doesn't make sense to pollute the screen with options
+ # that will rarely be used.
+ _h=hidden:1
+ case $1 in
+ silent) _c=$_h ;;
+ compact) _c='' ;;
+ *) msg -- '' 'Global Options:'; _c='' _h=''
+ esac
+ flag CPT_FORCE -f --force $_h init:@export -- "Force operation"
+ flag CPT_PROMPT -y --no-prompt on:0 off:0 $_h init:@export -- "Do not prompt for confirmation"
+ param CPT_ROOT --root $_h init:@export -- "Use an alternate root directory"
+ param CPT_COLOR --color $_h init:@export -- "Colorize the output [default:auto]"
+ disp :usage -h --help $_c -- "Show this help message"
+ disp :version -v --version $_c -- "Print version information"
+ flag CPT_VERBOSE --verbose $_h init:@export -- "Be more verbose"
}
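
A minimal sketch of how a utility's parser definition might use the new modes
(hypothetical definition; setup/msg/flag come from the bundled getoptions
library):

    parser_definition() {
        setup REST -- "usage: cpt-foo [options] [pkg...]"
        msg -- '' 'Options:'
        flag some_flag -s --some-flag -- "Utility-specific flag"
        global_options silent   # accept -f/-y/--root/... but hide them from usage
    }
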
contains() {
@@ -440,11 +581,12 @@ regesc() {
pkg_download() {
# $1: URL
# $2: Output (Optional)
- set -- "$1" "${2:-${1##*/}}"
+ set -- "$1" "$(_readlinkf "${2:-${1##*/}}")"
case ${dl_prog##*/} in
- aria2c|axel) set -- -o "$2" "$1" ;;
- curl) set -- -fLo "$2" "$1" ;;
- wget|wget2) set -- -O "$2" "$1" ;;
+ axel) set -- -o "$2" "$1" ;;
+ aria2c) set -- -d "${2%/*}" -o "${2##*/}" "$1" ;;
+ curl) set -- -fLo "$2" "$1" ;;
+ wget|wget2) set -- -O "$2" "$1" ;;
esac
"$dl_prog" "$@" || {
@@ -476,6 +618,9 @@ as_root() {
# We are exporting package manager variables, so that we still have the
# same repository paths / access to the same cache directories etc.
+ #
+ # It doesn't matter whether CPT_HOOK is defined or not.
+ # shellcheck disable=2153
set -- HOME="$HOME" \
USER="$user" \
XDG_CACHE_HOME="$XDG_CACHE_HOME" \
@@ -517,23 +662,38 @@ pop() {
}
run_hook() {
- # Store the CPT_HOOK variable so that we can revert it if it is changed.
- oldCPT_HOOK=$CPT_HOOK
-
- # If a fourth parameter 'root' is specified, source the hook from a
- # predefined location to avoid privilige escalation through user scripts.
- [ "$4" ] && CPT_HOOK=$CPT_ROOT/etc/cpt-hook
-
- [ -f "$CPT_HOOK" ] || { CPT_HOOK=$oldCPT_HOOK; return 0 ;}
+ # Check that hooks exist before announcing that we are running a hook.
+ set +f
+ for hook in "$cpt_confdir/hooks/"* "$CPT_HOOK"; do
+ [ -f "$hook" ] && {
+ if [ "$2" ]; then
+ logv "$2" "Running $1 hook"
+ else
+ logv "Running $1 hook"
+ fi
+ break
+ }
+ done
- if [ "$2" ]; then
- logv "$2" "Running $1 hook"
- else
- logv "Running $1 hook"
- fi
+ # Run all the hooks found in the configuration directory, and the user
+ # defined hook.
+ for hook in "$cpt_confdir/hooks/"* "$CPT_HOOK"; do
+ set -f
+ [ -f "$hook" ] || continue
+ TYPE=${1:-null} PKG=${2:-null} DEST=${3:-null} . "$hook"
+ done
+}
- TYPE=${1:-null} PKG=${2:-null} DEST=${3:-null} . "$CPT_HOOK"
- CPT_HOOK=$oldCPT_HOOK
+# An optional argument can be provided to enforce a compression algorithm.
+# shellcheck disable=2120
+compress() {
+ case ${1:-$CPT_COMPRESS} in
+ bz2) bzip2 -z ;;
+ gz) gzip -6 ;;
+ xz) xz -zT 0 ;;
+ zst) zstd -3 ;;
+ lz) lzip -6 ;;
+ esac
}
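
For example, pkg_tar() below pipes pax through compress with no argument so
that $CPT_COMPRESS decides, while a caller could force an algorithm
(hypothetical second line):

    pax -w . | compress     > "pkg.tar.$CPT_COMPRESS"
    pax -w . | compress zst > pkg.tar.zst
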
decompress() {
@@ -574,6 +734,18 @@ pkg_owner() {
[ "$1" ] && printf '%s\n' "$1"
}
+pkg_owner_multi() {
+ set +f
+
+ [ "$3" ] || set -- "$1" "$2" "$sys_db"/*/manifest
+
+ grep "$@" | while read -r pkg_owner; do
+ pkg_owner=${pkg_owner%/*}
+ pkg_owner=${pkg_owner##*/}
+ printf '%s\n' "${pkg_owner##*/}"
+ done
+}
+
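
A sketch with a hypothetical path: the arguments are passed straight to grep
over every installed manifest, and matching manifest paths are reduced to
package names:

    pkg_owner_multi -lx /usr/bin/cc   # prints every package whose manifest owns /usr/bin/cc
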
pkg_isbuilt() (
# Check if a package is built or not.
read -r ver rel < "$(pkg_find "$1")/version"
@@ -755,7 +927,10 @@ pkg_extract() {
# VCS Repository
git+*|hg+*|fossil+*)
backend=${src%%+*}
- url=${src##${backend}+} com=${url##*[@#]} com=${com#${url%[@#]*}}
+ url=${src##"${backend}"+} com=${url##*[@#]} com=${com#"${url%[@#]*}"}
+
+ # Add back @ to com
+ case $url in *@*) com=@$com; esac
log "$1" "Cloning ${url%[#@]*}"
"pkg_vcs_clone_$backend" "${url%[#@]*}" "$com"
@@ -798,12 +973,10 @@ pkg_depends() {
# Resolve all dependencies and generate an ordered list.
# This does a depth-first search. The deepest dependencies are
# listed first and then the parents in reverse order.
- contains "$deps" "$1" || {
- # Filter out non-explicit, aleady installed dependencies.
- # Only filter installed if called from 'pkg_build()'.
- [ "$pkg_build" ] && [ -z "$2" ] &&
- (pkg_list "$1" >/dev/null) && return
-
+ #
+ # shellcheck disable=2015
+ contains "$pkgs" "$1" && [ -z "$2" ] || {
+ [ "$2" = raw ] && _dep_append "$1" "$1"
while read -r dep type || [ "$dep" ]; do
# Skip comments and empty lines.
[ "${dep##\#*}" ] || continue
@@ -816,6 +989,16 @@ pkg_depends() {
make) [ "$2" = tree ] && [ -z "${3#first-nomake}" ] && continue
esac
+ # Filter out non-explicit, already installed dependencies if called
+ # from 'pkg_build()'.
+ [ "$pkg_build" ] && (pkg_list "$dep" >/dev/null) && continue
+
+ if [ "$2" = explicit ] || [ "$3" ]; then
+ _dep_append "$dep" "$dep"
+ else
+ _dep_append "$dep" "$1"
+ fi
+
# Recurse through the dependencies of the child packages. Forward
# the 'tree' operation.
if [ "$2" = tree ]; then
@@ -825,12 +1008,15 @@ pkg_depends() {
fi
done 2>/dev/null < "$(pkg_find "$1")/depends" ||:
- # After child dependencies are added to the list,
- # add the package which depends on them.
- [ "$2" = explicit ] || [ "$3" ] || deps="$deps $1 "
+ pkgs="$pkgs $1 "
}
}
+pkg_depends_commit() {
+ # Set deps, and clean up dep_graph and pkgs.
+ deps=$(printf '%s\n' "$dep_graph" | _tsort) dep_graph='' pkgs='' || warn "Dependency cycle detected"
+}
+
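
To illustrate with made-up packages, after 'pkg_depends curl raw' the graph
might hold the edges below; pkg_depends_commit then turns them into a build
order (or warns if _tsort detects a cycle):

    # dep_graph:
    #   curl curl
    #   zlib curl
    #   libressl curl
    # deps after pkg_depends_commit: zlib, libressl, curl (one per line)
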
pkg_order() {
# Order a list of packages based on dependence and
# take into account pre-built tarballs if this is
@@ -838,9 +1024,10 @@ pkg_order() {
order=; redro=; deps=
for pkg do case $pkg in
- *.tar.*) deps="$deps $pkg " ;;
+ *.tar.*) _dep_append "$pkg" "$pkg" ;;
*) pkg_depends "$pkg" raw
esac done
+ pkg_depends_commit
# Filter the list, only keeping explicit packages.
# The purpose of these two loops is to order the
@@ -858,7 +1045,7 @@ pkg_strip() {
# system as well as on the tarballs we ship for installation.
# Package has stripping disabled, stop here.
- [ -f "$mak_dir/$pkg/nostrip" ] && return
+ [ "$CPT_NOSTRIP" ] || [ -f "$mak_dir/$pkg/nostrip" ] && return
log "$1" "Stripping binaries and libraries"
@@ -883,9 +1070,12 @@ pkg_strip() {
pkg_fix_deps_fullpath() {
# Return the canonical path of libraries extracted by readelf.
- while read -r dep _ rslv _; do
- [ "$dep" = "$1" ] || continue
- printf '%s\n' "$rslv"
+ while read -r line _ rslv _; do
+ [ "$line" = "$1" ] || continue
+ case $rslv in
+ ldd) out "$line" ;;
+ *) out "$rslv" ;;
+ esac
done
}
@@ -899,11 +1089,10 @@ pkg_fix_deps() {
# simplify path building.
cd "$pkg_dir/$1/$pkg_db/$1"
- # Make a copy of the depends file if it exists to have a
- # reference to 'diff' against.
+ # Make a copy of the depends file if it exists to have a reference to 'diff'
+ # against.
if [ -f depends ]; then
- cp -f depends "$mak_dir/d"
- dep_file=$mak_dir/d
+ dep_file=$(_tmp_cp depends)
else
dep_file=/dev/null
fi
@@ -912,52 +1101,72 @@ pkg_fix_deps() {
pkg_name=$1
set +f; set -f -- "$sys_db/"*/manifest
- # Get a list of binaries and libraries, false files
- # will be found, however it's faster to get 'ldd' to check
- # them anyway than to filter them out.
- find "$pkg_dir/$pkg_name/" -type f 2>/dev/null |
+ # We create two separate files for storing dependency information.
+ #
+ # 'lddfile' is where we will be storing the output of ldd, so that we can
+ # reference it later.
+ #
+ # 'dep_file_list' is where we will be listing the needed files which we will
+ # be passing to grep.
+ #
+ lddfile=$(_tmp_create lddfile) dep_file_list=$(_tmp_create dfl)
- while read -r file; do
- # We call ldd regardless here, because we also use it to retrieve the
- # fullpath of a library when using readelf. Best use we have here is
- # saving it in a buffer, so we don't use the dynamic loader everytime we
- # need to reference it.
- lddbuf=$(ldd -- "$file" 2>/dev/null) ||:
-
- case ${elf_prog:-ldd} in
- *readelf) "$elf_prog" -d "$file" 2>/dev/null ;;
- *) pirntf '%s\n' "$lddbuf" ;;
- esac |
- while read -r dep; do
- # Skip lines containing 'ldd'.
- [ "${dep##*ldd*}" ] || continue
- case $dep in *NEEDED*\[*\] | *'=>'*) ;; *) continue; esac
-
- # readelf output:
- # 0x0000 (NEEDED) Shared library: [libc.so]
- dep=${dep##*\[}
- dep=${dep%%\]*}
-
- # Retrieve the fullpath of the library from our ldd buffer.
- case $elf_prog in
- *readelf) line=$(printf '%s\n' "$lddbuf" |
- pkg_fix_deps_fullpath "$line")
- esac
+ pkg_fix_deps_find() {
+ # We run a similar command twice, so it might as well be a function.
+ # Basically runs find on the package directory and executes the
+ # given command.
+ end=+; [ "$1" = ldd ] && end=';'
- # ldd output:
- # libc.so => /lib/ld-musl-x86_64.so.1
- dep=${dep#* => }
- dep=${dep% *}
-
- # Figure out which package owns the file. Skip file if it is owned
- # by the current package. This also handles cases where a '*-bin'
- # package exists on the system, so the package manager doesn't think
- # that the package we are building depends on the *-bin version of
- # itself, or any other renamed versions of the same software.
- pkg_owner -l "/${dep#/}\$" "$PWD/manifest" >/dev/null && continue
- pkg_owner -l "/${dep#/}\$" "$@" ||:
- done ||:
- done >> depends
+ # Get a list of binaries and libraries, false files will be found,
+ # however it's faster to get 'ldd' to check them anyway than to filter
+ # them out.
+ #
+ # We are terminating exec, so no worries.
+ # shellcheck disable=2067
+ find "$pkg_dir/$pkg_name/" -type f -exec "$@" {} "$end" 2>/dev/null |
+ sed 's/([^)]*) *$//' | sort -u
+ }
+
+ # Record all the dependencies in the 'lddfile'. This will include all
+ # dependencies, including non-direct ones. Unless the user prefers ldd,
+ # readelf will be used to filter the non-direct dependencies out.
+ pkg_fix_deps_find ldd -- > "$lddfile"
+
+ case "$elf_prog" in
+ *readelf) pkg_fix_deps_find "$elf_prog" -d ;;
+ *) cat "$lddfile"
+ esac | while read -r dep; do
+ # Skip lines containing 'ldd'.
+ [ "${dep##*ldd*}" ] || continue
+ case $dep in *NEEDED*\[*\] | *'=>'*) ;; *) continue; esac
+
+ # readelf output:
+ # 0x0000 (NEEDED) Shared library: [libc.so]
+ dep=${dep##*\[}
+ dep=${dep%%\]*}
+
+ # Retrieve the fullpath of the library from our ldd buffer.
+ case $elf_prog in
+ *readelf) dep=$(pkg_fix_deps_fullpath "$dep" < "$lddfile")
+ esac
+
+ # ldd output:
+ # libc.so => /lib/ld-musl-x86_64.so.1
+ dep=${dep#* => }
+ dep=${dep% *}
+
+ # Figure out which package owns the file. Skip file if it is owned
+ # by the current package. This also handles cases where a '*-bin'
+ # package exists on the system, so the package manager doesn't think
+ # that the package we are building depends on the *-bin version of
+ # itself, or any other renamed versions of the same software.
+ pkg_owner -l "/${dep#/}\$" "$PWD/manifest" >/dev/null && continue
+ out "/${dep#/}\$"
+ done >> "$dep_file_list"
+
+ # We write all the files into 'dep_file_list' so that we don't need to call
+ # grep on our entire database manifest hundreds of times.
+ pkg_owner_multi -lf "$dep_file_list" "$@" >> depends
# Remove duplicate entries from the new depends file.
# This removes duplicate lines looking *only* at the
@@ -965,7 +1174,7 @@ pkg_fix_deps() {
sort -uk1,1 -o depends depends 2>/dev/null ||:
# Display a diff of the new dependencies against the old ones.
- diff -U 3 "$dep_file" depends 2>/dev/null ||:
+ execv diff -U 3 "$dep_file" depends 2>/dev/null ||:
# Remove the depends file if it is empty.
[ -s depends ] || rm -f depends
@@ -987,7 +1196,7 @@ pkg_manifest() (
# sed: Remove the first character in each line (./dir -> /dir) and
# remove all lines which only contain '.'.
find . -type d -exec printf '%s/\n' {} + -o -print |
- sort -r | sed '/^\.$/d;/^\.\/$/d;ss.ss' > "${2:-$pkg_dir}/$1/$pkg_db/$1/manifest"
+ sort -r | sed '/^\.\/*$/d;ss.ss' > "${2:-$pkg_dir}/$1/$pkg_db/$1/manifest"
)
pkg_etcsums() (
@@ -1017,23 +1226,12 @@ pkg_tar() {
log "$1" "Creating tarball"
# Read the version information to name the package.
- read -r version release < "$(pkg_find "$1")/version"
+ read -r version release < "$pkg_dir/$1/$pkg_db/$1/version"
# Create a tarball from the contents of the built package.
cd "$pkg_dir/$1"
- pax -w . |
- case $CPT_COMPRESS in
- bz2) bzip2 -z ;;
- xz) xz -zT 0 ;;
- gz) gzip -6 ;;
- zst) zstd -3 ;;
- lz) lzip -6 ;;
- *) gzip -6 ;; # Fallback to gzip
- esac \
- > "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS"
-
+ pax -w . | compress > "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS"
log "$1" "Successfully created tarball"
-
run_hook post-package "$1" "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS"
}
@@ -1052,6 +1250,7 @@ pkg_build() {
# separately from those detected as dependencies.
explicit="$explicit $pkg "
} done
+ pkg_depends_commit
[ "$pkg_update" ] || explicit_build=$explicit
@@ -1259,7 +1458,7 @@ pkg_checksums() {
pkg_verify() {
# Verify all package checksums. This is achieved by generating a new set of
# checksums and then comparing those with the old set.
- vcmd="NR==FNR{a[\$1];next}/^git .*/{next}!((\$1)in a){exit 1}"
+ vcmd="NR==FNR{a[\$1];next}/^git .*/{next}!((\$1)in a){exit 1}END{if(NR/2!=FNR)exit 1}"
for pkg; do
repo_dir=$(pkg_find "$pkg")
@@ -1286,6 +1485,9 @@ pkg_conflicts() {
# Check to see if a package conflicts with another.
log "$1" "Checking for package conflicts"
+ c_manifest=$(_tmp_create conflict-manifest)
+ c_conflicts=$(_tmp_create conflicts)
+
# Filter the tarball's manifest and select only files
# and any files they resolve to on the filesystem
# (/bin/ls -> /usr/bin/ls).
@@ -1305,9 +1507,9 @@ pkg_conflicts() {
# Combine the dirname and file values, and print them into the
# temporary manifest to be parsed.
- printf '%s/%s\n' "${dirname#$CPT_ROOT}" "${file##*/}"
+ printf '%s/%s\n' "${dirname#"$CPT_ROOT"}" "${file##*/}"
- done < "$tar_dir/$1/$pkg_db/$1/manifest" > "$CPT_TMPDIR/$pid/manifest"
+ done < "$tar_dir/$1/$pkg_db/$1/manifest" > "$c_manifest"
p_name=$1
@@ -1316,7 +1518,7 @@ pkg_conflicts() {
# shellcheck disable=2046,2086
set -- $(set +f; pop "$sys_db/$p_name/manifest" from "$sys_db"/*/manifest)
- [ -s "$CPT_TMPDIR/$pid/manifest" ] || return 0
+ [ -s "$c_manifest" ] || return 0
# In rare cases where the system only has one package installed
# and you are reinstalling that package, grep will try to read from
@@ -1332,12 +1534,12 @@ pkg_conflicts() {
# Store the list of found conflicts in a file as we will be using the
# information multiple times. Storing it in the cache dir allows us
# to be lazy as they'll be automatically removed on script end.
- sed '/\/$/d' "$@" | sort "$CPT_TMPDIR/$pid/manifest" - | uniq -d > "$CPT_TMPDIR/$pid/conflict" ||:
+ sed '/\/$/d' "$@" | sort "$c_manifest" - | uniq -d > "$c_conflicts" ||:
# Enable alternatives automatically if it is safe to do so.
# This checks to see that the package that is about to be installed
# doesn't overwrite anything it shouldn't in '/var/db/cpt/installed'.
- "$grep" -q "/var/db/cpt/installed/" "$CPT_TMPDIR/$pid/conflict" ||
+ "$grep" -q "/var/db/cpt/installed/" "$c_conflicts" ||
choice_auto=1
# Use 'grep' to list matching lines between the to
@@ -1388,13 +1590,13 @@ pkg_conflicts() {
log "this must be fixed in $p_name. Contact the maintainer"
die "by checking 'git log' or by running 'cpt-maintainer'"
}
- done < "$CPT_TMPDIR/$pid/conflict"
+ done < "$c_conflicts"
# Rewrite the package's manifest to update its location
# to its new spot (and name) in the choices directory.
pkg_manifest "$p_name" "$tar_dir" 2>/dev/null
- elif [ -s "$CPT_TMPDIR/$pid/conflict" ]; then
+ elif [ -s "$c_conflicts" ]; then
log "Package '$p_name' conflicts with another package" "" "!>"
log "Run 'CPT_CHOICE=1 cpt i $p_name' to add conflicts" "" "!>"
die "as alternatives."
@@ -1427,15 +1629,22 @@ pkg_swap() {
# its manifest file to reflect this. We then resort this file
# so no issues arise when removing packages.
cp -Pf "$CPT_ROOT/$2" "$pkg_owns>${alt#*>}"
- sed "s#^$(regesc "$2")\$#${PWD#$CPT_ROOT}/$pkg_owns>${alt#*>}#" \
+ sed "s#^$(regesc "$2")\$#${PWD#"$CPT_ROOT"}/$pkg_owns>${alt#*>}#" \
"../installed/$pkg_owns/manifest" |
sort -r -o "../installed/$pkg_owns/manifest"
+ else
+ # If the file doesn't exist, we assume that there was a previous owner,
+ # but the package was then removed. We want the message to be short
+ # and clear, I thought of writing "Swapping [...] from 'null' to '$1'",
+ # but that would probably sound more like a package manager bug. Instead
+ # we are printing the message below which should be informative enough.
+ log "Installing '$2' from '$1'"
fi
# Convert the desired alternative to a real file and rewrite
# the manifest file to reflect this. The reverse of above.
mv -f "$alt" "$CPT_ROOT/$2"
- sed "s#^${PWD#$CPT_ROOT}/$(regesc "$alt")\$#$2#" "../installed/$1/manifest" |
+ sed "s#^${PWD#"$CPT_ROOT"}/$(regesc "$alt")\$#$2#" "../installed/$1/manifest" |
sort -r -o "../installed/$1/manifest"
}
@@ -1449,13 +1658,13 @@ pkg_etc() {
mkdir -p "$CPT_ROOT/$dir"
done
- digest=$(_get_digest "$mak_dir/c") || digest=b3sum
+ digest=$(_get_digest "$_etcsums") || digest=b3sum
# Handle files in /etc/ based on a 3-way checksum check.
find etc ! -type d | while read -r file; do
{ sum_new=$("$digest" "$file")
sum_sys=$(cd "$CPT_ROOT/"; "$digest" "$file")
- sum_old=$("$grep" "$file$" "$mak_dir/c"); } 2>/dev/null ||:
+ sum_old=$("$grep" "$file$" "$_etcsums"); } 2>/dev/null ||:
logv "$pkg_name" "Doing 3-way handshake for $file"
outv "Previous: ${sum_old:-null}"
@@ -1520,10 +1729,11 @@ pkg_remove() {
# remove anything from packages that create empty directories for a
# purpose (such as baselayout).
manifest_list="$(set +f; pop "$sys_db/$1/manifest" from "$sys_db/"*/manifest)"
+ dirs="$(_tmp_name "directories")"
# shellcheck disable=2086
- [ "$manifest_list" ] && grep -h '/$' $manifest_list | sort -ur > "$mak_dir/dirs"
+ [ "$manifest_list" ] && grep -h '/$' $manifest_list | sort -ur > "$dirs"
- run_hook pre-remove "$1" "$sys_db/$1" root
+ run_hook pre-remove "$1" "$sys_db/$1"
while read -r file; do
# The file is in '/etc' skip it. This prevents the package
@@ -1531,7 +1741,7 @@ pkg_remove() {
[ "${file##/etc/*}" ] || continue
if [ -d "$CPT_ROOT/$file" ]; then
- "$grep" -Fxq "$file" "$mak_dir/dirs" 2>/dev/null && continue
+ "$grep" -Fxq "$file" "$dirs" 2>/dev/null && continue
rmdir "$CPT_ROOT/$file" 2>/dev/null || continue
else
rm -f "$CPT_ROOT/$file"
@@ -1542,7 +1752,7 @@ pkg_remove() {
# we no longer need to block 'Ctrl+C'.
trap_set cleanup
- run_hook post-remove "$1" "$CPT_ROOT/" root
+ run_hook post-remove "$1" "$CPT_ROOT/"
log "$1" "Removed successfully"
}
@@ -1605,7 +1815,7 @@ pkg_install() {
[ "$install_dep" ] && die "$1" "Package requires ${install_dep%, }"
- run_hook pre-install "$pkg_name" "$tar_dir/$pkg_name" root
+ run_hook pre-install "$pkg_name" "$tar_dir/$pkg_name"
pkg_conflicts "$pkg_name"
log "$pkg_name" "Installing package incrementally"
@@ -1617,8 +1827,8 @@ pkg_install() {
# If the package is already installed (and this is an upgrade) make a
# backup of the manifest and etcsums files.
- cp -f "$sys_db/$pkg_name/manifest" "$mak_dir/m" 2>/dev/null ||:
- cp -f "$sys_db/$pkg_name/etcsums" "$mak_dir/c" 2>/dev/null ||:
+ _manifest=$(_tmp_cp "$sys_db/$pkg_name/manifest" 2>/dev/null) ||:
+ _etcsums=$(_tmp_cp "$sys_db/$pkg_name/etcsums" 2>/dev/null) ||:
# This is repeated multiple times. Better to make it a function.
pkg_rsync() {
@@ -1633,7 +1843,7 @@ pkg_install() {
pkg_etc
# Remove any leftover files if this is an upgrade.
- "$grep" -vFxf "$sys_db/$pkg_name/manifest" "$mak_dir/m" 2>/dev/null |
+ "$grep" -vFxf "$sys_db/$pkg_name/manifest" "$_manifest" 2>/dev/null |
while read -r file; do
file=$CPT_ROOT/$file
@@ -1670,7 +1880,7 @@ pkg_install() {
"$sys_db/$pkg_name/post-install" ||:
fi
- run_hook post-install "$pkg_name" "$sys_db/$pkg_name" root
+ run_hook post-install "$pkg_name" "$sys_db/$pkg_name"
log "$pkg_name" "Installed successfully"
}
@@ -1679,7 +1889,7 @@ pkg_repository_update() {
# Function to update the given package repository.
cd "$1"
repo_type=$(pkg_vcs_info)
- repo_root=${repo_type#$PWD:}
+ repo_root=${repo_type#"$PWD":}
repo_type=${repo_type##*:} repo_root=${repo_root%:*}
contains "$repos" "$repo_root" || {
repos="$repos $repo_root "
@@ -1706,7 +1916,7 @@ pkg_vcs_clone_git() {
git init
git remote add origin "${1%[#@]*}"
case $2 in
- @*) git fetch -t --depth=1 origin "${2#@}" || git fetch ;;
+ @*) git fetch -t --depth=1 origin "${2#@}" || git fetch; set -- "$1" "${2#@}" ;;
*) git fetch --depth=1 origin "$2" || git fetch
esac
git checkout "${2:-FETCH_HEAD}"
@@ -1905,7 +2115,12 @@ pkg_updates(){
# an update.
[ "$CPT_FETCH" = 0 ] || pkg_fetch
- log "Checking for new package versions"
+ # Be quiet if we are doing a self-update; there is no need to print the same
+ # information twice. We add this basic function because we will be using it
+ # more than once.
+ _not_update () { [ "$cpt_self_update" ] || "$@" ;}
+
+ _not_update log "Checking for new package versions"
set +f
@@ -1919,7 +2134,7 @@ pkg_updates(){
# Compare installed packages to repository packages.
[ "$db_ver-$db_rel" != "$re_ver-$re_rel" ] && {
- printf '%s\n' "$pkg_name $db_ver-$db_rel ==> $re_ver-$re_rel"
+ _not_update printf '%s\n' "$pkg_name $db_ver-$db_rel ==> $re_ver-$re_rel"
outdated="$outdated$pkg_name "
}
done
@@ -1940,6 +2155,13 @@ pkg_updates(){
exit 0
}
+ [ "$outdated" ] || {
+ log "Everything is up to date"
+ return
+ }
+
+ _not_update log "Packages to update: ${outdated% }"
+
contains "$outdated" cpt && {
log "Detected package manager update"
log "The package manager will be updated first"
@@ -1950,18 +2172,17 @@ pkg_updates(){
cpt-install cpt
log "Updated the package manager"
- log "Re-run 'cpt update' to update your system"
-
- exit 0
- }
-
- [ "$outdated" ] || {
- log "Everything is up to date"
- return
+ log "Re-executing the package manager to continue the update"
+
+ # We export this variable so that cpt knows it's running for the second
+ # time. We make the new process promptless, and we avoid fetching
+ # repositories. We are assuming that the user was already prompted once,
+ # and that their repositories are up to date, or they have also passed
+ # the '-y' or '-n' flags themselves, which leads to the same outcome.
+ export cpt_self_update=1
+ exec cpt-update -yn
}
- log "Packages to update: ${outdated% }"
-
# Tell 'pkg_build' to always prompt before build.
pkg_update=1
@@ -1977,12 +2198,12 @@ pkg_updates(){
}
pkg_get_base() (
- # Print the packages defined in the /etc/cpt-base file.
+ # Print the packages defined in the CPT base file.
 # If an argument is given, it prints a space-separated list instead
 # of a list separated by newlines.
- # cpt-base is an optional file, return with success if it doesn't exist.
- [ -f "$CPT_ROOT/etc/cpt-base" ] || return 0
+ # CPT base is an optional file, return with success if it doesn't exist.
+ [ -f "$cpt_base" ] || return 0
# If there is an argument, change the format to use spaces instead of
# newlines.
@@ -1993,13 +2214,20 @@ pkg_get_base() (
# subshell. That is our purpose here, thank you very much.
# shellcheck disable=SC2030
while read -r pkgname _; do
+ # Ignore comments
[ "${pkgname##\#*}" ] || continue
+
+ # Store the package list in arguments
set -- "$@" "$pkgname"
+
+ # Retrieve the dependency tree of the package, so they are listed as
+ # base packages too. This ensures that no packages are broken in a
+ # "base reset", and the user has a working base.
deps=$(pkg_gentree "$pkgname" xn)
for dep in $deps; do
contains "$*" "$dep" || set -- "$@" "$dep"
done
- done < "$CPT_ROOT/etc/cpt-base"
+ done < "$cpt_base"
# Format variable is intentional.
# shellcheck disable=2059
@@ -2024,6 +2252,7 @@ pkg_gentree() (
esac
done
pkg_depends "$1" tree "$make_deps"
+ pkg_depends_commit
# Unless 'f' is given, pop the package from the list so that we don't list
# the package (for example if it's part of the base package list). Normally
@@ -2035,7 +2264,9 @@ pkg_gentree() (
# shellcheck disable=2086
[ -z "${2##*f*}" ] || deps=$(pop "$1" from $deps)
- eval set -- "$deps"
+ # Word splitting is intentional.
+ # shellcheck disable=2086
+ set -- $deps
pkg_order "$@"
if [ "$reverse" ]; then eval set -- "$redro"; else eval set -- "$order"; fi
[ "$1" ] || return 0
@@ -2074,12 +2305,35 @@ pkg_clean() {
rm -rf -- "${CPT_TMPDIR:=$cac_dir/proc}/$pid"
}
+_tmp_name() {
+ # Name a temporary file/directory
+ out "$tmp_dir/$1"
+}
+
+_tmp_cp() {
+ # Copy the given file to the temporary directory and return its name. If a
+ # second argument is not given, use the basename of the copied file.
+ _ret=${2:-${1##*/}}
+ _ret=$(_tmp_name "$_ret")
+ cp -p "$1" "$_ret"
+ out "$_ret"
+}
+
+_tmp_create() {
+ # Create the given file in the temporary directory and return its name.
+ create_tmp
+ _ret=$(_tmp_name "$1")
+ :> "$_ret" || return 1
+ out "$_ret"
+}
+
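
Usage sketches drawn from the rest of the patch; each helper returns a path
under the per-process temporary directory:

    dirs=$(_tmp_name directories)                    # name only, nothing is created
    c_manifest=$(_tmp_create conflict-manifest)      # empty file is created
    _etcsums=$(_tmp_cp "$sys_db/$pkg_name/etcsums")  # copy of an existing file
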
create_tmp() {
# Create the required temporary directories and set the variables which
# point to them.
- mkdir -p "${mak_dir:=$tmp_dir/build}" \
- "${pkg_dir:=$tmp_dir/pkg}" \
- "${tar_dir:=$tmp_dir/export}"
+ mak_dir=$tmp_dir/build
+ pkg_dir=$tmp_dir/pkg
+ tar_dir=$tmp_dir/export
+ mkdir -p "$mak_dir" "$pkg_dir" "$tar_dir"
}
create_cache() {
@@ -2093,6 +2347,9 @@ create_cache() {
{
set -ef
+ # Package manager version.
+ cpt_version=@VERSION@
+
# If a parser definition exists, let's run it ourselves. This makes sure we
# get the variables as soon as possible.
command -v parser_definition >/dev/null && {
@@ -2107,25 +2364,34 @@ create_cache() {
# that it doesn't change beneath us.
pid=${CPT_PID:-$$}
- # Create the cache directories for CPT and set the variables which point
- # to them. This is seperate from temporary directories created in
- # create_cache(). That's because we need these variables set on most
- # occasions.
- #
# A temporary directory can be specified apart from the cache directory in
# order to build in a user specified directory. /tmp could be used in order
- # to build on ram, useful on SSDs. The user can specify CPT_TMPDIR for this.
- # We create the temporary directory here to avoid permission issues that can
- # arise from functions that call as_root().
- mkdir -p "${cac_dir:=${CPT_CACHE:=${XDG_CACHE_HOME:-$HOME/.cache}/cpt}}" \
- "${CPT_TMPDIR:=$cac_dir/proc}" \
- "${src_dir:=$cac_dir/sources}" \
- "${log_dir:=$cac_dir/logs}" \
- "${bin_dir:=$cac_dir/bin}"
-
- # We don't create the temporary $pid directory until `create_tmp()` is
- # called, but we still declare its variable here.
- : "${tmp_dir:=${CPT_TMPDIR:=$cac_dir/proc}/$pid}"
+ # to build on ram, useful on SSDs. The user can specify $CPT_TMPDIR for
+ # this. We now also support the usage of $XDG_RUNTIME_DIR, so the directory
+ # naming can be confusing to some. Here are possible $tdir names (by order
+ # of preference):
+ #
+ # 1. $CPT_TMPDIR
+ # 2. $XDG_RUNTIME_DIR/cpt
+ # 3. $XDG_CACHE_DIR/cpt/proc
+ # 4. $HOME/.cache/cpt/proc
+ #
+ # We create the main temporary directory here to avoid permission issues
+ # that can arise from functions that call as_root(). However, the
+ # $pid directories are special for each process and aren't created unless
+ # `create_tmp()` is used.
+ #
+ # We used to assign and create the directories at the same time using a
+ # shell hack, but that made the variables editable outside of the package
+ # manager, which we don't actually want. Variables that are lower case
+ # aren't meant to be interacted with or set by the user.
+ cac_dir=${CPT_CACHE:=${XDG_CACHE_HOME:-${HOME:?}/.cache}}/cpt
+ src_dir=$cac_dir/sources
+ log_dir=$cac_dir/logs
+ bin_dir=$cac_dir/bin
+ tdir=${CPT_TMPDIR:=${XDG_RUNTIME_DIR:-$cac_dir/proc}${XDG_RUNTIME_DIR:+/cpt}}
+ tmp_dir=$tdir/$pid
+ mkdir -p "$cac_dir" "$src_dir" "$log_dir" "$bin_dir" "$tdir"
# Set the location to the repository and package database.
pkg_db=var/db/cpt/installed
@@ -2181,10 +2447,7 @@ create_cache() {
# Make sure that the CPT_ROOT doesn't end with a '/'. This might
# break some operations.
- [ -z "$CPT_ROOT" ] || [ "${CPT_ROOT##*/}" ] || {
- warn "" "Your CPT_ROOT variable shouldn't end with '/'"
- CPT_ROOT=${CPT_ROOT%/}
- }
+ CPT_ROOT=${CPT_ROOT%"${CPT_ROOT##*[!/]}"}
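
The parameter expansion strips every trailing '/' in one go; for example
(hypothetical value):

    CPT_ROOT=/mnt/chroot///
    printf '%s\n' "${CPT_ROOT%"${CPT_ROOT##*[!/]}"}"   # -> /mnt/chroot
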
# Define an optional sys_arch variable in order to provide
# information to build files with architectural information.
@@ -2194,6 +2457,13 @@ create_cache() {
# the get go. It will be created as needed by package installation.
sys_db=$CPT_ROOT/$pkg_db
+ # CPT system configuration directory
+ cpt_confdir=$CPT_ROOT@SYSCONFDIR@/cpt
+
+ # Backwards compatibility for the old cpt-base location
+ cpt_base=$CPT_ROOT/etc/cpt-base
+ [ -f "$cpt_confdir/base" ] && cpt_base=$cpt_confdir/base
+
# Regular expression used in pkg_checksums() and pkg_sources() in order to
# identify VCS and comments
re_vcs_or_com='^(#|(fossil|git|hg)\+)'
@@ -2202,16 +2472,17 @@ create_cache() {
# do nothing on a normal system.
mkdir -p "$CPT_ROOT/" 2>/dev/null ||:
- # Set a value for CPT_COMPRESS if it isn't set.
- : "${CPT_COMPRESS:=gz}"
-
- # Unless being piped or the user specifically doesn't want colors, set
- # colors. This can of course be overriden if the user specifically want
- # colors during piping.
- if { [ "$CPT_COLOR" != 0 ] && [ -t 1 ] ;} || [ "$CPT_COLOR" = 1 ]; then
- colory="\033[1;33m" colorb="\033[1;34m" colre="\033[m"
- fi
+ # Set the default compression to gzip, and warn the user if the value is
+ # invalid.
+ case ${CPT_COMPRESS:=gz} in
+ bz2|gz|xz|zst|lz) ;;
+ *) warn "'$CPT_COMPRESS' is not a valid CPT_COMPRESS value, falling back to 'gz'"
+ CPT_COMPRESS=gz
+ esac
+ # Set colors if they are to be enabled.
+ # shellcheck disable=2034
+ colors_enabled && colory="\033[1;33m" colorb="\033[1;34m" colre="\033[m" colbold="\033[1m"
}
# If the library is being called with its own name, run arguments.