From c72c2cc00b2a9e2c7ce7ba0ff22908c209f24822 Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 08:13:57 +0000 Subject: try out the redo build system FossilOrigin-Name: 6ca0330a7e548be4e516ad6c275b2878a1521d2de5263fd75defc16f8f52c93b --- .gitignore | 11 +++++++++- all.do | 2 ++ bin/all.do | 2 ++ bin/clean.do | 2 ++ clean.do | 5 +++++ config.rc | 66 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ default.do | 42 ++++++++++++++++++++++++++++++++++++++ dist.do | 3 +++ 8 files changed, 132 insertions(+), 1 deletion(-) create mode 100644 all.do create mode 100644 bin/all.do create mode 100644 bin/clean.do create mode 100644 clean.do create mode 100644 config.rc create mode 100644 default.do create mode 100644 dist.do diff --git a/.gitignore b/.gitignore index 6a09127..a4f8bc0 100644 --- a/.gitignore +++ b/.gitignore @@ -14,4 +14,13 @@ getopt [._]*.sw[a-p] [._]s[a-rt-v][a-z] [._]ss[a-gi-z] -[._]sw[a-p] \ No newline at end of file +[._]sw[a-p] + +### Redo files +/.redo +/.do_built +/.do_built.dir +*.tmp +*.did +.dep* +.target* diff --git a/all.do b/all.do new file mode 100644 index 0000000..60dfc91 --- /dev/null +++ b/all.do @@ -0,0 +1,2 @@ +. ./config.rc +redo-ifchange bin/all doc/cpt.info diff --git a/bin/all.do b/bin/all.do new file mode 100644 index 0000000..aceda74 --- /dev/null +++ b/bin/all.do @@ -0,0 +1,2 @@ +. ../config.rc +redo-ifchange cpt-readlink cpt-stat diff --git a/bin/clean.do b/bin/clean.do new file mode 100644 index 0000000..6234248 --- /dev/null +++ b/bin/clean.do @@ -0,0 +1,2 @@ +. ../config.rc +rm -f -- ./*.o cpt-readlink cpt-stat .dep.* diff --git a/clean.do b/clean.do new file mode 100644 index 0000000..389daf3 --- /dev/null +++ b/clean.do @@ -0,0 +1,5 @@ +. ./config.rc +redo bin/clean +redo_clean +rm -f "cpt-$VERSION.tar.xz" +find doc -name '*.info' -exec rm -f -- {} + diff --git a/config.rc b/config.rc new file mode 100644 index 0000000..d1b2281 --- /dev/null +++ b/config.rc @@ -0,0 +1,66 @@ +# -*- mode: redo -*- +# See LICENSE for copyright information + +setv() { + # Set variables if unset. Works similar to the Makefile syntax. + [ "$3" ] || { + printf '%s\n' "Faulty variable syntax" >&2 + exit 1 + } + var=$1; sym=$2; shift 2 + case "$sym" in + \?=|=) eval "[ \"\$$var\" ]" || export "$var=$*" ;; + +=) eval "export \"$var=\$$var $*\"" + esac +} + +redo_clean() { + # Clean function for various redo implementations + [ -r .do_built ] && { + while read -r file; do + [ -d "$file" ] || rm -f "$file" + done < .do_built + } + find . -type f \( -name '*.tmp' -o -name '*.did' -o -name '.dep*' -o -name '.target*' \) \ + -exec rm -f -- {} + + [ "$DO_BUILT" ] || find . -name '.do_built*' -exec rm -rf -- {} + + [ "$REDO_BASE" ] || rm -rf -- .redo +} + +PHONY() { + # Function that resembles the .PHONY: target on the classic 'make' build + # system. 
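The setv helper defined in config.rc above emulates Makefile-style assignments: '=' and '?=' only assign when the variable is still empty, and '+=' appends. A minimal sketch of how the three forms behave (values chosen only for illustration):

    # caller-supplied environment wins, because '=' / '?=' never overwrite
    setv CC = cc
    setv CFLAGS ?= -Os
    setv CFLAGS += -D_XOPEN_SOURCE=700   # appends to whatever CFLAGS already holds

This is what keeps 'CC=clang redo ...' style overrides working even though the defaults live in config.rc.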
+ [ "$1" ] || { + trap 'rm -f $3' EXIT INT + return 0 + } + setv PHONY += "$@" + trap 'case " $PHONY " in *" $1 "*) rm -f $3; esac' EXIT INT +} + +setv VERSION = 5.1.0 + +# Paths +setv PREFIX = /usr/local +setv BINDIR = "${PREFIX}/bin" +setv SHAREDIR = "${PREFIX}/share" +setv DOCDIR = "${SHAREDIR}/doc" +setv CPTDOC = "${DOCDIR}/cpt" +setv MANPREFIX = "${SHAREDIR}/man" +setv MAN1 = "${MANPREFIX}/man1" + +# Flags +setv CFLAGS = -std=c99 -Wpedantic -Wall -Os +setv CFLAGS += -D_XOPEN_SOURCE=700 +setv LDFLAGS = -s -static +setv LIBS = -lc + +setv CC = cc +setv LD = "${CC}" + +# Documentation tools +setv EMACS = emacs +setv MAKEINFO = makeinfo + +# Phony targets +PHONY all dist clean install uninstall test diff --git a/default.do b/default.do new file mode 100644 index 0000000..2d091c6 --- /dev/null +++ b/default.do @@ -0,0 +1,42 @@ +. ./config.rc + +# Extensionless name of file +fn="${1%.*}" + +case "$1" in + bin/cpt-readlink|bin/cpt-stat) + redo-ifchange "$1.o" + "$CC" -o "$3" $LDFLAGS "$1.o" $LIBS + ;; + *.o) + [ -f "${1%.o}.c" ] || exit 99 + redo-ifchange "$fn.c" + "$CC" -c -o "$3" $CFLAGS "$fn.c" + ;; + *.info) + redo-ifchange "$fn.texi" + $MAKEINFO "$fn.texi" -o "$3" + ;; + *.texi) + [ -f "$fn.org" ] || exit 99 + redo-ifchange "$fn.org" + $EMACS "$fn.org" --batch -f org-texinfo-export-to-texinfo + mv "$1" "$3" + ;; + "cpt-$VERSION.tar.xz") + redo doc/cpt.info + rm -rf -- "cpt-$VERSION" + find . -type f ! -name '.*' ! -path './.*' | + while read -r file; do + mkdir -p "cpt-$VERSION/${file%/*}" + cp "$file" "cpt-$VERSION/$file" + done + tar cf "cpt-$VERSION.tar" "cpt-$VERSION" + xz -z "cpt-$VERSION.tar" + rm -rf -- "cpt-$VERSION" + mv "$1" "$3" + ;; + *) + echo "Unknown target $1" + exit 99 +esac diff --git a/dist.do b/dist.do new file mode 100644 index 0000000..c7f21f1 --- /dev/null +++ b/dist.do @@ -0,0 +1,3 @@ +. ./config.rc +redo clean +redo "cpt-$VERSION.tar.xz" -- cgit v1.2.3 From b38c7fd3760994ef8321c8d807b4ebca9a674d0a Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 09:37:03 +0000 Subject: cpt-lib: move to cpt-lib.in FossilOrigin-Name: ffb46492fd34c44bd9d422c3e5febec38f8938901d339e5f952cac5c91274ee7 --- src/cpt-lib | 1874 -------------------------------------------------------- src/cpt-lib.in | 1874 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 1874 insertions(+), 1874 deletions(-) delete mode 100644 src/cpt-lib create mode 100644 src/cpt-lib.in diff --git a/src/cpt-lib b/src/cpt-lib deleted file mode 100644 index c75fbcf..0000000 --- a/src/cpt-lib +++ /dev/null @@ -1,1874 +0,0 @@ -#!/bin/sh -ef -# shellcheck source=/dev/null -# -# This is the Carbs Packaging Toolchain written for Carbs Linux. -# It was originally forked from the kiss package manager by -# Dylan Araps. -# -# Currently maintained by Cem Keylan. - -version() { - log "Carbs Packaging Tools" 5.1.1 - exit 0 -} - -out() { - # Print a message as is. - printf '%s\n' "$@" -} - -log() { - # Print a message prettily. - # - # All messages are printed to stderr to allow the user to hide build - # output which is the only thing printed to stdout. - # - # '${3:-->}': If the 3rd argument is missing, set prefix to '->'. - # '${2:+colorb}': If the 2nd argument exists, set text style of '$1'. - printf '%b%s %b%b%s%b %s\n' \ - "$colory" "${3:-->}" "$colre" "${2:+$colorb}" "$1" "$colre" "$2" >&2 -} - -die() { - # Print a message and exit with '1' (error). - log "$1" "$2" "!>" - exit 1 -} - -trap_set() { - # Function to set the trap value. 
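Taken together with all.do, clean.do and dist.do above, a build could be driven roughly like this (an illustrative session, not part of the patch; the tarball name follows cpt-$VERSION.tar.xz):

    redo all            # redo-ifchange builds bin/cpt-readlink, bin/cpt-stat and doc/cpt.info
    CC=clang redo all   # environment overrides survive, since setv only fills in unset variables
    redo clean          # runs bin/clean, redo_clean, and removes generated docs
    redo dist           # cleans first, then packs the tree into cpt-5.1.0.tar.xz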
- case ${1:-cleanup} in - cleanup) trap pkg_clean EXIT INT ;; - block) trap '' INT ;; - unset) trap - EXIT INT ;; - esac -} - -# This is the public domain getoptions shell library. It also forms a usage -# function. -# URL: https://github.com/ko1nksm/getoptions (v2.0.1) -# License: Creative Commons Zero v1.0 Universal -# shellcheck disable=2016 -getoptions() { - _error='' _on=1 _off='' _export='' _plus='' _mode='' _alt='' _rest='' - _opts='' _help='' _indent='' _init=@empty IFS=' ' - - for i in 0 1 2 3 4 5; do - eval "_$i() { echo \"$_indent\$@\"; }" - _indent="$_indent " - done - - quote() { - q="$2'" r='' - while [ "$q" ]; do r="$r${q%%\'*}'\''" && q=${q#*\'}; done - q="'${r%????}'" && q=${q#\'\'} && q=${q%\'\'} - eval "$1=\${q:-\"''\"}" - } - code() { - [ "${1#:}" = "$1" ] && c=3 || c=4 - eval "[ ! \${$c:+x} ] || $2 \"\$$c\"" - } - - invoke() { eval '"_$@"'; } - prehook() { invoke "$@"; } - for i in setup flag param option disp msg; do - eval "$i() { prehook $i \"\$@\"; }" - done - - args() { - on=$_on off=$_off export=$_export init=$_init _hasarg=$1 - while [ $# -gt 2 ] && [ "$3" != '--' ] && shift; do - case $2 in - -?) [ "$_hasarg" ] || _opts="$_opts${2#-}" ;; - +*) _plus=1 ;; - [!-+]*) eval "${2%%:*}=\${2#*:}" - esac - done - } - defvar() { - case $init in - @none) : ;; - @export) code "$1" _0 "export $1" ;; - @empty) code "$1" _0 "${export:+export }$1=''" ;; - @unset) code "$1" _0 "unset $1 ||:" "unset OPTARG ||:; ${1#:}" ;; - *) - case $init in @*) eval "init=\"=\${${init#@}}\""; esac - case $init in [!=]*) _0 "$init"; return 0; esac - quote init "${init#=}" - code "$1" _0 "${export:+export }$1=$init" "OPTARG=$init; ${1#:}" - esac - } - _setup() { - [ $# -gt 0 ] && { [ "$1" ] && _rest=$1; shift; } - for i; do [ "$i" = '--' ] && break; eval "_${i%%:*}=\${i#*:}"; done - } - _flag() { args : "$@"; defvar "$@"; } - _param() { args '' "$@"; defvar "$@"; } - _option() { args '' "$@"; defvar "$@"; } - _disp() { args : "$@"; } - _msg() { args : _ "$@"; } - - "$@" - _0 "${_rest:?}=''" - - args() { - sw='' validate='' pattern='' counter='' on=$_on off=$_off export=$_export - while [ $# -gt 1 ] && [ "$2" != '--' ] && shift; do - case $1 in - --\{no-\}*) sw="$sw${sw:+ | }--${1#--?no-?} | --no-${1#--?no-?}" ;; - [-+]? | --*) sw="$sw${sw:+ | }$1" ;; - *) eval "${1%%:*}=\"\${1#*:}\"" - esac - done - } - setup() { :; } - _flag() { - args "$@" - quote on "$on" && quote off "$off" - [ "$counter" ] && on=1 off=-1 v="\$((\${$1:-0}+\${OPTARG:-0}))" || v='' - _3 "$sw)" - _4 '[ "${OPTARG:-}" ] && OPTARG=${OPTARG#*\=} && set -- noarg "$1" && break' - _4 "eval '[ \${OPTARG+x} ] &&:' && OPTARG=$on || OPTARG=$off" - valid "$1" "${v:-\$OPTARG}" - _4 ';;' - } - _param() { - args "$@" - _3 "$sw)" - _4 '[ $# -le 1 ] && set -- required "$1" && break' - _4 'OPTARG=$2' - valid "$1" '$OPTARG' - _4 'shift ;;' - } - _option() { - args "$@" - quote on "$on" && quote off "$off" - _3 "$sw)" - _4 'set -- "$1" "$@"' - _4 '[ ${OPTARG+x} ] && {' - _5 'case $1 in --no-*) set -- noarg "${1%%\=*}"; break; esac' - _5 '[ "${OPTARG:-}" ] && { shift; OPTARG=$2; } ||' "OPTARG=$on" - _4 "} || OPTARG=$off" - valid "$1" '$OPTARG' - _4 'shift ;;' - } - valid() { - set -- "$validate" "$pattern" "$1" "$2" - [ "$1" ] && _4 "$1 || { set -- ${1%% *}:\$? 
\"\$1\" $1; break; }" - [ "$2" ] && { - quote pattern "$2" - _4 "case \$OPTARG in $2) ;;" - _5 "*) set -- pattern:$pattern \"\$1\"; break" - _4 "esac" - } - code "$3" _4 "${export:+export }$3=\"$4\"" "${3#:}" - } - _disp() { - args "$@" - _3 "$sw)" - code "$1" _4 "echo \"\${$1}\"" "${1#:}" - _4 'exit 0 ;;' - } - _msg() { :; } - - _0 "$2() {" - _1 'OPTIND=$(($#+1))' - _1 'while OPTARG= && [ $# -gt 0 ]; do' - [ "$_alt" ] && _2 'case $1 in -[!-]?*) set -- "-$@"; esac' - _2 'case $1 in' - wa() { _4 "eval '${1% *}' \${1+'\"\$@\"'}"; } - _3 '--?*=*) OPTARG=$1; shift' - wa 'set -- "${OPTARG%%\=*}" "${OPTARG#*\=}" "$@"' - _4 ';;' - _3 '--no-*) unset OPTARG ;;' - [ "$_alt" ] || { - [ "$_opts" ] && { - _3 "-[$_opts]?*) OPTARG=\$1; shift" - wa 'set -- "${OPTARG%"${OPTARG#??}"}" "${OPTARG#??}" "$@"' - _4 ';;' - } - _3 '-[!-]?*) OPTARG=$1; shift' - wa 'set -- "${OPTARG%"${OPTARG#??}"}" "-${OPTARG#??}" "$@"' - _4 'OPTARG= ;;' - } - [ "$_plus" ] && { - _3 '+??*) OPTARG=$1; shift' - wa 'set -- "${OPTARG%"${OPTARG#??}"}" "+${OPTARG#??}" "$@"' - _4 'unset OPTARG ;;' - _3 '+*) unset OPTARG ;;' - } - _2 'esac' - _2 'case $1 in' - "$@" - rest() { - _3 "$1" - _4 'while [ $# -gt 0 ]; do' - _5 "$_rest=\"\${$_rest}" '\"\${$((${OPTIND:-0}-$#))}\""' - _5 'shift' - _4 'done' - _4 'break ;;' - } - rest '--) shift' - _3 "[-${_plus:++}]?*)" 'set -- unknown "$1" && break ;;' - case $_mode in - +) rest '*)' ;; - *) _3 "*) $_rest=\"\${$_rest}" '\"\${$((${OPTIND:-0}-$#))}\""' - esac - _2 'esac' - _2 'shift' - _1 'done' - _1 '[ $# -eq 0 ] && { OPTIND=1; unset OPTARG; return 0; }' - _1 'case $1 in' - _2 'unknown) set -- "Unrecognized option: $2" "$@" ;;' - _2 'noarg) set -- "Does not allow an argument: $2" "$@" ;;' - _2 'required) set -- "Requires an argument: $2" "$@" ;;' - _2 'pattern:*) set -- "Does not match the pattern (${1#*:}): $2" "$@" ;;' - _2 '*) set -- "Validation error ($1): $2" "$@"' - _1 'esac' - [ "$_error" ] && _1 "$_error" '"$@" >&2 || exit $?' - _1 'echo "$1" >&2' - _1 'exit 1' - _0 '}' - - # This throws an error on older versions of shellcheck. - # shellcheck disable=2086 - [ ! "$_help" ] || eval "shift 2; getoptions_help $1 $_help" ${3+'"$@"'} -} -# URL: https://github.com/ko1nksm/getoptions (v2.0.1) -# License: Creative Commons Zero v1.0 Universal -getoptions_help() { - width=30 plus='' leading=' ' - - pad() { p=$2; while [ ${#p} -lt "$3" ]; do p="$p "; done; eval "$1=\$p"; } - - args() { - _type=$1 var=${2%% *} sw='' label='' hidden='' _width=$width && shift 2 - while [ $# -gt 0 ] && i=$1 && shift && [ ! "$i" = '--' ]; do - case $i in - --*) pad sw "$sw${sw:+, }" $((${plus:+4}+4)); sw="$sw$i" ;; - -?) sw="$sw${sw:+, }$i" ;; - +?) [ ! 
"$plus" ] || { pad sw "$sw${sw:+, }" 4; sw="$sw$i"; } ;; - *) eval "${i%%:*}=\${i#*:}" - esac - done - [ "$hidden" ] && return 0 - - [ "$label" ] || case $_type in - setup | msg) label='' _width=0 ;; - flag | disp) label="$sw " ;; - param) label="$sw $var " ;; - option) label="${sw}[=$var] " - esac - pad label "${label:+$leading}$label" "$_width" - [ ${#label} -le "$_width" ] && [ $# -gt 0 ] && label="$label$1" && shift - echo "$label" - pad label '' "$_width" - for i; do echo "$label$i"; done - } - - for i in 'setup :' flag param option disp 'msg :'; do - eval "${i% *}() { args $i \"\$@\"; }" - done - - echo "$2() {" - echo "cat<<'GETOPTIONSHERE'" - "$@" - echo "GETOPTIONSHERE" - echo "}" -} - -global_options() { - msg -- '' 'Global Options:' - flag CPT_FORCE -f --force init:@export -- "Force operation" - flag CPT_PROMPT -y --no-prompt on:0 off:0 init:@export -- "Do not prompt for confirmation" - param CPT_ROOT --root init:@export -- "Use an alternate root directory" - disp :usage -h --help -- "Show this help message" - disp :version -v --version -- "Print version information" -} - -warn() { - # Print a warning message - log "$1" "$2" "${3:-WARNING}" -} - -contains() { - # Check if a "string list" contains a word. - case " $1 " in *" $2 "*) return 0; esac; return 1 -} - -regesc() { - # Escape special regular expression characters as - # defined in POSIX BRE. '$.*[\^' - printf '%s\n' "$1" | - sed 's|\\|\\\\|g;s|\[|\\[|g;s|\$|\\$|g;s|\.|\\.|g;s|\*|\\*|g;s|\^|\\^|g' -} - - -prompt() { - # If a CPT_NOPROMPT variable is set, continue. - # This can be useful for installation scripts and - # bootstrapping. - [ "$CPT_PROMPT" = 0 ] && return 0 - - # Ask the user for some input. - [ "$1" ] && log "$1" - log "Continue?: Press Enter to continue or Ctrl+C to abort here" - - # POSIX 'read' has none of the "nice" options like '-n', '-p' - # etc etc. This is the most basic usage of 'read'. - # '_' is used as 'dash' errors when no variable is given to 'read'. - read -r _ || return 1 -} - -as_root() { - # Simple function to run a command as root using either 'sudo', - # 'doas' or 'su'. Hurrah for choice. - [ "$uid" = 0 ] || log "Using '${su:-su}' (to become ${user:=root})" - - # We are exporting package manager variables, so that we still have the - # same repository paths / access to the same cache directories etc. - set -- HOME="$HOME" \ - USER="$user" \ - XDG_CACHE_HOME="$XDG_CACHE_HOME" \ - CPT_CACHE="$CPT_CACHE" \ - CPT_CHOICE="$CPT_CHOICE" \ - CPT_COMPRESS="$CPT_COMPRESS" \ - CPT_DEBUG="$CPT_DEBUG" \ - CPT_FETCH="$CPT_FETCH" \ - CPT_FORCE="$CPT_FORCE" \ - CPT_HOOK="$CPT_HOOK" \ - CPT_KEEPLOG="$CPT_KEEPLOG" \ - CPT_PATH="$CPT_PATH" \ - CPT_PID="$CPT_PID" \ - CPT_PROMPT="$CPT_PROMPT" \ - CPT_ROOT="$CPT_ROOT" \ - CPT_TMPDIR="$CPT_TMPDIR" \ - "$@" - - case ${su##*/} in - sudo|doas) "$su" -u "$user" -- env "$@" ;; - su) su -c "env $* <&3" "$user" 3<&0 /dev/null || - sha256 -r "$1" 2>/dev/null || - openssl dgst -r -sha256 "$1" || - die "No sha256 program could be run." ;} | - - while read -r hash _; do printf '%s %s\n' "$hash" "$1"; done -} - -pkg_owner() { - set +f - - [ "$3" ] || set -- "$1" "$2" "$sys_db"/*/manifest - - pkg_owner=$(grep "$@") - pkg_owner=${pkg_owner%/*} - pkg_owner=${pkg_owner##*/} - - set -f -- "$pkg_owner"; unset pkg_owner - [ "$1" ] && printf '%s\n' "$1" -} - -pkg_isbuilt() ( - # Check if a package is built or not. 
- read -r ver rel < "$(pkg_find "$1")/version" - - set +f - for tarball in "$bin_dir/$1#$ver-$rel.tar."*; do - [ -f "$tarball" ] && return 0 - done - return 1 -) - -pkg_lint() { - # Check that each mandatory file in the package entry exists. - log "$1" "Checking repository files" - - repo_dir=$(pkg_find "$1") - - cd "$repo_dir" || die "'$repo_dir' not accessible" - [ -f sources ] || warn "$1" "Sources file not found" - [ -x build ] || die "$1" "Build file not found or not executable" - [ -s version ] || die "$1" "Version file not found or empty" - - read -r _ release 2>/dev/null < version || die "Version file not found" - [ "$release" ] || die "Release field not found in version file" - - [ "$2" ] || [ -f checksums ] || die "$pkg" "Checksums are missing" -} - -pkg_find() { - # Use a SEARCH_PATH variable so that we can get the sys_db into - # the same variable as CPT_PATH. This makes it easier when we are - # searching for executables instead of CPT_PATH. - : "${SEARCH_PATH:=$CPT_PATH:$sys_db}" - - # Figure out which repository a package belongs to by - # searching for directories matching the package name - # in $CPT_PATH/*. - query=$1 match=$2 type=$3 IFS=:; set -- - - # Word splitting is intentional here. - # shellcheck disable=2086 - for path in $SEARCH_PATH ; do - set +f - - for path2 in "$path/"$query; do - test "${type:--d}" "$path2" && set -f -- "$@" "$path2" - done - done - - IFS=$old_ifs - - # A package may also not be found due to a repository not being - # readable by the current user. Either way, we need to die here. - [ "$1" ] || die "Package '$query' not in any repository" - - # Show all search results if called from 'cpt search', else - # print only the first match. - [ "$match" ] && printf '%s\n' "$@" || printf '%s\n' "$1" -} - -pkg_list() { - # List installed packages. As the format is files and - # directories, this just involves a simple for loop and - # file read. - - # Change directories to the database. This allows us to - # avoid having to 'basename' each path. If this fails, - # set '$1' to mimic a failed glob which indicates that - # nothing is installed. - cd "$sys_db" 2>/dev/null || set -- "$sys_db/"\* - - # Optional arguments can be passed to check for specific - # packages. If no arguments are passed, list all. As we - # loop over '$@', if there aren't any arguments we can - # just set the directory contents to the argument list. - [ "$1" ] || { set +f; set -f -- *; } - - # If the 'glob' above failed, exit early as there are no - # packages installed. - [ "$1" = "$sys_db/"\* ] && return 1 - - # Loop over each package and print its name and version. - for pkg do - [ -d "$pkg" ] || { log "$pkg" "not installed"; return 1; } - - read -r version 2>/dev/null < "$pkg/version" || version=null - printf '%s\n' "$pkg $version" - done -} - -pkg_cache() { - read -r version release 2>/dev/null < "$(pkg_find "$1")/version" - - # Initially assume that the package tarball is built with the CPT_COMPRESS - # value. - if [ -f "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS" ]; then - tar_file="$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS" - else - set +f; set -f -- "$bin_dir/$1#$version-$release.tar."* - tar_file=$1 - fi - - [ -f "$tar_file" ] -} - -pkg_sources() { - # Download any remote package sources. The existence of local - # files is also checked. - repo_dir=$(pkg_find "$1") - - # Support packages without sources. Simply do nothing. 
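pkg_sources (and pkg_extract below) read the repository's sources file as 'source [destination]' pairs, where the optional second field is a subdirectory of the build tree. A hypothetical sources file, with made-up URLs and names, could look like:

    # comments and blank lines are skipped
    https://example.org/foo/foo-1.0.tar.gz
    git+https://example.org/foo/foo.git@v1.0 vendor
    patches/fix-build.patch patches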
- [ -f "$repo_dir/sources" ] || return 0 - - log "$1" "Downloading sources" - - # Store each downloaded source in a directory named after the - # package it belongs to. This avoid conflicts between two packages - # having a source of the same name. - mkdir -p "$src_dir/$1" && cd "$src_dir/$1" - - repo_dir=$(pkg_find "$1") - - while read -r src dest || [ "$src" ]; do - # Remote git/hg repository or comment. - if [ -z "${src##\#*}" ] || - [ -z "${src##git+*}" ] || - [ -z "${src##hg+*}" ] - - then : - - # Remote source (cached). - elif [ -f "${src##*/}" ]; then - log "$1" "Found cached source '${src##*/}'" - - # Remote source. - elif [ -z "${src##*://*}" ]; then - log "$1" "Downloading $src" - - curl "$src" -fLo "${src##*/}" || { - rm -f "${src##*/}" - die "$1" "Failed to download $src" - } - - # Local source. - elif [ -f "$repo_dir/$src" ]; then - log "$1" "Found local file '$src'" - - else - die "$1" "No local file '$src'" - fi - done < "$repo_dir/sources" -} - -pkg_extract() { - # Extract all source archives to the build directory and copy over - # any local repository files. - repo_dir=$(pkg_find "$1") - - # Support packages without sources. Simply do nothing. - [ -f "$repo_dir/sources" ] || return 0 - - log "$1" "Extracting sources" - - while read -r src dest || [ "$src" ]; do - mkdir -p "$mak_dir/$1/$dest" && cd "$mak_dir/$1/$dest" - - case $src in - # Git repository. - git+*) - # Split the source into URL + OBJECT (branch or commit). - url=${src##git+} com=${url##*[@#]} com=${com#${url%[@#]*}} - - log "$1" "Cloning ${url%[@#]*}"; { - git init - git remote add origin "${url%[@#]*}" - case "$url" in - # Tags are specified via '@' - *@*) git fetch -t --depth=1 origin "$com" || git fetch ;; - *) git fetch --depth=1 origin "$com" || git fetch - esac - git checkout "${com:-FETCH_HEAD}" - } || die "$1" "Failed to clone $src" - ;; - - # Mercurial repository. - hg+*) - # Split the source into URL + OBJECT (branch or commit). - url=${src##hg+} com=${url##*[@#]} com=${com#${url%[@#]*}} - - # Unfortunately, there is no shallow cloning with Mercurial. - log "$1" "Cloning ${url%[@#]*}" - hg clone -u "${com:-tip}" - - ;; - - # Comment or blank line. - \#*|'') continue ;; - - # Only 'tar', 'cpio', and 'zip' archives are currently supported for - # extraction. Other filetypes are simply copied to '$mak_dir' - # which allows for manual extraction. - *://*.tar|*://*.tar.??|*://*.tar.???|*://*.tar.????|*://*.tgz|*://*.txz) - - decompress "$src_dir/$1/${src##*/}" > .ktar - - "$tar" xf .ktar || die "$1" "Couldn't extract ${src##*/}" - - # We now list the contents of the tarball so we can do our - # version of 'strip-components'. - "$tar" tf .ktar | - while read -r file; do printf '%s\n' "${file%%/*}"; done | - - # Do not repeat files. - uniq | - - # For every directory in the base we move each file - # inside it to the upper directory. - while read -r dir ; do - - # Skip if we are not dealing with a directory here. - # This way we don't remove files on the upper directory - # if a tar archive doesn't need directory stripping. - [ -d "${dir#.}" ] || continue - - # Change into the directory in a subshell so we don't - # need to cd back to the upper directory. - ( - cd "$dir" - - # We use find because we want to move hidden files - # as well. - # - # Skip the file if it has the same name as the directory. - # We will deal with it later. - # - # Word splitting is intentional here. - # shellcheck disable=2046 - find . \( ! -name . -prune \) ! -name "$dir" \ - -exec mv -f {} .. 
\; - - # If a file/directory with the same name as the directory - # exists, append a '.cptbak' to it and move it to the - # upper directory. - ! [ -e "$dir" ] || mv "$dir" "../${dir}.cptbak" - ) - rmdir "$dir" - - # If a backup file exists, move it into the original location. - ! [ -e "${dir}.cptbak" ] || mv "${dir}.cptbak" "$dir" - done - - # Clean up the temporary tarball. - rm -f .ktar - ;; - - *://*.cpio|*://*.cpio.??|*://*.cpio.???|*://*.cpio.????) - decompress "$src_dir/$1/${src##*/}" | cpio -i - - ;; - - *://*.zip) - unzip "$src_dir/$1/${src##*/}" || - die "$1" "Couldn't extract ${src##*/}" - - ;; - - *) - # Local file. - if [ -f "$repo_dir/$src" ]; then - cp -f "$repo_dir/$src" . - - # Remote file. - elif [ -f "$src_dir/$1/${src##*/}" ]; then - cp -f "$src_dir/$1/${src##*/}" . - - else - die "$1" "Local file $src not found" - fi - ;; - esac - done < "$repo_dir/sources" -} - -pkg_depends() { - # Resolve all dependencies and generate an ordered list. - # This does a depth-first search. The deepest dependencies are - # listed first and then the parents in reverse order. - contains "$deps" "$1" || { - # Filter out non-explicit, aleady installed dependencies. - # Only filter installed if called from 'pkg_build()'. - [ "$pkg_build" ] && [ -z "$2" ] && - (pkg_list "$1" >/dev/null) && return - - while read -r dep type || [ "$dep" ]; do - # Skip test dependencies unless $CPT_TEST is set to 1. - case $type in test) [ "$CPT_TEST" = 1 ] || continue; esac - - # Recurse through the dependencies of the child packages. - [ "${dep##\#*}" ] && pkg_depends "$dep" - done 2>/dev/null < "$(pkg_find "$1")/depends" ||: - - # After child dependencies are added to the list, - # add the package which depends on them. - [ "$2" = explicit ] || deps="$deps $1 " - } -} - -pkg_order() { - # Order a list of packages based on dependence and - # take into account pre-built tarballs if this is - # to be called from 'cpt i'. - order=; redro=; deps= - - for pkg do case $pkg in - *.tar.*) deps="$deps $pkg " ;; - *) pkg_depends "$pkg" raw - esac done - - # Filter the list, only keeping explicit packages. - # The purpose of these two loops is to order the - # argument list based on dependence. - for pkg in $deps; do ! contains "$*" "$pkg" || { - order="$order $pkg " - redro=" $pkg $redro" - } done - - deps= -} - -pkg_strip() { - # Strip package binaries and libraries. This saves space on the - # system as well as on the tarballs we ship for installation. - - # Package has stripping disabled, stop here. - [ -f "$mak_dir/$pkg/nostrip" ] && return - - log "$1" "Stripping binaries and libraries" - - find "$pkg_dir/$1" -type f | while read -r file; do - case $(od -A o -t c -N 18 "$file") in - # REL (object files (.o), static libraries (.a)). - *177*E*L*F*0000020\ 001\ *|*\!*\<*a*r*c*h*\>*) - strip -g -R .comment -R .note "$file" - ;; - - # EXEC (static binaries). - # DYN (shared libraries, dynamic binaries). - # Shared libraries keep global symbols in a separate ELF section - # called '.dynsym'. '--strip-all/-s' does not touch the dynamic - # symbol entries which makes this safe to do. - *177*E*L*F*0000020\ 00[23]\ *) - strip -s -R .comment -R .note "$file" - ;; - esac - done 2>/dev/null ||: -} - -pkg_fix_deps() { - # Dynamically look for missing runtime dependencies by checking each binary - # and library with either 'ldd' or 'readelf'. This catches any extra - # libraries and or dependencies pulled in by the package's build suite. 
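pkg_depends above parses each line of a depends file as 'name [type]'. Only the 'test' type is treated specially here (skipped unless CPT_TEST=1); pkg_install later treats any non-empty second field as a dependency that is not required at install time. A made-up depends file might read:

    curl
    zlib
    python make
    check test

The 'make' label is only an example of a non-runtime marker; pkg_fix_deps below appends detected shared-library dependencies to the installed copy of this file automatically.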
- log "$1" "Checking for missing dependencies" - - # Go to the directory containing the built package to - # simplify path building. - cd "$pkg_dir/$1/$pkg_db/$1" - - # Make a copy of the depends file if it exists to have a - # reference to 'diff' against. - if [ -f depends ]; then - cp -f depends "$mak_dir/d" - dep_file=$mak_dir/d - else - dep_file=/dev/null - fi - - # Generate a list of all installed manifests. - pkg_name=$1 - set +f; set -f -- "$sys_db/"*/manifest - - # Get a list of binaries and libraries, false files - # will be found, however it's faster to get 'ldd' to check - # them anyway than to filter them out. - find "$pkg_dir/$pkg_name/" -type f 2>/dev/null | - - while read -r file; do - case ${elf_prog:-ldd} in - *readelf) "$elf_prog" -d "$file" 2>/dev/null ;; - *) ldd "$file" 2>/dev/null ;; - esac | - while read -r dep; do - # Skip lines containing 'ldd'. - [ "${dep##*ldd*}" ] || continue - case $dep in *NEEDED*\[*\] | *'=>'*) ;; *) continue; esac - - # readelf output: - # 0x0000 (NEEDED) Shared library: [libc.so] - dep=${dep##*\[} - dep=${dep%%\]*} - - # ldd output: - # libc.so => /lib/ld-musl-x86_64.so.1 - dep=${dep#* => } - dep=${dep% *} - - # Figure out which package owns the file. Skip file if it is owned - # by the current package. This also handles cases where a '*-bin' - # package exists on the system, so the package manager doesn't think - # that the package we are building depends on the *-bin version of - # itself, or any other renamed versions of the same software. - pkg_owner -l "/${dep#/}\$" "$PWD/manifest" >/dev/null && continue - pkg_owner -l "/${dep#/}\$" "$@" ||: - done ||: - done >> depends - - # Remove duplicate entries from the new depends file. - # This removes duplicate lines looking *only* at the - # first column. - sort -uk1,1 -o depends depends 2>/dev/null ||: - - # Display a diff of the new dependencies against the old ones. - diff -U 3 "$dep_file" depends 2>/dev/null ||: - - # Remove the depends file if it is empty. - [ -s depends ] || rm -f depends -} - -pkg_manifest() ( - # Generate the package's manifest file. This is a list of each file - # and directory inside the package. The file is used when uninstalling - # packages, checking for package conflicts and for general debugging. - log "$1" "Generating manifest" - - # This function runs as a sub-shell to avoid having to 'cd' back to the - # prior directory before being able to continue. - cd "${2:-$pkg_dir}/$1" - - # find: Print all files and directories and append '/' to directories. - # sort: Sort the output in *reverse*. Directories appear *after* their - # contents. - # sed: Remove the first character in each line (./dir -> /dir) and - # remove all lines which only contain '.'. - find . -type d -exec printf '%s/\n' {} + -o -print | - sort -r | sed '/^\.\/$/d;ss.ss' > "${2:-$pkg_dir}/$1/$pkg_db/$1/manifest" -) - -pkg_etcsums() ( - # This function runs as a sub-shell to avoid having to 'cd' back to the - # prior directory before being able to continue. - cd "$pkg_dir/$1/etc" 2>/dev/null || return 0; cd .. - - # Generate checksums for each configuration file in the package's - # /etc/ directory for use in "smart" handling of these files. - log "$1" "Generating etcsums" - - - find etc -type f | while read -r file; do - sh256 "$file" - done > "$pkg_dir/$1/$pkg_db/$1/etcsums" -) - -pkg_tar() { - # Create a tarball from the built package's files. - # This tarball also contains the package's database entry. - log "$1" "Creating tarball" - - # Read the version information to name the package. 
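The version file holds two space-separated fields, version and release, and together with CPT_COMPRESS they determine the tarball name. For a hypothetical package 'foo':

    printf '%s\n' '1.2.3 1' > foo/version
    # pkg_tar then writes, with the default CPT_COMPRESS=gz:
    #   $bin_dir/foo#1.2.3-1.tar.gz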
- read -r version release < "$(pkg_find "$1")/version" - - # Create a tarball from the contents of the built package. - "$tar" cf - -C "$pkg_dir/$1" . | - case $CPT_COMPRESS in - bz2) bzip2 -z ;; - xz) xz -zT 0 ;; - gz) gzip -6 ;; - zst) zstd -3 ;; - *) gzip -6 ;; # Fallback to gzip - esac \ - > "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS" - - log "$1" "Successfully created tarball" - - run_hook post-package "$1" "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS" -} - -pkg_build() { - # Build packages and turn them into packaged tarballs. This function - # also checks checksums, downloads sources and ensure all dependencies - # are installed. - pkg_build=1 - - log "Resolving dependencies" - - for pkg do contains "$explicit" "$pkg" || { - pkg_depends "$pkg" explicit - - # Mark packages passed on the command-line - # separately from those detected as dependencies. - explicit="$explicit $pkg " - } done - - [ "$pkg_update" ] || explicit_build=$explicit - - # If an explicit package is a dependency of another explicit - # package, remove it from the explicit list as it needs to be - # installed as a dependency. - # shellcheck disable=2086 - for pkg do - contains "$deps" "$pkg" && explicit=$(pop "$pkg" from $explicit) - done - - # See [1] at top of script. - # shellcheck disable=2046,2086 - set -- $deps $explicit - - log "Building: $*" - - # Only ask for confirmation if more than one package needs to be built. - [ $# -gt 1 ] || [ "$pkg_update" ] && { prompt || exit 0 ;} - - log "Checking for pre-built dependencies" - - for pkg do pkg_lint "$pkg"; done - - # Install any pre-built dependencies if they exist in the binary - # directory and are up to date. - for pkg do ! contains "$explicit_build" "$pkg" && pkg_cache "$pkg" && { - log "$pkg" "Found pre-built binary, installing" - (CPT_FORCE=1 cpt-install "$tar_file") - - # Remove the now installed package from the build list. - # See [1] at top of script. - # shellcheck disable=2046,2086 - set -- $(pop "$pkg" from "$@") - } done - - for pkg do pkg_sources "$pkg"; done - - pkg_verify "$@" - - # Finally build and create tarballs for all passed packages and - # dependencies. - for pkg do - log "$pkg" "Building package ($((in = in + 1))/$#)" - - pkg_extract "$pkg" - repo_dir=$(pkg_find "$pkg") - - read -r build_version _ < "$repo_dir/version" - - # Copy the build file to the build directory to users to modify it - # temporarily at runtime. - cp -f "$repo_dir/build" "$mak_dir/$pkg/.build.cpt" - - # Install built packages to a directory under the package name - # to avoid collisions with other packages. - mkdir -p "$pkg_dir/$pkg/$pkg_db" - - # Move to the build directory. - cd "$mak_dir/$pkg" - - log "$pkg" "Starting build" - - run_hook pre-build "$pkg" "$pkg_dir/$pkg" - - # Notify the user if the build script is changed during the pre-build - # hook. - diff -q "$repo_dir/build" .build.cpt || - log "$pkg" "Executing the modified build file" - - # Call the build script, log the output to the terminal - # and to a file. There's no PIPEFAIL in POSIX shelll so - # we must resort to tricks like killing the script ourselves. - { ./.build.cpt "$pkg_dir/$pkg" "$build_version" "$sys_arch" 2>&1 || { - log "$pkg" "Build failed" - log "$pkg" "Log stored to $log_dir/$pkg-$time-$pid" - run_hook build-fail "$pkg" "$pkg_dir/$pkg" - pkg_clean - kill 0 - } } | tee "$log_dir/$pkg-$time-$pid" - - # Run the test script if it exists and the user wants to run tests. This - # is turned off by default. 
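The build script is invoked as './.build.cpt "$pkg_dir/$pkg" "$build_version" "$sys_arch"', so $1 is the staging directory the package must install into. A minimal sketch of such a script (the configure/make steps are assumptions, not taken from this patch):

    #!/bin/sh -e
    # $1 = package staging directory, $2 = version, $3 = architecture
    ./configure --prefix=/usr
    make
    make DESTDIR="$1" install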
- [ -x "$repo_dir/test" ] && [ "$CPT_TEST" = 1 ] && { - run_hook pre-test "$pkg" "$pkg_dir/$pkg" - log "$pkg" "Running tests" - "$repo_dir/test" "$pkg_dir/$pkg" "$build_version" "$sys_arch" 2>&1 || { - log "$pkg" "Test failed" - log "$pkg" "Log stored to $log_dir/$pkg-$time-$pid" - run_hook test-fail "$pkg" "$pkg_dir/$pkg" - pkg_clean - kill 0 - } } | tee -a "$log_dir/$pkg-$time-$pid" - - # Delete the log file if the build succeeded to prevent - # the directory from filling very quickly with useless logs. - [ "$CPT_KEEPLOG" = 1 ] || rm -f "$log_dir/$pkg-$time-$pid" - - # Copy the repository files to the package directory. - # This acts as the database entry. - cp -LRf "$repo_dir" "$pkg_dir/$pkg/$pkg_db/" - - # Copy the modified build file to the package directory. - pkg_build="$pkg_dir/$pkg/$pkg_db/$pkg/build" - diff -U 3 "$pkg_build" .build.cpt > "$pkg_build.diff" && - rm -f "$pkg_build.diff" - - # We never ever want this. Let's end the endless conflicts - # and remove it. - find "$pkg_dir/$pkg" -name charset.alias -exec rm -f {} + - - # Remove libtool's '*.la' library files. This removes cross-build - # system conflicts that may arise. Build-systems change, libtool - # is getting deprecated, we don't want a package that depends on - # some package's '.la' files. - find "$pkg_dir/$pkg" -name '*.la' -exec rm -f {} + - - log "$pkg" "Successfully built package" - - run_hook post-build "$pkg" "$pkg_dir/$pkg" - - # Create the manifest file early and make it empty. - # This ensures that the manifest is added to the manifest. - : > "$pkg_dir/$pkg/$pkg_db/$pkg/manifest" - - # If the package contains '/etc', add a file called - # 'etcsums' to the manifest. See comment directly above. - [ -d "$pkg_dir/$pkg/etc" ] && - : > "$pkg_dir/$pkg/$pkg_db/$pkg/etcsums" - - pkg_strip "$pkg" - pkg_manifest "$pkg" - pkg_fix_deps "$pkg" - pkg_manifest "$pkg" - pkg_etcsums "$pkg" - pkg_tar "$pkg" - - # Install only dependencies of passed packages. - # Skip this check if this is a package update. - contains "$explicit" "$pkg" && [ -z "$pkg_update" ] && continue - - log "$pkg" "Needed as a dependency or has an update, installing" - - (CPT_FORCE=1 cpt-install "$pkg") - done - - # End here as this was a system update and all packages have been installed. - [ "$pkg_update" ] && return - - log "Successfully built package(s)" - - # Turn the explicit packages into a 'list'. - # See [1] at top of script. - # shellcheck disable=2046,2086 - set -- $explicit - - # Only ask for confirmation if more than one package needs to be installed. - [ $# -gt 1 ] && prompt "Install built packages? [$*]" && { - cpt-install "$@" - return - } - - log "Run 'cpt i $*' to install the package(s)" -} - -pkg_checksums() { - # Generate checksums for packages. - repo_dir=$(pkg_find "$1") - - [ -f "$repo_dir/sources" ] || return 0 - - while read -r src _ || [ "$src" ]; do - # Comment. - if [ -z "${src##\#*}" ]; then - continue - - # File is local to the package. - elif [ -f "$repo_dir/$src" ]; then - src_path=$repo_dir/${src%/*} - - # File is remote and was downloaded. - elif [ -f "$src_dir/$1/${src##*/}" ]; then - src_path=$src_dir/$1 - - # File is a git repository. - elif [ -z "${src##git+*}" ]; then continue - - # Die here if source for some reason, doesn't exist. - else - die "$1" "Couldn't find source '$src'" - fi - - # An easy way to get 'sha256sum' to print with the 'basename' - # of files is to 'cd' to the file's directory beforehand. 
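The checksums file that pkg_verify compares against is simply the sh256 output for every non-VCS source, one '<sha256> <basename>' line per entry. A repository entry would presumably be refreshed with something along these lines (illustrative; 'foo' is a made-up package):

    pkg_checksums foo > "$(pkg_find foo)/checksums"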
- (cd "$src_path" && sh256 "${src##*/}") || - die "$1" "Failed to generate checksums" - done < "$repo_dir/sources" -} - -pkg_verify() { - # Verify all package checksums. This is achieved by generating a new set of - # checksums and then comparing those with the old set. - verify_cmd="NR==FNR{a[\$1];next}/^git .*/{next}!((\$1)in a){exit 1}" - - for pkg; do - repo_dir=$(pkg_find "$pkg") - [ -f "$repo_dir/sources" ] || continue - - pkg_checksums "$pkg" | awk "$verify_cmd" - "$repo_dir/checksums" || { - log "$pkg" "Checksum mismatch" - - # Instead of dying above, log it to the terminal. Also define a - # variable so we *can* die after all checksum files have been - # checked. - mismatch="$mismatch$pkg " - } done - - [ -z "$mismatch" ] || die "Checksum mismatch with: ${mismatch% }" -} - -pkg_conflicts() { - # Check to see if a package conflicts with another. - log "$1" "Checking for package conflicts" - - # Filter the tarball's manifest and select only files - # and any files they resolve to on the filesystem - # (/bin/ls -> /usr/bin/ls). - while read -r file; do - case $file in */) continue; esac - - # Use $CPT_ROOT in filename so that we follow its symlinks. - file=$CPT_ROOT/${file#/} - - # We will only follow the symlinks of the directories, so we - # reserve the directory name in this 'dirname' value. cpt-readlink - # functions in a similar fashion to 'readlink -f', it makes sure - # every component except for the first one to be available on - # the directory structure. If we cannot find it in the system, - # we don't need to make this much more complex by trying so - # hard to find it. Simply use the original directory name. - dirname="$(cpt-readlink "${file%/*}" 2>/dev/null)" || - dirname="${file%/*}" - - - # Combine the dirname and file values, and print them into the - # temporary manifest to be parsed. - printf '%s/%s\n' "${dirname#$CPT_ROOT}" "${file##*/}" - - done < "$tar_dir/$1/$pkg_db/$1/manifest" > "$CPT_TMPDIR/$pid/manifest" - - p_name=$1 - - # Generate a list of all installed package manifests - # and remove the current package from the list. - # shellcheck disable=2046,2086 - set -- $(set +f; pop "$sys_db/$p_name/manifest" from "$sys_db"/*/manifest) - - [ -s "$CPT_TMPDIR/$pid/manifest" ] || return 0 - - # In rare cases where the system only has one package installed - # and you are reinstalling that package, grep will try to read from - # standard input if we continue here. - # - # Also, if we don't have any packages installed grep will give an - # error. This will not cause the installation to fail, but we don't - # need to check for conflicts if that's the case anyway. If we have - # only zero packages or one package, just stop wasting time and continue - # with the installation. - [ "$1" ] && [ -f "$1" ] || return 0 - - # Store the list of found conflicts in a file as we will be using the - # information multiple times. Storing it in the cache dir allows us - # to be lazy as they'll be automatically removed on script end. - "$grep" -Fxf "$CPT_TMPDIR/$pid/manifest" -- "$@" > "$CPT_TMPDIR/$pid/conflict" ||: - - - # Enable alternatives automatically if it is safe to do so. - # This checks to see that the package that is about to be installed - # doesn't overwrite anything it shouldn't in '/var/db/cpt/installed'. - "$grep" -q ":/var/db/cpt/installed/" "$CPT_TMPDIR/$pid/conflict" || - choice_auto=1 - - # Use 'grep' to list matching lines between the to - # be installed package's manifest and the above filtered - # list. 
- if [ "$CPT_CHOICE" != 0 ] && [ "$choice_auto" = 1 ]; then - - # This is a novel way of offering an "alternatives" system. - # It is entirely dynamic and all "choices" are created and - # destroyed on the fly. - # - # When a conflict is found between two packages, the file - # is moved to a directory called "choices" and its name - # changed to store its parent package and its intended - # location. - # - # The package's manifest is then updated to reflect this - # new location. - # - # The 'cpt choices' command parses this directory and - # offers you the CHOICE of *swapping* entries in this - # directory for those on the filesystem. - # - # The choices command does the same thing we do here, - # it rewrites manifests and moves files around to make - # this work. - # - # Pretty nifty huh? - while IFS=: read -r _ con; do - printf '%s\n' "Found conflict $con" - - # Create the "choices" directory inside of the tarball. - # This directory will store the conflicting file. - mkdir -p "$tar_dir/$p_name/${cho_dir:=var/db/cpt/choices}" - - # Construct the file name of the "db" entry of the - # conflicting file. (pkg_name>usr>bin>ls) - con_name=$(printf %s "$con" | sed 's|/|>|g') - - # Move the conflicting file to the choices directory - # and name it according to the format above. - mv -f "$tar_dir/$p_name/$con" \ - "$tar_dir/$p_name/$cho_dir/$p_name$con_name" 2>/dev/null || { - log "File must be in ${con%/*} and not a symlink to it" - log "This usually occurs when a binary is installed to" - log "/sbin instead of /usr/bin (example)" - log "Before this package can be used as an alternative," - log "this must be fixed in $p_name. Contact the maintainer" - die "by checking 'git log' or by running 'cpt-maintainer'" - } - done < "$CPT_TMPDIR/$pid/conflict" - - # Rewrite the package's manifest to update its location - # to its new spot (and name) in the choices directory. - pkg_manifest "$p_name" "$tar_dir" 2>/dev/null - - elif [ -s "$CPT_TMPDIR/$pid/conflict" ]; then - log "Package '$p_name' conflicts with another package" "" "!>" - log "Run 'CPT_CHOICE=1 cpt i $p_name' to add conflicts" "" "!>" - die "as alternatives." - fi -} - -pkg_swap() { - # Swap between package alternatives. - pkg_list "$1" >/dev/null - - alt=$(printf %s "$1$2" | sed 's|/|>|g') - cd "$sys_db/../choices" - - [ -f "$alt" ] || [ -h "$alt" ] || - die "Alternative '$1 $2' doesn't exist" - - if [ -f "$2" ]; then - # Figure out which package owns the file we are going to swap for - # another package's. - # - # Print the full path to the manifest file which contains - # the match to our search. - - pkg_owns=$(pkg_owner -lFx "$2") || - die "File '$2' exists on filesystem but isn't owned" - - log "Swapping '$2' from '$pkg_owns' to '$1'" - - # Convert the current owner to an alternative and rewrite - # its manifest file to reflect this. We then resort this file - # so no issues arise when removing packages. - cp -Pf "$CPT_ROOT/$2" "$pkg_owns>${alt#*>}" - sed "s#^$(regesc "$2")#${PWD#$CPT_ROOT}/$pkg_owns>${alt#*>}#" \ - "../installed/$pkg_owns/manifest" | - sort -r -o "../installed/$pkg_owns/manifest" - fi - - # Convert the desired alternative to a real file and rewrite - # the manifest file to reflect this. The reverse of above. - mv -f "$alt" "$CPT_ROOT/$2" - sed "s#^${PWD#$CPT_ROOT}/$(regesc "$alt")#$2#" "../installed/$1/manifest" | - sort -r -o "../installed/$1/manifest" -} - -pkg_etc() { - [ -d "$tar_dir/$pkg_name/etc" ] || return 0 - - (cd "$tar_dir/$pkg_name" - - # Create all directories beforehand. 
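For the alternatives handling above, conflicting files are parked under var/db/cpt/choices with '/' flattened to '>' and the owning package prepended. Illustratively (package and path are hypothetical):

    printf %s "sbase/usr/bin/ls" | sed 's|/|>|g'
    # -> sbase>usr>bin>ls, stored as /var/db/cpt/choices/sbase>usr>bin>ls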
- find etc -type d | while read -r dir; do - mkdir -p "$CPT_ROOT/$dir" - done - - # Handle files in /etc/ based on a 3-way checksum check. - find etc ! -type d | while read -r file; do - { sum_new=$(sh256 "$file") - sum_sys=$(cd "$CPT_ROOT/"; sh256 "$file") - sum_old=$("$grep" "$file$" "$mak_dir/c"); } 2>/dev/null ||: - - log "$pkg_name" "Doing 3-way handshake for $file" - printf '%s\n' "Previous: ${sum_old:-null}" - printf '%s\n' "System: ${sum_sys:-null}" - printf '%s\n' "New: ${sum_new:-null}" - - # Use a case statement to easily compare three strings at - # the same time. Pretty nifty. - case ${sum_old:-null}${sum_sys:-null}${sum_new} in - # old = Y, sys = X, new = Y - "${sum_new}${sum_sys}${sum_old}") - log "Skipping $file" - continue - ;; - - # old = X, sys = X, new = X - # old = X, sys = Y, new = Y - # old = X, sys = X, new = Y - "${sum_old}${sum_old}${sum_old}"|\ - "${sum_old:-null}${sum_sys}${sum_sys}"|\ - "${sum_sys}${sum_old}"*) - log "Installing $file" - new= - ;; - - # All other cases. - *) - warn "$pkg_name" "saving /$file as /$file.new" "->" - new=.new - ;; - esac - - cp -fPp "$file" "$CPT_ROOT/${file}${new}" - chown root:root "$CPT_ROOT/${file}${new}" 2>/dev/null - done) ||: -} - -pkg_remove() { - # Remove a package and all of its files. The '/etc' directory - # is handled differently and configuration files are *not* - # overwritten. - pkg_list "$1" >/dev/null || return - - # Make sure that nothing depends on this package. - [ "$CPT_FORCE" = 1 ] || { - log "$1" "Checking for reverse dependencies" - - (cd "$sys_db"; set +f; grep -lFx "$1" -- */depends) && - die "$1" "Can't remove package, others depend on it" - } - # Block being able to abort the script with 'Ctrl+C' during removal. - # Removes all risk of the user aborting a package removal leaving - # an incomplete package installed. - trap_set block - - if [ -x "$sys_db/$1/pre-remove" ]; then - log "$1" "Running pre-remove script" - "$sys_db/$1/pre-remove" ||: - fi - - # Create a temporary list of all directories, so we don't accidentally - # remove anything from packages that create empty directories for a - # purpose (such as baselayout). - manifest_list="$(set +f; pop "$sys_db/$1/manifest" from "$sys_db/"*/manifest)" - # shellcheck disable=2086 - [ "$manifest_list" ] && grep -h '/$' $manifest_list | sort -ur > "$mak_dir/dirs" - - run_hook pre-remove "$1" "$sys_db/$1" root - - while read -r file; do - # The file is in '/etc' skip it. This prevents the package - # manager from removing user edited configuration files. - [ "${file##/etc/*}" ] || continue - - if [ -d "$CPT_ROOT/$file" ]; then - "$grep" -Fxq "$file" "$mak_dir/dirs" 2>/dev/null && continue - rmdir "$CPT_ROOT/$file" 2>/dev/null || continue - else - rm -f "$CPT_ROOT/$file" - fi - done < "$sys_db/$1/manifest" - - # Reset 'trap' to its original value. Removal is done so - # we no longer need to block 'Ctrl+C'. - trap_set cleanup - - run_hook post-remove "$1" "$CPT_ROOT/" root - - log "$1" "Removed successfully" -} - -pkg_install() { - # Install a built package tarball. - - # Install can also take the full path to a tarball. - # We don't need to check the repository if this is the case. - if [ -f "$1" ] && [ -z "${1%%*.tar*}" ] ; then - tar_file=$1 - pkg_name=${1##*/} - pkg_name=${pkg_name%#*} - - else - pkg_cache "$1" || - die "package has not been built, run 'cpt b pkg'" - - pkg_name=$1 - fi - - mkdir -p "$tar_dir/$pkg_name" - log "$pkg_name" "Extracting $tar_file" - - # Extract the tarball to catch any errors before installation begins. 
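A concrete (hypothetical) walk through the /etc three-way check in pkg_etc above: the package previously shipped digest aaa, the user edited the file so the system copy now hashes to bbb, and the new package ships ccc. None of the 'skip' or 'install in place' patterns match, so the file is written with a .new suffix:

    sum_old=aaa sum_sys=bbb sum_new=ccc
    case ${sum_old}${sum_sys}${sum_new} in
        "${sum_new}${sum_sys}${sum_old}") echo skip ;;
        "${sum_old}${sum_old}${sum_old}"|"${sum_old}${sum_sys}${sum_sys}"|"${sum_sys}${sum_old}"*) echo install ;;
        *) echo 'install as .new'
    esac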
- decompress "$tar_file" | "$tar" xf - -C "$tar_dir/$pkg_name" - - [ -f "$tar_dir/$pkg_name/$pkg_db/$pkg_name/manifest" ] || - die "'${tar_file##*/}' is not a valid CPT package" - - # Ensure that the tarball's manifest is correct by checking that - # each file and directory inside of it actually exists. - [ "$CPT_FORCE" != 1 ] && log "$pkg_name" "Checking package manifest" && - while read -r line; do - # Skip symbolic links - [ -h "$tar_dir/$pkg_name/$line" ] || - [ -e "$tar_dir/$pkg_name/$line" ] || { - log "File $line missing from tarball but mentioned in manifest" "" "!>" - TARBALL_FAIL=1 - } - done < "$tar_dir/$pkg_name/$pkg_db/$pkg_name/manifest" - [ "$TARBALL_FAIL" ] && { - log "You can still install this package by setting CPT_FORCE variable" - die "$pkg_name" "Missing files in manifest" - } - - log "$pkg_name" "Checking that all dependencies are installed" - - # Make sure that all run-time dependencies are installed prior to - # installing the package. - [ -f "$tar_dir/$pkg_name/$pkg_db/$pkg_name/depends" ] && - [ "$CPT_FORCE" != 1 ] && - while read -r dep dep_type || [ "$dep" ]; do - [ "${dep##\#*}" ] || continue - [ "$dep_type" ] || pkg_list "$dep" >/dev/null || - install_dep="$install_dep'$dep', " - done < "$tar_dir/$pkg_name/$pkg_db/$pkg_name/depends" - - [ "$install_dep" ] && die "$1" "Package requires ${install_dep%, }" - - run_hook pre-install "$pkg_name" "$tar_dir/$pkg_name" root - - pkg_conflicts "$pkg_name" - - log "$pkg_name" "Installing package incrementally" - - # Block being able to abort the script with Ctrl+C during installation. - # Removes all risk of the user aborting a package installation leaving - # an incomplete package installed. - trap_set block - - # If the package is already installed (and this is an upgrade) make a - # backup of the manifest and etcsums files. - cp -f "$sys_db/$pkg_name/manifest" "$mak_dir/m" 2>/dev/null ||: - cp -f "$sys_db/$pkg_name/etcsums" "$mak_dir/c" 2>/dev/null ||: - - # This is repeated multiple times. Better to make it a function. - pkg_rsync() { - rsync "--chown=$USER:$USER" --chmod=Du-s,Dg-s,Do-s \ - -WhHKa --no-compress --exclude /etc "${1:---}" \ - "$tar_dir/$pkg_name/" "$CPT_ROOT/" - } - - # Install the package by using 'rsync' and overwrite any existing files - # (excluding '/etc/'). - pkg_rsync --info=progress2 - pkg_etc - - # Remove any leftover files if this is an upgrade. - "$grep" -vFxf "$sys_db/$pkg_name/manifest" "$mak_dir/m" 2>/dev/null | - - while read -r file; do - file=$CPT_ROOT/$file - - # Skip deleting some leftover files. - case $file in /etc/*) continue; esac - - # Remove files. - if [ -f "$file" ] && [ ! -L "$file" ]; then - rm -f "$file" - - # Remove file symlinks. - elif [ -h "$file" ] && [ ! -d "$file" ]; then - unlink "$file" ||: - - # Skip directory symlinks. - elif [ -h "$file" ] && [ -d "$file" ]; then : - - # Remove directories if empty. - elif [ -d "$file" ]; then - rmdir "$file" 2>/dev/null ||: - fi - done ||: - - log "$pkg_name" "Verifying installation" - { pkg_rsync; pkg_rsync; } ||: - - # Reset 'trap' to its original value. Installation is done so - # we no longer need to block 'Ctrl+C'. - trap_set cleanup - - if [ -x "$sys_db/$pkg_name/post-install" ]; then - log "$pkg_name" "Running post-install script" - "$sys_db/$pkg_name/post-install" ||: - fi - - run_hook post-install "$pkg_name" "$sys_db/$pkg_name" root - - log "$pkg_name" "Installed successfully" -} - -pkg_fetch() { - log "Updating repositories" - - run_hook pre-fetch - - # Create a list of all repositories. 
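CPT_PATH is split on ':' exactly like PATH, one repository per component, with the system database appended internally for lookups. A user configuration might therefore export something like the following (directories are examples only):

    CPT_PATH=$HOME/repos/core:$HOME/repos/extra:$HOME/repos/community
    export CPT_PATH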
- # See [1] at top of script. - # shellcheck disable=2046,2086 - { IFS=:; set -- $CPT_PATH; IFS=$old_ifs ;} - - # Update each repository in '$CPT_PATH'. It is assumed that - # each repository is 'git' tracked. - for repo; do - # Go to the root of the repository (if it exists). - cd "$repo" - cd "$(git rev-parse --show-toplevel 2>/dev/null)" 2>/dev/null || - cd "$(hg root 2>/dev/null)" 2>/dev/null ||: - - if [ -d .git ]; then - - [ "$(git remote 2>/dev/null)" ] || { - log "$repo" " " - printf '%s\n' "No remote, skipping." - continue - } - - contains "$repos" "$PWD" || { - repos="$repos $PWD " - - # Display a tick if signing is enabled for this - # repository. - case $(git config merge.verifySignatures) in - true) log "$PWD" "[signed] " ;; - *) log "$PWD" " " ;; - esac - - if [ -w "$PWD" ] && [ "$uid" != 0 ]; then - git fetch - git merge - git submodule update --remote --init -f - - else - [ "$uid" = 0 ] || log "$PWD" "Need root to update" - - # Find out the owner of the repository and spawn - # git as this user below. - # - # This prevents 'git' from changing the original - # ownership of files and directories in the rare - # case that the repository is owned by a 3rd user. - ( - user=$(cpt-stat "$PWD") || user=root - id -u "$user" >/dev/null 2>&1 || user=root - - [ "$user" = root ] || - log "Dropping permissions to $user for pull" - - git_cmd="git fetch && git merge && git submodule update --remote --init -f" - case $su in *su) git_cmd="'$git_cmd'"; esac - - # Spawn a subshell to run multiple commands as - # root at once. This makes things easier on users - # who aren't using persist/timestamps for auth - # caching. - user=$user as_root sh -c "$git_cmd" - ) - fi - } - elif [ -d .hg ]; then - - [ "$(hg showconfig paths 2>/dev/null)" ] || { - log "$repo" " " - printf '%s\n' "No remote, skipping." - continue - } - - contains "$repos $PWD" || { - repos="$repos $PWD" - - if [ -w "$PWD" ] && [ "$uid" != 0 ]; then - hg pull - hg update - else - [ "$uid" ] || log "$PWD" "Need root to update" - - # We are going to do the same operation as above, to - # find the owner of the repository. - ( - user=$(cpt-stat "$PWD") || user=root - id -u "$user" >/dev/null 2>&1 || user=root - - [ "$user" = root ] || - log "Dropping permissions to $user for pull" - - hg_cmd="hg pull && hg update" - - case $su in *su) hg_cmd="'$hg_cmd'"; esac - user=$user as_root sh -c "$hg_cmd" - ) - fi - } - elif [ -f .rsync ]; then - # If an .rsync_root file exists, we check that the repository root - # exists. If it does, we change to that directory to do the fetch. - # This way, we allow for partial repositories while making sure that - # we can fetch the repository in a single operation. - [ -f .rsync_root ] && { - read -r rsync_root < .rsync_root - [ -f "$rsync_root/.rsync" ] && cd "$rsync_root" - } - contains "$repos" "$PWD" || { - repos="$repos $PWD" - read -r remote < .rsync - if [ -w "$PWD" ] && [ "$uid" != 0 ]; then - rsync -acvzzC --include=core --delete "$remote/" "$PWD" - else - [ "$uid" = 0 ] || log "$PWD" "Need root to update" - - # Similar to the git update, we find the owner of - # the repository and spawn rsync as that user. - ( - user=$(cpt-stat "$PWD") || user=root - id -u "$user" >/dev/null 2>&1 || user=root - - [ "$user" = root ] || - log "Dropping permissions to $user for pull" - - user=$user as_root rsync -acvzzC --include=core --delete "$remote/" "$PWD" - ) - fi - } - else - log "$repo" " " - printf '%s\n' "Not a remote repository, skipping." 
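For the rsync branch above, a repository is marked by a '.rsync' file whose first line is the remote, and a subdirectory of a partial checkout can point back at the repository root through '.rsync_root'. A sketch of that layout (the remote URL and the relative path are assumptions):

    printf '%s\n' 'rsync://example.org/repo' > repo/.rsync
    printf '%s\n' '..' > repo/extra/.rsync_root   # path back to the directory holding .rsync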
- fi - done - - run_hook post-fetch -} - -pkg_updates(){ - # Check all installed packages for updates. So long as the installed - # version and the version in the repositories differ, it's considered - # an update. - [ "$CPT_FETCH" = 0 ] || pkg_fetch - - log "Checking for new package versions" - - set +f - - for pkg in "$sys_db/"*; do - pkg_name=${pkg##*/} - - # Read version and release information from the installed packages - # and repository. - read -r db_ver db_rel < "$pkg/version" - read -r re_ver re_rel < "$(pkg_find "$pkg_name")/version" - - # Compare installed packages to repository packages. - [ "$db_ver-$db_rel" != "$re_ver-$re_rel" ] && { - printf '%s\n' "$pkg_name $db_ver-$db_rel ==> $re_ver-$re_rel" - outdated="$outdated$pkg_name " - } - done - - set -f - - # If the download option is specified only download the outdated packages - # and exit. - # shellcheck disable=2154 - [ "$download_only" = 1 ] && { - log "Only sources for the packages will be acquired" - prompt || exit 0 - - for pkg in $outdated; do - pkg_sources "$pkg" - done - - exit 0 - } - - contains "$outdated" cpt && { - log "Detected package manager update" - log "The package manager will be updated first" - - prompt || exit 0 - - pkg_build cpt - cpt-install cpt - - log "Updated the package manager" - log "Re-run 'cpt update' to update your system" - - exit 0 - } - - [ "$outdated" ] || { - log "Everything is up to date" - return - } - - log "Packages to update: ${outdated% }" - - # Tell 'pkg_build' to always prompt before build. - pkg_update=1 - - # Build all packages requiring an update. - # See [1] at top of script. - # shellcheck disable=2046,2086 - { - pkg_order $outdated - pkg_build $order - } - - log "Updated all packages" -} - -pkg_clean() { - # Clean up on exit or error. This removes everything related - # to the build. - [ "$CPT_DEBUG" != 1 ] || return 0 - - # Block 'Ctrl+C' while cache is being cleaned. - trap_set block - - # Remove temporary items. - rm -rf -- "${CPT_TMPDIR:=$cac_dir/proc}/$pid" -} - -create_cache() { - # A temporary directory can be specified apart from the cache - # directory in order to build in a user specified directory. - # /tmp could be used in order to build on ram, useful on SSDs. - # The user can specify CPT_TMPDIR for this. - # - # Create the required temporary directories and set the variables - # which point to them. - mkdir -p "${CPT_TMPDIR:=$cac_dir/proc}" \ - "${mak_dir:=$CPT_TMPDIR/$pid/build}" \ - "${pkg_dir:=$CPT_TMPDIR/$pid/pkg}" \ - "${tar_dir:=$CPT_TMPDIR/$pid/export}" - -} - -# main() -{ - set -ef - - # If a parser definition exists, let's run it ourselves. This makes sure we - # get the variables as soon as possible. - command -v parser_definition >/dev/null && { - eval "$(getoptions parser_definition parse "$0")" - parse "$@" - eval set -- "$REST" - } - - # Create the cache directories for CPT and set the variables which point - # to them. This is seperate from temporary directories created in - # create_cache(). That's because we need these variables set on most - # occasions. - mkdir -p "${cac_dir:=${CPT_CACHE:=${XDG_CACHE_HOME:-$HOME/.cache}/cpt}}" \ - "${src_dir:=$cac_dir/sources}" \ - "${log_dir:=$cac_dir/logs}" \ - "${bin_dir:=$cac_dir/bin}" - - # Set the location to the repository and package database. - pkg_db=var/db/cpt/installed - - # The PID of the current shell process is used to isolate directories - # to each specific CPT instance. This allows multiple package manager - # instances to be run at once. 
Store the value in another variable so - # that it doesn't change beneath us. - pid=${CPT_PID:-$$} - - # Force the C locale to speed up things like 'grep' which disable unicode - # etc when this is set. We don't need unicode and a speed up is always - # welcome. - export LC_ALL=C LANG=C - - # Catch errors and ensure that build files and directories are cleaned - # up before we die. This occurs on 'Ctrl+C' as well as success and error. - trap_set cleanup - - # Prefer GNU grep if installed as it is much much faster than busybox's - # implementation. Very much worth it if you value performance over - # POSIX correctness (grep quoted to avoid shellcheck false-positive). - grep=$(command -v ggrep) || grep='grep' - - # Prefer libarchive tar or GNU tar if installed as they are much - # much faster than busybox's implementation. Very much worth it if - # you value performance. - tar=$(command -v bsdtar || command -v gtar) || tar=tar - - # Figure out which 'sudo' command to use based on the user's choice or - # what is available on the system. - su=${CPT_SU:-$(command -v sudo || command -v doas)} || su=su - - # Store the date and time of script invocation to be used as the name - # of the log files the package manager creates uring builds. - time=$(date '+%Y-%m-%d-%H:%M') - - # Use readelf for fixing dependencies if it is available, fallback to - # ldd. readelf shows only the actual dependencies and doesn't include - # the libraries required by the dependencies. - elf_prog=${CPT_ELF:="$( - command -v readelf || - command -v llvm-readelf || - command -v eu-readelf)"} || elf_prog=ldd - - # Make note of the user's current ID to do root checks later on. - # This is used enough to warrant a place here. - uid=$(id -u) - - # Save IFS, so we can restore it back to what it was before. - old_ifs=$IFS - - # Make sure that the CPT_ROOT doesn't end with a '/'. This might - # break some operations. - [ -z "$CPT_ROOT" ] || [ "${CPT_ROOT##*/}" ] || { - warn "" "Your CPT_ROOT variable shouldn't end with '/'" - CPT_ROOT=${CPT_ROOT%/} - } - - # Define an optional sys_arch variable in order to provide - # information to build files with architectural information. - sys_arch=$(uname -m 2>/dev/null) ||: - - # Define this variable but don't create its directory structure from - # the get go. It will be created as needed by package installation. - sys_db=$CPT_ROOT/$pkg_db - - # This allows for automatic setup of a CPT chroot and will - # do nothing on a normal system. - mkdir -p "$CPT_ROOT/" 2>/dev/null ||: - - # Set a value for CPT_COMPRESS if it isn't set. - : "${CPT_COMPRESS:=gz}" - - # Unless being piped or the user specifically doesn't want colors, set - # colors. This can of course be overriden if the user specifically want - # colors during piping. - if { [ "$CPT_COLOR" != 0 ] && [ -t 1 ] ;} || [ "$CPT_COLOR" = 1 ]; then - colory="\033[1;33m" colorb="\033[1;36m" colre="\033[m" - fi - -} diff --git a/src/cpt-lib.in b/src/cpt-lib.in new file mode 100644 index 0000000..6045e22 --- /dev/null +++ b/src/cpt-lib.in @@ -0,0 +1,1874 @@ +#!/bin/sh -ef +# shellcheck source=/dev/null +# +# This is the Carbs Packaging Toolchain written for Carbs Linux. +# It was originally forked from the kiss package manager by +# Dylan Araps. +# +# Currently maintained by Cem Keylan. + +version() { + log "Carbs Packaging Tools" @VERSION@ + exit 0 +} + +out() { + # Print a message as is. + printf '%s\n' "$@" +} + +log() { + # Print a message prettily. 
+ # + # All messages are printed to stderr to allow the user to hide build + # output which is the only thing printed to stdout. + # + # '${3:-->}': If the 3rd argument is missing, set prefix to '->'. + # '${2:+colorb}': If the 2nd argument exists, set text style of '$1'. + printf '%b%s %b%b%s%b %s\n' \ + "$colory" "${3:-->}" "$colre" "${2:+$colorb}" "$1" "$colre" "$2" >&2 +} + +die() { + # Print a message and exit with '1' (error). + log "$1" "$2" "!>" + exit 1 +} + +trap_set() { + # Function to set the trap value. + case ${1:-cleanup} in + cleanup) trap pkg_clean EXIT INT ;; + block) trap '' INT ;; + unset) trap - EXIT INT ;; + esac +} + +# This is the public domain getoptions shell library. It also forms a usage +# function. +# URL: https://github.com/ko1nksm/getoptions (v2.0.1) +# License: Creative Commons Zero v1.0 Universal +# shellcheck disable=2016 +getoptions() { + _error='' _on=1 _off='' _export='' _plus='' _mode='' _alt='' _rest='' + _opts='' _help='' _indent='' _init=@empty IFS=' ' + + for i in 0 1 2 3 4 5; do + eval "_$i() { echo \"$_indent\$@\"; }" + _indent="$_indent " + done + + quote() { + q="$2'" r='' + while [ "$q" ]; do r="$r${q%%\'*}'\''" && q=${q#*\'}; done + q="'${r%????}'" && q=${q#\'\'} && q=${q%\'\'} + eval "$1=\${q:-\"''\"}" + } + code() { + [ "${1#:}" = "$1" ] && c=3 || c=4 + eval "[ ! \${$c:+x} ] || $2 \"\$$c\"" + } + + invoke() { eval '"_$@"'; } + prehook() { invoke "$@"; } + for i in setup flag param option disp msg; do + eval "$i() { prehook $i \"\$@\"; }" + done + + args() { + on=$_on off=$_off export=$_export init=$_init _hasarg=$1 + while [ $# -gt 2 ] && [ "$3" != '--' ] && shift; do + case $2 in + -?) [ "$_hasarg" ] || _opts="$_opts${2#-}" ;; + +*) _plus=1 ;; + [!-+]*) eval "${2%%:*}=\${2#*:}" + esac + done + } + defvar() { + case $init in + @none) : ;; + @export) code "$1" _0 "export $1" ;; + @empty) code "$1" _0 "${export:+export }$1=''" ;; + @unset) code "$1" _0 "unset $1 ||:" "unset OPTARG ||:; ${1#:}" ;; + *) + case $init in @*) eval "init=\"=\${${init#@}}\""; esac + case $init in [!=]*) _0 "$init"; return 0; esac + quote init "${init#=}" + code "$1" _0 "${export:+export }$1=$init" "OPTARG=$init; ${1#:}" + esac + } + _setup() { + [ $# -gt 0 ] && { [ "$1" ] && _rest=$1; shift; } + for i; do [ "$i" = '--' ] && break; eval "_${i%%:*}=\${i#*:}"; done + } + _flag() { args : "$@"; defvar "$@"; } + _param() { args '' "$@"; defvar "$@"; } + _option() { args '' "$@"; defvar "$@"; } + _disp() { args : "$@"; } + _msg() { args : _ "$@"; } + + "$@" + _0 "${_rest:?}=''" + + args() { + sw='' validate='' pattern='' counter='' on=$_on off=$_off export=$_export + while [ $# -gt 1 ] && [ "$2" != '--' ] && shift; do + case $1 in + --\{no-\}*) sw="$sw${sw:+ | }--${1#--?no-?} | --no-${1#--?no-?}" ;; + [-+]? 
| --*) sw="$sw${sw:+ | }$1" ;; + *) eval "${1%%:*}=\"\${1#*:}\"" + esac + done + } + setup() { :; } + _flag() { + args "$@" + quote on "$on" && quote off "$off" + [ "$counter" ] && on=1 off=-1 v="\$((\${$1:-0}+\${OPTARG:-0}))" || v='' + _3 "$sw)" + _4 '[ "${OPTARG:-}" ] && OPTARG=${OPTARG#*\=} && set -- noarg "$1" && break' + _4 "eval '[ \${OPTARG+x} ] &&:' && OPTARG=$on || OPTARG=$off" + valid "$1" "${v:-\$OPTARG}" + _4 ';;' + } + _param() { + args "$@" + _3 "$sw)" + _4 '[ $# -le 1 ] && set -- required "$1" && break' + _4 'OPTARG=$2' + valid "$1" '$OPTARG' + _4 'shift ;;' + } + _option() { + args "$@" + quote on "$on" && quote off "$off" + _3 "$sw)" + _4 'set -- "$1" "$@"' + _4 '[ ${OPTARG+x} ] && {' + _5 'case $1 in --no-*) set -- noarg "${1%%\=*}"; break; esac' + _5 '[ "${OPTARG:-}" ] && { shift; OPTARG=$2; } ||' "OPTARG=$on" + _4 "} || OPTARG=$off" + valid "$1" '$OPTARG' + _4 'shift ;;' + } + valid() { + set -- "$validate" "$pattern" "$1" "$2" + [ "$1" ] && _4 "$1 || { set -- ${1%% *}:\$? \"\$1\" $1; break; }" + [ "$2" ] && { + quote pattern "$2" + _4 "case \$OPTARG in $2) ;;" + _5 "*) set -- pattern:$pattern \"\$1\"; break" + _4 "esac" + } + code "$3" _4 "${export:+export }$3=\"$4\"" "${3#:}" + } + _disp() { + args "$@" + _3 "$sw)" + code "$1" _4 "echo \"\${$1}\"" "${1#:}" + _4 'exit 0 ;;' + } + _msg() { :; } + + _0 "$2() {" + _1 'OPTIND=$(($#+1))' + _1 'while OPTARG= && [ $# -gt 0 ]; do' + [ "$_alt" ] && _2 'case $1 in -[!-]?*) set -- "-$@"; esac' + _2 'case $1 in' + wa() { _4 "eval '${1% *}' \${1+'\"\$@\"'}"; } + _3 '--?*=*) OPTARG=$1; shift' + wa 'set -- "${OPTARG%%\=*}" "${OPTARG#*\=}" "$@"' + _4 ';;' + _3 '--no-*) unset OPTARG ;;' + [ "$_alt" ] || { + [ "$_opts" ] && { + _3 "-[$_opts]?*) OPTARG=\$1; shift" + wa 'set -- "${OPTARG%"${OPTARG#??}"}" "${OPTARG#??}" "$@"' + _4 ';;' + } + _3 '-[!-]?*) OPTARG=$1; shift' + wa 'set -- "${OPTARG%"${OPTARG#??}"}" "-${OPTARG#??}" "$@"' + _4 'OPTARG= ;;' + } + [ "$_plus" ] && { + _3 '+??*) OPTARG=$1; shift' + wa 'set -- "${OPTARG%"${OPTARG#??}"}" "+${OPTARG#??}" "$@"' + _4 'unset OPTARG ;;' + _3 '+*) unset OPTARG ;;' + } + _2 'esac' + _2 'case $1 in' + "$@" + rest() { + _3 "$1" + _4 'while [ $# -gt 0 ]; do' + _5 "$_rest=\"\${$_rest}" '\"\${$((${OPTIND:-0}-$#))}\""' + _5 'shift' + _4 'done' + _4 'break ;;' + } + rest '--) shift' + _3 "[-${_plus:++}]?*)" 'set -- unknown "$1" && break ;;' + case $_mode in + +) rest '*)' ;; + *) _3 "*) $_rest=\"\${$_rest}" '\"\${$((${OPTIND:-0}-$#))}\""' + esac + _2 'esac' + _2 'shift' + _1 'done' + _1 '[ $# -eq 0 ] && { OPTIND=1; unset OPTARG; return 0; }' + _1 'case $1 in' + _2 'unknown) set -- "Unrecognized option: $2" "$@" ;;' + _2 'noarg) set -- "Does not allow an argument: $2" "$@" ;;' + _2 'required) set -- "Requires an argument: $2" "$@" ;;' + _2 'pattern:*) set -- "Does not match the pattern (${1#*:}): $2" "$@" ;;' + _2 '*) set -- "Validation error ($1): $2" "$@"' + _1 'esac' + [ "$_error" ] && _1 "$_error" '"$@" >&2 || exit $?' + _1 'echo "$1" >&2' + _1 'exit 1' + _0 '}' + + # This throws an error on older versions of shellcheck. + # shellcheck disable=2086 + [ ! "$_help" ] || eval "shift 2; getoptions_help $1 $_help" ${3+'"$@"'} +} +# URL: https://github.com/ko1nksm/getoptions (v2.0.1) +# License: Creative Commons Zero v1.0 Universal +getoptions_help() { + width=30 plus='' leading=' ' + + pad() { p=$2; while [ ${#p} -lt "$3" ]; do p="$p "; done; eval "$1=\$p"; } + + args() { + _type=$1 var=${2%% *} sw='' label='' hidden='' _width=$width && shift 2 + while [ $# -gt 0 ] && i=$1 && shift && [ ! 
"$i" = '--' ]; do + case $i in + --*) pad sw "$sw${sw:+, }" $((${plus:+4}+4)); sw="$sw$i" ;; + -?) sw="$sw${sw:+, }$i" ;; + +?) [ ! "$plus" ] || { pad sw "$sw${sw:+, }" 4; sw="$sw$i"; } ;; + *) eval "${i%%:*}=\${i#*:}" + esac + done + [ "$hidden" ] && return 0 + + [ "$label" ] || case $_type in + setup | msg) label='' _width=0 ;; + flag | disp) label="$sw " ;; + param) label="$sw $var " ;; + option) label="${sw}[=$var] " + esac + pad label "${label:+$leading}$label" "$_width" + [ ${#label} -le "$_width" ] && [ $# -gt 0 ] && label="$label$1" && shift + echo "$label" + pad label '' "$_width" + for i; do echo "$label$i"; done + } + + for i in 'setup :' flag param option disp 'msg :'; do + eval "${i% *}() { args $i \"\$@\"; }" + done + + echo "$2() {" + echo "cat<<'GETOPTIONSHERE'" + "$@" + echo "GETOPTIONSHERE" + echo "}" +} + +global_options() { + msg -- '' 'Global Options:' + flag CPT_FORCE -f --force init:@export -- "Force operation" + flag CPT_PROMPT -y --no-prompt on:0 off:0 init:@export -- "Do not prompt for confirmation" + param CPT_ROOT --root init:@export -- "Use an alternate root directory" + disp :usage -h --help -- "Show this help message" + disp :version -v --version -- "Print version information" +} + +warn() { + # Print a warning message + log "$1" "$2" "${3:-WARNING}" +} + +contains() { + # Check if a "string list" contains a word. + case " $1 " in *" $2 "*) return 0; esac; return 1 +} + +regesc() { + # Escape special regular expression characters as + # defined in POSIX BRE. '$.*[\^' + printf '%s\n' "$1" | + sed 's|\\|\\\\|g;s|\[|\\[|g;s|\$|\\$|g;s|\.|\\.|g;s|\*|\\*|g;s|\^|\\^|g' +} + + +prompt() { + # If a CPT_NOPROMPT variable is set, continue. + # This can be useful for installation scripts and + # bootstrapping. + [ "$CPT_PROMPT" = 0 ] && return 0 + + # Ask the user for some input. + [ "$1" ] && log "$1" + log "Continue?: Press Enter to continue or Ctrl+C to abort here" + + # POSIX 'read' has none of the "nice" options like '-n', '-p' + # etc etc. This is the most basic usage of 'read'. + # '_' is used as 'dash' errors when no variable is given to 'read'. + read -r _ || return 1 +} + +as_root() { + # Simple function to run a command as root using either 'sudo', + # 'doas' or 'su'. Hurrah for choice. + [ "$uid" = 0 ] || log "Using '${su:-su}' (to become ${user:=root})" + + # We are exporting package manager variables, so that we still have the + # same repository paths / access to the same cache directories etc. + set -- HOME="$HOME" \ + USER="$user" \ + XDG_CACHE_HOME="$XDG_CACHE_HOME" \ + CPT_CACHE="$CPT_CACHE" \ + CPT_CHOICE="$CPT_CHOICE" \ + CPT_COMPRESS="$CPT_COMPRESS" \ + CPT_DEBUG="$CPT_DEBUG" \ + CPT_FETCH="$CPT_FETCH" \ + CPT_FORCE="$CPT_FORCE" \ + CPT_HOOK="$CPT_HOOK" \ + CPT_KEEPLOG="$CPT_KEEPLOG" \ + CPT_PATH="$CPT_PATH" \ + CPT_PID="$CPT_PID" \ + CPT_PROMPT="$CPT_PROMPT" \ + CPT_ROOT="$CPT_ROOT" \ + CPT_TMPDIR="$CPT_TMPDIR" \ + "$@" + + case ${su##*/} in + sudo|doas) "$su" -u "$user" -- env "$@" ;; + su) su -c "env $* <&3" "$user" 3<&0 /dev/null || + sha256 -r "$1" 2>/dev/null || + openssl dgst -r -sha256 "$1" || + die "No sha256 program could be run." ;} | + + while read -r hash _; do printf '%s %s\n' "$hash" "$1"; done +} + +pkg_owner() { + set +f + + [ "$3" ] || set -- "$1" "$2" "$sys_db"/*/manifest + + pkg_owner=$(grep "$@") + pkg_owner=${pkg_owner%/*} + pkg_owner=${pkg_owner##*/} + + set -f -- "$pkg_owner"; unset pkg_owner + [ "$1" ] && printf '%s\n' "$1" +} + +pkg_isbuilt() ( + # Check if a package is built or not. 
+ read -r ver rel < "$(pkg_find "$1")/version" + + set +f + for tarball in "$bin_dir/$1#$ver-$rel.tar."*; do + [ -f "$tarball" ] && return 0 + done + return 1 +) + +pkg_lint() { + # Check that each mandatory file in the package entry exists. + log "$1" "Checking repository files" + + repo_dir=$(pkg_find "$1") + + cd "$repo_dir" || die "'$repo_dir' not accessible" + [ -f sources ] || warn "$1" "Sources file not found" + [ -x build ] || die "$1" "Build file not found or not executable" + [ -s version ] || die "$1" "Version file not found or empty" + + read -r _ release 2>/dev/null < version || die "Version file not found" + [ "$release" ] || die "Release field not found in version file" + + [ "$2" ] || [ -f checksums ] || die "$pkg" "Checksums are missing" +} + +pkg_find() { + # Use a SEARCH_PATH variable so that we can get the sys_db into + # the same variable as CPT_PATH. This makes it easier when we are + # searching for executables instead of CPT_PATH. + : "${SEARCH_PATH:=$CPT_PATH:$sys_db}" + + # Figure out which repository a package belongs to by + # searching for directories matching the package name + # in $CPT_PATH/*. + query=$1 match=$2 type=$3 IFS=:; set -- + + # Word splitting is intentional here. + # shellcheck disable=2086 + for path in $SEARCH_PATH ; do + set +f + + for path2 in "$path/"$query; do + test "${type:--d}" "$path2" && set -f -- "$@" "$path2" + done + done + + IFS=$old_ifs + + # A package may also not be found due to a repository not being + # readable by the current user. Either way, we need to die here. + [ "$1" ] || die "Package '$query' not in any repository" + + # Show all search results if called from 'cpt search', else + # print only the first match. + [ "$match" ] && printf '%s\n' "$@" || printf '%s\n' "$1" +} + +pkg_list() { + # List installed packages. As the format is files and + # directories, this just involves a simple for loop and + # file read. + + # Change directories to the database. This allows us to + # avoid having to 'basename' each path. If this fails, + # set '$1' to mimic a failed glob which indicates that + # nothing is installed. + cd "$sys_db" 2>/dev/null || set -- "$sys_db/"\* + + # Optional arguments can be passed to check for specific + # packages. If no arguments are passed, list all. As we + # loop over '$@', if there aren't any arguments we can + # just set the directory contents to the argument list. + [ "$1" ] || { set +f; set -f -- *; } + + # If the 'glob' above failed, exit early as there are no + # packages installed. + [ "$1" = "$sys_db/"\* ] && return 1 + + # Loop over each package and print its name and version. + for pkg do + [ -d "$pkg" ] || { log "$pkg" "not installed"; return 1; } + + read -r version 2>/dev/null < "$pkg/version" || version=null + printf '%s\n' "$pkg $version" + done +} + +pkg_cache() { + read -r version release 2>/dev/null < "$(pkg_find "$1")/version" + + # Initially assume that the package tarball is built with the CPT_COMPRESS + # value. + if [ -f "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS" ]; then + tar_file="$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS" + else + set +f; set -f -- "$bin_dir/$1#$version-$release.tar."* + tar_file=$1 + fi + + [ -f "$tar_file" ] +} + +pkg_sources() { + # Download any remote package sources. The existence of local + # files is also checked. + repo_dir=$(pkg_find "$1") + + # Support packages without sources. Simply do nothing. 
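+ # A 'sources' file is a list of '<source> [destination dir]' lines; a
+ # short, purely illustrative example (URLs and paths are hypothetical):
+ #   https://example.org/foo-1.0.tar.gz
+ #   git+https://example.org/foo.git@v1.0 vendor/foo
+ #   patches/fix-build.patch
+ # Remote archives are cached under "$src_dir/<name>/", git/hg sources are
+ # cloned at extraction time, and bare paths are copied from the package's
+ # repository directory.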
+ [ -f "$repo_dir/sources" ] || return 0 + + log "$1" "Downloading sources" + + # Store each downloaded source in a directory named after the + # package it belongs to. This avoid conflicts between two packages + # having a source of the same name. + mkdir -p "$src_dir/$1" && cd "$src_dir/$1" + + repo_dir=$(pkg_find "$1") + + while read -r src dest || [ "$src" ]; do + # Remote git/hg repository or comment. + if [ -z "${src##\#*}" ] || + [ -z "${src##git+*}" ] || + [ -z "${src##hg+*}" ] + + then : + + # Remote source (cached). + elif [ -f "${src##*/}" ]; then + log "$1" "Found cached source '${src##*/}'" + + # Remote source. + elif [ -z "${src##*://*}" ]; then + log "$1" "Downloading $src" + + curl "$src" -fLo "${src##*/}" || { + rm -f "${src##*/}" + die "$1" "Failed to download $src" + } + + # Local source. + elif [ -f "$repo_dir/$src" ]; then + log "$1" "Found local file '$src'" + + else + die "$1" "No local file '$src'" + fi + done < "$repo_dir/sources" +} + +pkg_extract() { + # Extract all source archives to the build directory and copy over + # any local repository files. + repo_dir=$(pkg_find "$1") + + # Support packages without sources. Simply do nothing. + [ -f "$repo_dir/sources" ] || return 0 + + log "$1" "Extracting sources" + + while read -r src dest || [ "$src" ]; do + mkdir -p "$mak_dir/$1/$dest" && cd "$mak_dir/$1/$dest" + + case $src in + # Git repository. + git+*) + # Split the source into URL + OBJECT (branch or commit). + url=${src##git+} com=${url##*[@#]} com=${com#${url%[@#]*}} + + log "$1" "Cloning ${url%[@#]*}"; { + git init + git remote add origin "${url%[@#]*}" + case "$url" in + # Tags are specified via '@' + *@*) git fetch -t --depth=1 origin "$com" || git fetch ;; + *) git fetch --depth=1 origin "$com" || git fetch + esac + git checkout "${com:-FETCH_HEAD}" + } || die "$1" "Failed to clone $src" + ;; + + # Mercurial repository. + hg+*) + # Split the source into URL + OBJECT (branch or commit). + url=${src##hg+} com=${url##*[@#]} com=${com#${url%[@#]*}} + + # Unfortunately, there is no shallow cloning with Mercurial. + log "$1" "Cloning ${url%[@#]*}" + hg clone -u "${com:-tip}" + + ;; + + # Comment or blank line. + \#*|'') continue ;; + + # Only 'tar', 'cpio', and 'zip' archives are currently supported for + # extraction. Other filetypes are simply copied to '$mak_dir' + # which allows for manual extraction. + *://*.tar|*://*.tar.??|*://*.tar.???|*://*.tar.????|*://*.tgz|*://*.txz) + + decompress "$src_dir/$1/${src##*/}" > .ktar + + "$tar" xf .ktar || die "$1" "Couldn't extract ${src##*/}" + + # We now list the contents of the tarball so we can do our + # version of 'strip-components'. + "$tar" tf .ktar | + while read -r file; do printf '%s\n' "${file%%/*}"; done | + + # Do not repeat files. + uniq | + + # For every directory in the base we move each file + # inside it to the upper directory. + while read -r dir ; do + + # Skip if we are not dealing with a directory here. + # This way we don't remove files on the upper directory + # if a tar archive doesn't need directory stripping. + [ -d "${dir#.}" ] || continue + + # Change into the directory in a subshell so we don't + # need to cd back to the upper directory. + ( + cd "$dir" + + # We use find because we want to move hidden files + # as well. + # + # Skip the file if it has the same name as the directory. + # We will deal with it later. + # + # Word splitting is intentional here. + # shellcheck disable=2046 + find . \( ! -name . -prune \) ! -name "$dir" \ + -exec mv -f {} .. 
\; + + # If a file/directory with the same name as the directory + # exists, append a '.cptbak' to it and move it to the + # upper directory. + ! [ -e "$dir" ] || mv "$dir" "../${dir}.cptbak" + ) + rmdir "$dir" + + # If a backup file exists, move it into the original location. + ! [ -e "${dir}.cptbak" ] || mv "${dir}.cptbak" "$dir" + done + + # Clean up the temporary tarball. + rm -f .ktar + ;; + + *://*.cpio|*://*.cpio.??|*://*.cpio.???|*://*.cpio.????) + decompress "$src_dir/$1/${src##*/}" | cpio -i + + ;; + + *://*.zip) + unzip "$src_dir/$1/${src##*/}" || + die "$1" "Couldn't extract ${src##*/}" + + ;; + + *) + # Local file. + if [ -f "$repo_dir/$src" ]; then + cp -f "$repo_dir/$src" . + + # Remote file. + elif [ -f "$src_dir/$1/${src##*/}" ]; then + cp -f "$src_dir/$1/${src##*/}" . + + else + die "$1" "Local file $src not found" + fi + ;; + esac + done < "$repo_dir/sources" +} + +pkg_depends() { + # Resolve all dependencies and generate an ordered list. + # This does a depth-first search. The deepest dependencies are + # listed first and then the parents in reverse order. + contains "$deps" "$1" || { + # Filter out non-explicit, aleady installed dependencies. + # Only filter installed if called from 'pkg_build()'. + [ "$pkg_build" ] && [ -z "$2" ] && + (pkg_list "$1" >/dev/null) && return + + while read -r dep type || [ "$dep" ]; do + # Skip test dependencies unless $CPT_TEST is set to 1. + case $type in test) [ "$CPT_TEST" = 1 ] || continue; esac + + # Recurse through the dependencies of the child packages. + [ "${dep##\#*}" ] && pkg_depends "$dep" + done 2>/dev/null < "$(pkg_find "$1")/depends" ||: + + # After child dependencies are added to the list, + # add the package which depends on them. + [ "$2" = explicit ] || deps="$deps $1 " + } +} + +pkg_order() { + # Order a list of packages based on dependence and + # take into account pre-built tarballs if this is + # to be called from 'cpt i'. + order=; redro=; deps= + + for pkg do case $pkg in + *.tar.*) deps="$deps $pkg " ;; + *) pkg_depends "$pkg" raw + esac done + + # Filter the list, only keeping explicit packages. + # The purpose of these two loops is to order the + # argument list based on dependence. + for pkg in $deps; do ! contains "$*" "$pkg" || { + order="$order $pkg " + redro=" $pkg $redro" + } done + + deps= +} + +pkg_strip() { + # Strip package binaries and libraries. This saves space on the + # system as well as on the tarballs we ship for installation. + + # Package has stripping disabled, stop here. + [ -f "$mak_dir/$pkg/nostrip" ] && return + + log "$1" "Stripping binaries and libraries" + + find "$pkg_dir/$1" -type f | while read -r file; do + case $(od -A o -t c -N 18 "$file") in + # REL (object files (.o), static libraries (.a)). + *177*E*L*F*0000020\ 001\ *|*\!*\<*a*r*c*h*\>*) + strip -g -R .comment -R .note "$file" + ;; + + # EXEC (static binaries). + # DYN (shared libraries, dynamic binaries). + # Shared libraries keep global symbols in a separate ELF section + # called '.dynsym'. '--strip-all/-s' does not touch the dynamic + # symbol entries which makes this safe to do. + *177*E*L*F*0000020\ 00[23]\ *) + strip -s -R .comment -R .note "$file" + ;; + esac + done 2>/dev/null ||: +} + +pkg_fix_deps() { + # Dynamically look for missing runtime dependencies by checking each binary + # and library with either 'ldd' or 'readelf'. This catches any extra + # libraries and or dependencies pulled in by the package's build suite. 
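+ # Sketch of what the detection below keys on (output is illustrative):
+ #   readelf -d usr/bin/foo  ->  0x0001 (NEEDED) Shared library: [libz.so.1]
+ #   ldd usr/bin/foo         ->  libz.so.1 => /usr/lib/libz.so.1
+ # Only the library name/path is kept, and pkg_owner() maps it back to the
+ # installed package that ships it (skipping the package being built).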
+ log "$1" "Checking for missing dependencies" + + # Go to the directory containing the built package to + # simplify path building. + cd "$pkg_dir/$1/$pkg_db/$1" + + # Make a copy of the depends file if it exists to have a + # reference to 'diff' against. + if [ -f depends ]; then + cp -f depends "$mak_dir/d" + dep_file=$mak_dir/d + else + dep_file=/dev/null + fi + + # Generate a list of all installed manifests. + pkg_name=$1 + set +f; set -f -- "$sys_db/"*/manifest + + # Get a list of binaries and libraries, false files + # will be found, however it's faster to get 'ldd' to check + # them anyway than to filter them out. + find "$pkg_dir/$pkg_name/" -type f 2>/dev/null | + + while read -r file; do + case ${elf_prog:-ldd} in + *readelf) "$elf_prog" -d "$file" 2>/dev/null ;; + *) ldd "$file" 2>/dev/null ;; + esac | + while read -r dep; do + # Skip lines containing 'ldd'. + [ "${dep##*ldd*}" ] || continue + case $dep in *NEEDED*\[*\] | *'=>'*) ;; *) continue; esac + + # readelf output: + # 0x0000 (NEEDED) Shared library: [libc.so] + dep=${dep##*\[} + dep=${dep%%\]*} + + # ldd output: + # libc.so => /lib/ld-musl-x86_64.so.1 + dep=${dep#* => } + dep=${dep% *} + + # Figure out which package owns the file. Skip file if it is owned + # by the current package. This also handles cases where a '*-bin' + # package exists on the system, so the package manager doesn't think + # that the package we are building depends on the *-bin version of + # itself, or any other renamed versions of the same software. + pkg_owner -l "/${dep#/}\$" "$PWD/manifest" >/dev/null && continue + pkg_owner -l "/${dep#/}\$" "$@" ||: + done ||: + done >> depends + + # Remove duplicate entries from the new depends file. + # This removes duplicate lines looking *only* at the + # first column. + sort -uk1,1 -o depends depends 2>/dev/null ||: + + # Display a diff of the new dependencies against the old ones. + diff -U 3 "$dep_file" depends 2>/dev/null ||: + + # Remove the depends file if it is empty. + [ -s depends ] || rm -f depends +} + +pkg_manifest() ( + # Generate the package's manifest file. This is a list of each file + # and directory inside the package. The file is used when uninstalling + # packages, checking for package conflicts and for general debugging. + log "$1" "Generating manifest" + + # This function runs as a sub-shell to avoid having to 'cd' back to the + # prior directory before being able to continue. + cd "${2:-$pkg_dir}/$1" + + # find: Print all files and directories and append '/' to directories. + # sort: Sort the output in *reverse*. Directories appear *after* their + # contents. + # sed: Remove the first character in each line (./dir -> /dir) and + # remove all lines which only contain '.'. + find . -type d -exec printf '%s/\n' {} + -o -print | + sort -r | sed '/^\.\/$/d;ss.ss' > "${2:-$pkg_dir}/$1/$pkg_db/$1/manifest" +) + +pkg_etcsums() ( + # This function runs as a sub-shell to avoid having to 'cd' back to the + # prior directory before being able to continue. + cd "$pkg_dir/$1/etc" 2>/dev/null || return 0; cd .. + + # Generate checksums for each configuration file in the package's + # /etc/ directory for use in "smart" handling of these files. + log "$1" "Generating etcsums" + + + find etc -type f | while read -r file; do + sh256 "$file" + done > "$pkg_dir/$1/$pkg_db/$1/etcsums" +) + +pkg_tar() { + # Create a tarball from the built package's files. + # This tarball also contains the package's database entry. + log "$1" "Creating tarball" + + # Read the version information to name the package. 
+ read -r version release < "$(pkg_find "$1")/version" + + # Create a tarball from the contents of the built package. + "$tar" cf - -C "$pkg_dir/$1" . | + case $CPT_COMPRESS in + bz2) bzip2 -z ;; + xz) xz -zT 0 ;; + gz) gzip -6 ;; + zst) zstd -3 ;; + *) gzip -6 ;; # Fallback to gzip + esac \ + > "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS" + + log "$1" "Successfully created tarball" + + run_hook post-package "$1" "$bin_dir/$1#$version-$release.tar.$CPT_COMPRESS" +} + +pkg_build() { + # Build packages and turn them into packaged tarballs. This function + # also checks checksums, downloads sources and ensure all dependencies + # are installed. + pkg_build=1 + + log "Resolving dependencies" + + for pkg do contains "$explicit" "$pkg" || { + pkg_depends "$pkg" explicit + + # Mark packages passed on the command-line + # separately from those detected as dependencies. + explicit="$explicit $pkg " + } done + + [ "$pkg_update" ] || explicit_build=$explicit + + # If an explicit package is a dependency of another explicit + # package, remove it from the explicit list as it needs to be + # installed as a dependency. + # shellcheck disable=2086 + for pkg do + contains "$deps" "$pkg" && explicit=$(pop "$pkg" from $explicit) + done + + # See [1] at top of script. + # shellcheck disable=2046,2086 + set -- $deps $explicit + + log "Building: $*" + + # Only ask for confirmation if more than one package needs to be built. + [ $# -gt 1 ] || [ "$pkg_update" ] && { prompt || exit 0 ;} + + log "Checking for pre-built dependencies" + + for pkg do pkg_lint "$pkg"; done + + # Install any pre-built dependencies if they exist in the binary + # directory and are up to date. + for pkg do ! contains "$explicit_build" "$pkg" && pkg_cache "$pkg" && { + log "$pkg" "Found pre-built binary, installing" + (CPT_FORCE=1 cpt-install "$tar_file") + + # Remove the now installed package from the build list. + # See [1] at top of script. + # shellcheck disable=2046,2086 + set -- $(pop "$pkg" from "$@") + } done + + for pkg do pkg_sources "$pkg"; done + + pkg_verify "$@" + + # Finally build and create tarballs for all passed packages and + # dependencies. + for pkg do + log "$pkg" "Building package ($((in = in + 1))/$#)" + + pkg_extract "$pkg" + repo_dir=$(pkg_find "$pkg") + + read -r build_version _ < "$repo_dir/version" + + # Copy the build file to the build directory to users to modify it + # temporarily at runtime. + cp -f "$repo_dir/build" "$mak_dir/$pkg/.build.cpt" + + # Install built packages to a directory under the package name + # to avoid collisions with other packages. + mkdir -p "$pkg_dir/$pkg/$pkg_db" + + # Move to the build directory. + cd "$mak_dir/$pkg" + + log "$pkg" "Starting build" + + run_hook pre-build "$pkg" "$pkg_dir/$pkg" + + # Notify the user if the build script is changed during the pre-build + # hook. + diff -q "$repo_dir/build" .build.cpt || + log "$pkg" "Executing the modified build file" + + # Call the build script, log the output to the terminal + # and to a file. There's no PIPEFAIL in POSIX shelll so + # we must resort to tricks like killing the script ourselves. + { ./.build.cpt "$pkg_dir/$pkg" "$build_version" "$sys_arch" 2>&1 || { + log "$pkg" "Build failed" + log "$pkg" "Log stored to $log_dir/$pkg-$time-$pid" + run_hook build-fail "$pkg" "$pkg_dir/$pkg" + pkg_clean + kill 0 + } } | tee "$log_dir/$pkg-$time-$pid" + + # Run the test script if it exists and the user wants to run tests. This + # is turned off by default. 
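+ # (Illustrative invocation:) tests only run when explicitly requested,
+ # e.g. 'CPT_TEST=1 cpt b foo', which also makes pkg_depends() honour
+ # dependencies marked with the 'test' type.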
+ [ -x "$repo_dir/test" ] && [ "$CPT_TEST" = 1 ] && { + run_hook pre-test "$pkg" "$pkg_dir/$pkg" + log "$pkg" "Running tests" + "$repo_dir/test" "$pkg_dir/$pkg" "$build_version" "$sys_arch" 2>&1 || { + log "$pkg" "Test failed" + log "$pkg" "Log stored to $log_dir/$pkg-$time-$pid" + run_hook test-fail "$pkg" "$pkg_dir/$pkg" + pkg_clean + kill 0 + } } | tee -a "$log_dir/$pkg-$time-$pid" + + # Delete the log file if the build succeeded to prevent + # the directory from filling very quickly with useless logs. + [ "$CPT_KEEPLOG" = 1 ] || rm -f "$log_dir/$pkg-$time-$pid" + + # Copy the repository files to the package directory. + # This acts as the database entry. + cp -LRf "$repo_dir" "$pkg_dir/$pkg/$pkg_db/" + + # Copy the modified build file to the package directory. + pkg_build="$pkg_dir/$pkg/$pkg_db/$pkg/build" + diff -U 3 "$pkg_build" .build.cpt > "$pkg_build.diff" && + rm -f "$pkg_build.diff" + + # We never ever want this. Let's end the endless conflicts + # and remove it. + find "$pkg_dir/$pkg" -name charset.alias -exec rm -f {} + + + # Remove libtool's '*.la' library files. This removes cross-build + # system conflicts that may arise. Build-systems change, libtool + # is getting deprecated, we don't want a package that depends on + # some package's '.la' files. + find "$pkg_dir/$pkg" -name '*.la' -exec rm -f {} + + + log "$pkg" "Successfully built package" + + run_hook post-build "$pkg" "$pkg_dir/$pkg" + + # Create the manifest file early and make it empty. + # This ensures that the manifest is added to the manifest. + : > "$pkg_dir/$pkg/$pkg_db/$pkg/manifest" + + # If the package contains '/etc', add a file called + # 'etcsums' to the manifest. See comment directly above. + [ -d "$pkg_dir/$pkg/etc" ] && + : > "$pkg_dir/$pkg/$pkg_db/$pkg/etcsums" + + pkg_strip "$pkg" + pkg_manifest "$pkg" + pkg_fix_deps "$pkg" + pkg_manifest "$pkg" + pkg_etcsums "$pkg" + pkg_tar "$pkg" + + # Install only dependencies of passed packages. + # Skip this check if this is a package update. + contains "$explicit" "$pkg" && [ -z "$pkg_update" ] && continue + + log "$pkg" "Needed as a dependency or has an update, installing" + + (CPT_FORCE=1 cpt-install "$pkg") + done + + # End here as this was a system update and all packages have been installed. + [ "$pkg_update" ] && return + + log "Successfully built package(s)" + + # Turn the explicit packages into a 'list'. + # See [1] at top of script. + # shellcheck disable=2046,2086 + set -- $explicit + + # Only ask for confirmation if more than one package needs to be installed. + [ $# -gt 1 ] && prompt "Install built packages? [$*]" && { + cpt-install "$@" + return + } + + log "Run 'cpt i $*' to install the package(s)" +} + +pkg_checksums() { + # Generate checksums for packages. + repo_dir=$(pkg_find "$1") + + [ -f "$repo_dir/sources" ] || return 0 + + while read -r src _ || [ "$src" ]; do + # Comment. + if [ -z "${src##\#*}" ]; then + continue + + # File is local to the package. + elif [ -f "$repo_dir/$src" ]; then + src_path=$repo_dir/${src%/*} + + # File is remote and was downloaded. + elif [ -f "$src_dir/$1/${src##*/}" ]; then + src_path=$src_dir/$1 + + # File is a git repository. + elif [ -z "${src##git+*}" ]; then continue + + # Die here if source for some reason, doesn't exist. + else + die "$1" "Couldn't find source '$src'" + fi + + # An easy way to get 'sha256sum' to print with the 'basename' + # of files is to 'cd' to the file's directory beforehand. 
+ (cd "$src_path" && sh256 "${src##*/}") || + die "$1" "Failed to generate checksums" + done < "$repo_dir/sources" +} + +pkg_verify() { + # Verify all package checksums. This is achieved by generating a new set of + # checksums and then comparing those with the old set. + verify_cmd="NR==FNR{a[\$1];next}/^git .*/{next}!((\$1)in a){exit 1}" + + for pkg; do + repo_dir=$(pkg_find "$pkg") + [ -f "$repo_dir/sources" ] || continue + + pkg_checksums "$pkg" | awk "$verify_cmd" - "$repo_dir/checksums" || { + log "$pkg" "Checksum mismatch" + + # Instead of dying above, log it to the terminal. Also define a + # variable so we *can* die after all checksum files have been + # checked. + mismatch="$mismatch$pkg " + } done + + [ -z "$mismatch" ] || die "Checksum mismatch with: ${mismatch% }" +} + +pkg_conflicts() { + # Check to see if a package conflicts with another. + log "$1" "Checking for package conflicts" + + # Filter the tarball's manifest and select only files + # and any files they resolve to on the filesystem + # (/bin/ls -> /usr/bin/ls). + while read -r file; do + case $file in */) continue; esac + + # Use $CPT_ROOT in filename so that we follow its symlinks. + file=$CPT_ROOT/${file#/} + + # We will only follow the symlinks of the directories, so we + # reserve the directory name in this 'dirname' value. cpt-readlink + # functions in a similar fashion to 'readlink -f', it makes sure + # every component except for the first one to be available on + # the directory structure. If we cannot find it in the system, + # we don't need to make this much more complex by trying so + # hard to find it. Simply use the original directory name. + dirname="$(cpt-readlink "${file%/*}" 2>/dev/null)" || + dirname="${file%/*}" + + + # Combine the dirname and file values, and print them into the + # temporary manifest to be parsed. + printf '%s/%s\n' "${dirname#$CPT_ROOT}" "${file##*/}" + + done < "$tar_dir/$1/$pkg_db/$1/manifest" > "$CPT_TMPDIR/$pid/manifest" + + p_name=$1 + + # Generate a list of all installed package manifests + # and remove the current package from the list. + # shellcheck disable=2046,2086 + set -- $(set +f; pop "$sys_db/$p_name/manifest" from "$sys_db"/*/manifest) + + [ -s "$CPT_TMPDIR/$pid/manifest" ] || return 0 + + # In rare cases where the system only has one package installed + # and you are reinstalling that package, grep will try to read from + # standard input if we continue here. + # + # Also, if we don't have any packages installed grep will give an + # error. This will not cause the installation to fail, but we don't + # need to check for conflicts if that's the case anyway. If we have + # only zero packages or one package, just stop wasting time and continue + # with the installation. + [ "$1" ] && [ -f "$1" ] || return 0 + + # Store the list of found conflicts in a file as we will be using the + # information multiple times. Storing it in the cache dir allows us + # to be lazy as they'll be automatically removed on script end. + "$grep" -Fxf "$CPT_TMPDIR/$pid/manifest" -- "$@" > "$CPT_TMPDIR/$pid/conflict" ||: + + + # Enable alternatives automatically if it is safe to do so. + # This checks to see that the package that is about to be installed + # doesn't overwrite anything it shouldn't in '/var/db/cpt/installed'. + "$grep" -q ":/var/db/cpt/installed/" "$CPT_TMPDIR/$pid/conflict" || + choice_auto=1 + + # Use 'grep' to list matching lines between the to + # be installed package's manifest and the above filtered + # list. 
+ if [ "$CPT_CHOICE" != 0 ] && [ "$choice_auto" = 1 ]; then + + # This is a novel way of offering an "alternatives" system. + # It is entirely dynamic and all "choices" are created and + # destroyed on the fly. + # + # When a conflict is found between two packages, the file + # is moved to a directory called "choices" and its name + # changed to store its parent package and its intended + # location. + # + # The package's manifest is then updated to reflect this + # new location. + # + # The 'cpt choices' command parses this directory and + # offers you the CHOICE of *swapping* entries in this + # directory for those on the filesystem. + # + # The choices command does the same thing we do here, + # it rewrites manifests and moves files around to make + # this work. + # + # Pretty nifty huh? + while IFS=: read -r _ con; do + printf '%s\n' "Found conflict $con" + + # Create the "choices" directory inside of the tarball. + # This directory will store the conflicting file. + mkdir -p "$tar_dir/$p_name/${cho_dir:=var/db/cpt/choices}" + + # Construct the file name of the "db" entry of the + # conflicting file. (pkg_name>usr>bin>ls) + con_name=$(printf %s "$con" | sed 's|/|>|g') + + # Move the conflicting file to the choices directory + # and name it according to the format above. + mv -f "$tar_dir/$p_name/$con" \ + "$tar_dir/$p_name/$cho_dir/$p_name$con_name" 2>/dev/null || { + log "File must be in ${con%/*} and not a symlink to it" + log "This usually occurs when a binary is installed to" + log "/sbin instead of /usr/bin (example)" + log "Before this package can be used as an alternative," + log "this must be fixed in $p_name. Contact the maintainer" + die "by checking 'git log' or by running 'cpt-maintainer'" + } + done < "$CPT_TMPDIR/$pid/conflict" + + # Rewrite the package's manifest to update its location + # to its new spot (and name) in the choices directory. + pkg_manifest "$p_name" "$tar_dir" 2>/dev/null + + elif [ -s "$CPT_TMPDIR/$pid/conflict" ]; then + log "Package '$p_name' conflicts with another package" "" "!>" + log "Run 'CPT_CHOICE=1 cpt i $p_name' to add conflicts" "" "!>" + die "as alternatives." + fi +} + +pkg_swap() { + # Swap between package alternatives. + pkg_list "$1" >/dev/null + + alt=$(printf %s "$1$2" | sed 's|/|>|g') + cd "$sys_db/../choices" + + [ -f "$alt" ] || [ -h "$alt" ] || + die "Alternative '$1 $2' doesn't exist" + + if [ -f "$2" ]; then + # Figure out which package owns the file we are going to swap for + # another package's. + # + # Print the full path to the manifest file which contains + # the match to our search. + + pkg_owns=$(pkg_owner -lFx "$2") || + die "File '$2' exists on filesystem but isn't owned" + + log "Swapping '$2' from '$pkg_owns' to '$1'" + + # Convert the current owner to an alternative and rewrite + # its manifest file to reflect this. We then resort this file + # so no issues arise when removing packages. + cp -Pf "$CPT_ROOT/$2" "$pkg_owns>${alt#*>}" + sed "s#^$(regesc "$2")#${PWD#$CPT_ROOT}/$pkg_owns>${alt#*>}#" \ + "../installed/$pkg_owns/manifest" | + sort -r -o "../installed/$pkg_owns/manifest" + fi + + # Convert the desired alternative to a real file and rewrite + # the manifest file to reflect this. The reverse of above. + mv -f "$alt" "$CPT_ROOT/$2" + sed "s#^${PWD#$CPT_ROOT}/$(regesc "$alt")#$2#" "../installed/$1/manifest" | + sort -r -o "../installed/$1/manifest" +} + +pkg_etc() { + [ -d "$tar_dir/$pkg_name/etc" ] || return 0 + + (cd "$tar_dir/$pkg_name" + + # Create all directories beforehand. 
+ find etc -type d | while read -r dir; do + mkdir -p "$CPT_ROOT/$dir" + done + + # Handle files in /etc/ based on a 3-way checksum check. + find etc ! -type d | while read -r file; do + { sum_new=$(sh256 "$file") + sum_sys=$(cd "$CPT_ROOT/"; sh256 "$file") + sum_old=$("$grep" "$file$" "$mak_dir/c"); } 2>/dev/null ||: + + log "$pkg_name" "Doing 3-way handshake for $file" + printf '%s\n' "Previous: ${sum_old:-null}" + printf '%s\n' "System: ${sum_sys:-null}" + printf '%s\n' "New: ${sum_new:-null}" + + # Use a case statement to easily compare three strings at + # the same time. Pretty nifty. + case ${sum_old:-null}${sum_sys:-null}${sum_new} in + # old = Y, sys = X, new = Y + "${sum_new}${sum_sys}${sum_old}") + log "Skipping $file" + continue + ;; + + # old = X, sys = X, new = X + # old = X, sys = Y, new = Y + # old = X, sys = X, new = Y + "${sum_old}${sum_old}${sum_old}"|\ + "${sum_old:-null}${sum_sys}${sum_sys}"|\ + "${sum_sys}${sum_old}"*) + log "Installing $file" + new= + ;; + + # All other cases. + *) + warn "$pkg_name" "saving /$file as /$file.new" "->" + new=.new + ;; + esac + + cp -fPp "$file" "$CPT_ROOT/${file}${new}" + chown root:root "$CPT_ROOT/${file}${new}" 2>/dev/null + done) ||: +} + +pkg_remove() { + # Remove a package and all of its files. The '/etc' directory + # is handled differently and configuration files are *not* + # overwritten. + pkg_list "$1" >/dev/null || return + + # Make sure that nothing depends on this package. + [ "$CPT_FORCE" = 1 ] || { + log "$1" "Checking for reverse dependencies" + + (cd "$sys_db"; set +f; grep -lFx "$1" -- */depends) && + die "$1" "Can't remove package, others depend on it" + } + # Block being able to abort the script with 'Ctrl+C' during removal. + # Removes all risk of the user aborting a package removal leaving + # an incomplete package installed. + trap_set block + + if [ -x "$sys_db/$1/pre-remove" ]; then + log "$1" "Running pre-remove script" + "$sys_db/$1/pre-remove" ||: + fi + + # Create a temporary list of all directories, so we don't accidentally + # remove anything from packages that create empty directories for a + # purpose (such as baselayout). + manifest_list="$(set +f; pop "$sys_db/$1/manifest" from "$sys_db/"*/manifest)" + # shellcheck disable=2086 + [ "$manifest_list" ] && grep -h '/$' $manifest_list | sort -ur > "$mak_dir/dirs" + + run_hook pre-remove "$1" "$sys_db/$1" root + + while read -r file; do + # The file is in '/etc' skip it. This prevents the package + # manager from removing user edited configuration files. + [ "${file##/etc/*}" ] || continue + + if [ -d "$CPT_ROOT/$file" ]; then + "$grep" -Fxq "$file" "$mak_dir/dirs" 2>/dev/null && continue + rmdir "$CPT_ROOT/$file" 2>/dev/null || continue + else + rm -f "$CPT_ROOT/$file" + fi + done < "$sys_db/$1/manifest" + + # Reset 'trap' to its original value. Removal is done so + # we no longer need to block 'Ctrl+C'. + trap_set cleanup + + run_hook post-remove "$1" "$CPT_ROOT/" root + + log "$1" "Removed successfully" +} + +pkg_install() { + # Install a built package tarball. + + # Install can also take the full path to a tarball. + # We don't need to check the repository if this is the case. + if [ -f "$1" ] && [ -z "${1%%*.tar*}" ] ; then + tar_file=$1 + pkg_name=${1##*/} + pkg_name=${pkg_name%#*} + + else + pkg_cache "$1" || + die "package has not been built, run 'cpt b pkg'" + + pkg_name=$1 + fi + + mkdir -p "$tar_dir/$pkg_name" + log "$pkg_name" "Extracting $tar_file" + + # Extract the tarball to catch any errors before installation begins. 
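+ # (Both invocation styles funnel into this point, e.g. with hypothetical
+ # names: 'cpt i foo' resolved through the binary cache, or an explicit
+ # 'cpt i ./foo#1.0-1.tar.gz' path given on the command line.)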
+ decompress "$tar_file" | "$tar" xf - -C "$tar_dir/$pkg_name" + + [ -f "$tar_dir/$pkg_name/$pkg_db/$pkg_name/manifest" ] || + die "'${tar_file##*/}' is not a valid CPT package" + + # Ensure that the tarball's manifest is correct by checking that + # each file and directory inside of it actually exists. + [ "$CPT_FORCE" != 1 ] && log "$pkg_name" "Checking package manifest" && + while read -r line; do + # Skip symbolic links + [ -h "$tar_dir/$pkg_name/$line" ] || + [ -e "$tar_dir/$pkg_name/$line" ] || { + log "File $line missing from tarball but mentioned in manifest" "" "!>" + TARBALL_FAIL=1 + } + done < "$tar_dir/$pkg_name/$pkg_db/$pkg_name/manifest" + [ "$TARBALL_FAIL" ] && { + log "You can still install this package by setting CPT_FORCE variable" + die "$pkg_name" "Missing files in manifest" + } + + log "$pkg_name" "Checking that all dependencies are installed" + + # Make sure that all run-time dependencies are installed prior to + # installing the package. + [ -f "$tar_dir/$pkg_name/$pkg_db/$pkg_name/depends" ] && + [ "$CPT_FORCE" != 1 ] && + while read -r dep dep_type || [ "$dep" ]; do + [ "${dep##\#*}" ] || continue + [ "$dep_type" ] || pkg_list "$dep" >/dev/null || + install_dep="$install_dep'$dep', " + done < "$tar_dir/$pkg_name/$pkg_db/$pkg_name/depends" + + [ "$install_dep" ] && die "$1" "Package requires ${install_dep%, }" + + run_hook pre-install "$pkg_name" "$tar_dir/$pkg_name" root + + pkg_conflicts "$pkg_name" + + log "$pkg_name" "Installing package incrementally" + + # Block being able to abort the script with Ctrl+C during installation. + # Removes all risk of the user aborting a package installation leaving + # an incomplete package installed. + trap_set block + + # If the package is already installed (and this is an upgrade) make a + # backup of the manifest and etcsums files. + cp -f "$sys_db/$pkg_name/manifest" "$mak_dir/m" 2>/dev/null ||: + cp -f "$sys_db/$pkg_name/etcsums" "$mak_dir/c" 2>/dev/null ||: + + # This is repeated multiple times. Better to make it a function. + pkg_rsync() { + rsync "--chown=$USER:$USER" --chmod=Du-s,Dg-s,Do-s \ + -WhHKa --no-compress --exclude /etc "${1:---}" \ + "$tar_dir/$pkg_name/" "$CPT_ROOT/" + } + + # Install the package by using 'rsync' and overwrite any existing files + # (excluding '/etc/'). + pkg_rsync --info=progress2 + pkg_etc + + # Remove any leftover files if this is an upgrade. + "$grep" -vFxf "$sys_db/$pkg_name/manifest" "$mak_dir/m" 2>/dev/null | + + while read -r file; do + file=$CPT_ROOT/$file + + # Skip deleting some leftover files. + case $file in /etc/*) continue; esac + + # Remove files. + if [ -f "$file" ] && [ ! -L "$file" ]; then + rm -f "$file" + + # Remove file symlinks. + elif [ -h "$file" ] && [ ! -d "$file" ]; then + unlink "$file" ||: + + # Skip directory symlinks. + elif [ -h "$file" ] && [ -d "$file" ]; then : + + # Remove directories if empty. + elif [ -d "$file" ]; then + rmdir "$file" 2>/dev/null ||: + fi + done ||: + + log "$pkg_name" "Verifying installation" + { pkg_rsync; pkg_rsync; } ||: + + # Reset 'trap' to its original value. Installation is done so + # we no longer need to block 'Ctrl+C'. + trap_set cleanup + + if [ -x "$sys_db/$pkg_name/post-install" ]; then + log "$pkg_name" "Running post-install script" + "$sys_db/$pkg_name/post-install" ||: + fi + + run_hook post-install "$pkg_name" "$sys_db/$pkg_name" root + + log "$pkg_name" "Installed successfully" +} + +pkg_fetch() { + log "Updating repositories" + + run_hook pre-fetch + + # Create a list of all repositories. 
+ # See [1] at top of script. + # shellcheck disable=2046,2086 + { IFS=:; set -- $CPT_PATH; IFS=$old_ifs ;} + + # Update each repository in '$CPT_PATH'. It is assumed that + # each repository is 'git' tracked. + for repo; do + # Go to the root of the repository (if it exists). + cd "$repo" + cd "$(git rev-parse --show-toplevel 2>/dev/null)" 2>/dev/null || + cd "$(hg root 2>/dev/null)" 2>/dev/null ||: + + if [ -d .git ]; then + + [ "$(git remote 2>/dev/null)" ] || { + log "$repo" " " + printf '%s\n' "No remote, skipping." + continue + } + + contains "$repos" "$PWD" || { + repos="$repos $PWD " + + # Display a tick if signing is enabled for this + # repository. + case $(git config merge.verifySignatures) in + true) log "$PWD" "[signed] " ;; + *) log "$PWD" " " ;; + esac + + if [ -w "$PWD" ] && [ "$uid" != 0 ]; then + git fetch + git merge + git submodule update --remote --init -f + + else + [ "$uid" = 0 ] || log "$PWD" "Need root to update" + + # Find out the owner of the repository and spawn + # git as this user below. + # + # This prevents 'git' from changing the original + # ownership of files and directories in the rare + # case that the repository is owned by a 3rd user. + ( + user=$(cpt-stat "$PWD") || user=root + id -u "$user" >/dev/null 2>&1 || user=root + + [ "$user" = root ] || + log "Dropping permissions to $user for pull" + + git_cmd="git fetch && git merge && git submodule update --remote --init -f" + case $su in *su) git_cmd="'$git_cmd'"; esac + + # Spawn a subshell to run multiple commands as + # root at once. This makes things easier on users + # who aren't using persist/timestamps for auth + # caching. + user=$user as_root sh -c "$git_cmd" + ) + fi + } + elif [ -d .hg ]; then + + [ "$(hg showconfig paths 2>/dev/null)" ] || { + log "$repo" " " + printf '%s\n' "No remote, skipping." + continue + } + + contains "$repos $PWD" || { + repos="$repos $PWD" + + if [ -w "$PWD" ] && [ "$uid" != 0 ]; then + hg pull + hg update + else + [ "$uid" ] || log "$PWD" "Need root to update" + + # We are going to do the same operation as above, to + # find the owner of the repository. + ( + user=$(cpt-stat "$PWD") || user=root + id -u "$user" >/dev/null 2>&1 || user=root + + [ "$user" = root ] || + log "Dropping permissions to $user for pull" + + hg_cmd="hg pull && hg update" + + case $su in *su) hg_cmd="'$hg_cmd'"; esac + user=$user as_root sh -c "$hg_cmd" + ) + fi + } + elif [ -f .rsync ]; then + # If an .rsync_root file exists, we check that the repository root + # exists. If it does, we change to that directory to do the fetch. + # This way, we allow for partial repositories while making sure that + # we can fetch the repository in a single operation. + [ -f .rsync_root ] && { + read -r rsync_root < .rsync_root + [ -f "$rsync_root/.rsync" ] && cd "$rsync_root" + } + contains "$repos" "$PWD" || { + repos="$repos $PWD" + read -r remote < .rsync + if [ -w "$PWD" ] && [ "$uid" != 0 ]; then + rsync -acvzzC --include=core --delete "$remote/" "$PWD" + else + [ "$uid" = 0 ] || log "$PWD" "Need root to update" + + # Similar to the git update, we find the owner of + # the repository and spawn rsync as that user. + ( + user=$(cpt-stat "$PWD") || user=root + id -u "$user" >/dev/null 2>&1 || user=root + + [ "$user" = root ] || + log "Dropping permissions to $user for pull" + + user=$user as_root rsync -acvzzC --include=core --delete "$remote/" "$PWD" + ) + fi + } + else + log "$repo" " " + printf '%s\n' "Not a remote repository, skipping." 
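+ # (For reference, an rsync repository is any directory that carries a
+ # '.rsync' file holding the remote on a single line, e.g. the
+ # hypothetical 'rsync://example.org/repo', plus an optional
+ # '.rsync_root' file pointing at the repository root for partial
+ # checkouts.)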
+ fi + done + + run_hook post-fetch +} + +pkg_updates(){ + # Check all installed packages for updates. So long as the installed + # version and the version in the repositories differ, it's considered + # an update. + [ "$CPT_FETCH" = 0 ] || pkg_fetch + + log "Checking for new package versions" + + set +f + + for pkg in "$sys_db/"*; do + pkg_name=${pkg##*/} + + # Read version and release information from the installed packages + # and repository. + read -r db_ver db_rel < "$pkg/version" + read -r re_ver re_rel < "$(pkg_find "$pkg_name")/version" + + # Compare installed packages to repository packages. + [ "$db_ver-$db_rel" != "$re_ver-$re_rel" ] && { + printf '%s\n' "$pkg_name $db_ver-$db_rel ==> $re_ver-$re_rel" + outdated="$outdated$pkg_name " + } + done + + set -f + + # If the download option is specified only download the outdated packages + # and exit. + # shellcheck disable=2154 + [ "$download_only" = 1 ] && { + log "Only sources for the packages will be acquired" + prompt || exit 0 + + for pkg in $outdated; do + pkg_sources "$pkg" + done + + exit 0 + } + + contains "$outdated" cpt && { + log "Detected package manager update" + log "The package manager will be updated first" + + prompt || exit 0 + + pkg_build cpt + cpt-install cpt + + log "Updated the package manager" + log "Re-run 'cpt update' to update your system" + + exit 0 + } + + [ "$outdated" ] || { + log "Everything is up to date" + return + } + + log "Packages to update: ${outdated% }" + + # Tell 'pkg_build' to always prompt before build. + pkg_update=1 + + # Build all packages requiring an update. + # See [1] at top of script. + # shellcheck disable=2046,2086 + { + pkg_order $outdated + pkg_build $order + } + + log "Updated all packages" +} + +pkg_clean() { + # Clean up on exit or error. This removes everything related + # to the build. + [ "$CPT_DEBUG" != 1 ] || return 0 + + # Block 'Ctrl+C' while cache is being cleaned. + trap_set block + + # Remove temporary items. + rm -rf -- "${CPT_TMPDIR:=$cac_dir/proc}/$pid" +} + +create_cache() { + # A temporary directory can be specified apart from the cache + # directory in order to build in a user specified directory. + # /tmp could be used in order to build on ram, useful on SSDs. + # The user can specify CPT_TMPDIR for this. + # + # Create the required temporary directories and set the variables + # which point to them. + mkdir -p "${CPT_TMPDIR:=$cac_dir/proc}" \ + "${mak_dir:=$CPT_TMPDIR/$pid/build}" \ + "${pkg_dir:=$CPT_TMPDIR/$pid/pkg}" \ + "${tar_dir:=$CPT_TMPDIR/$pid/export}" + +} + +# main() +{ + set -ef + + # If a parser definition exists, let's run it ourselves. This makes sure we + # get the variables as soon as possible. + command -v parser_definition >/dev/null && { + eval "$(getoptions parser_definition parse "$0")" + parse "$@" + eval set -- "$REST" + } + + # Create the cache directories for CPT and set the variables which point + # to them. This is seperate from temporary directories created in + # create_cache(). That's because we need these variables set on most + # occasions. + mkdir -p "${cac_dir:=${CPT_CACHE:=${XDG_CACHE_HOME:-$HOME/.cache}/cpt}}" \ + "${src_dir:=$cac_dir/sources}" \ + "${log_dir:=$cac_dir/logs}" \ + "${bin_dir:=$cac_dir/bin}" + + # Set the location to the repository and package database. + pkg_db=var/db/cpt/installed + + # The PID of the current shell process is used to isolate directories + # to each specific CPT instance. This allows multiple package manager + # instances to be run at once. 
Store the value in another variable so + # that it doesn't change beneath us. + pid=${CPT_PID:-$$} + + # Force the C locale to speed up things like 'grep' which disable unicode + # etc when this is set. We don't need unicode and a speed up is always + # welcome. + export LC_ALL=C LANG=C + + # Catch errors and ensure that build files and directories are cleaned + # up before we die. This occurs on 'Ctrl+C' as well as success and error. + trap_set cleanup + + # Prefer GNU grep if installed as it is much much faster than busybox's + # implementation. Very much worth it if you value performance over + # POSIX correctness (grep quoted to avoid shellcheck false-positive). + grep=$(command -v ggrep) || grep='grep' + + # Prefer libarchive tar or GNU tar if installed as they are much + # much faster than busybox's implementation. Very much worth it if + # you value performance. + tar=$(command -v bsdtar || command -v gtar) || tar=tar + + # Figure out which 'sudo' command to use based on the user's choice or + # what is available on the system. + su=${CPT_SU:-$(command -v sudo || command -v doas)} || su=su + + # Store the date and time of script invocation to be used as the name + # of the log files the package manager creates uring builds. + time=$(date '+%Y-%m-%d-%H:%M') + + # Use readelf for fixing dependencies if it is available, fallback to + # ldd. readelf shows only the actual dependencies and doesn't include + # the libraries required by the dependencies. + elf_prog=${CPT_ELF:="$( + command -v readelf || + command -v llvm-readelf || + command -v eu-readelf)"} || elf_prog=ldd + + # Make note of the user's current ID to do root checks later on. + # This is used enough to warrant a place here. + uid=$(id -u) + + # Save IFS, so we can restore it back to what it was before. + old_ifs=$IFS + + # Make sure that the CPT_ROOT doesn't end with a '/'. This might + # break some operations. + [ -z "$CPT_ROOT" ] || [ "${CPT_ROOT##*/}" ] || { + warn "" "Your CPT_ROOT variable shouldn't end with '/'" + CPT_ROOT=${CPT_ROOT%/} + } + + # Define an optional sys_arch variable in order to provide + # information to build files with architectural information. + sys_arch=$(uname -m 2>/dev/null) ||: + + # Define this variable but don't create its directory structure from + # the get go. It will be created as needed by package installation. + sys_db=$CPT_ROOT/$pkg_db + + # This allows for automatic setup of a CPT chroot and will + # do nothing on a normal system. + mkdir -p "$CPT_ROOT/" 2>/dev/null ||: + + # Set a value for CPT_COMPRESS if it isn't set. + : "${CPT_COMPRESS:=gz}" + + # Unless being piped or the user specifically doesn't want colors, set + # colors. This can of course be overriden if the user specifically want + # colors during piping. 
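+ # (Illustrative: CPT_COLOR=1 keeps colours when output is piped into a
+ # pager, while CPT_COLOR=0 turns them off even on a terminal.)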
+ if { [ "$CPT_COLOR" != 0 ] && [ -t 1 ] ;} || [ "$CPT_COLOR" = 1 ]; then + colory="\033[1;33m" colorb="\033[1;36m" colre="\033[m" + fi + +} -- cgit v1.2.3 From 7722519b9e9f47b61243cf591abdc614f53c27df Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 09:53:31 +0000 Subject: update redo files FossilOrigin-Name: 1096355ead3f8b366f9cf1428369d38cc9735d78da31e8fcfbb3c7ce046d1db0 --- all.do | 2 -- clean.do | 4 ++-- config.rc | 2 +- default.do | 29 ++++++++++++++++++----------- dist.do | 3 --- docs/default.do | 24 ++++++++++++++++++++++++ 6 files changed, 45 insertions(+), 19 deletions(-) delete mode 100644 all.do delete mode 100644 dist.do create mode 100644 docs/default.do diff --git a/all.do b/all.do deleted file mode 100644 index 60dfc91..0000000 --- a/all.do +++ /dev/null @@ -1,2 +0,0 @@ -. ./config.rc -redo-ifchange bin/all doc/cpt.info diff --git a/clean.do b/clean.do index 389daf3..b30f907 100644 --- a/clean.do +++ b/clean.do @@ -1,5 +1,5 @@ . ./config.rc -redo bin/clean +redo bin/clean src/clean redo_clean rm -f "cpt-$VERSION.tar.xz" -find doc -name '*.info' -exec rm -f -- {} + +find docs -name '*.info' -exec rm -f -- {} + diff --git a/config.rc b/config.rc index d1b2281..b6dc81e 100644 --- a/config.rc +++ b/config.rc @@ -38,7 +38,7 @@ PHONY() { trap 'case " $PHONY " in *" $1 "*) rm -f $3; esac' EXIT INT } -setv VERSION = 5.1.0 +setv VERSION = 5.1.1 # Paths setv PREFIX = /usr/local diff --git a/default.do b/default.do index 2d091c6..d8c1cd9 100644 --- a/default.do +++ b/default.do @@ -4,6 +4,15 @@ fn="${1%.*}" case "$1" in + all) redo-ifchange src/cpt-lib bin/all docs/cpt.info ;; + dist) + redo clean + redo "cpt-$VERSION.tar.xz" + ;; + src/cpt-lib) + redo-ifchange "$1.in" + sed "s|@VERSION@|$VERSION|g" < "$1.in" > "$3" + ;; bin/cpt-readlink|bin/cpt-stat) redo-ifchange "$1.o" "$CC" -o "$3" $LDFLAGS "$1.o" $LIBS @@ -13,18 +22,8 @@ case "$1" in redo-ifchange "$fn.c" "$CC" -c -o "$3" $CFLAGS "$fn.c" ;; - *.info) - redo-ifchange "$fn.texi" - $MAKEINFO "$fn.texi" -o "$3" - ;; - *.texi) - [ -f "$fn.org" ] || exit 99 - redo-ifchange "$fn.org" - $EMACS "$fn.org" --batch -f org-texinfo-export-to-texinfo - mv "$1" "$3" - ;; "cpt-$VERSION.tar.xz") - redo doc/cpt.info + redo docs/cpt.info rm -rf -- "cpt-$VERSION" find . -type f ! -name '.*' ! -path './.*' | while read -r file; do @@ -36,7 +35,15 @@ case "$1" in rm -rf -- "cpt-$VERSION" mv "$1" "$3" ;; + test) + redo src/test + ;; + src/clean) + rm -f src/cpt-lib + ;; *) echo "Unknown target $1" exit 99 esac + +PHONY all dist test clean src/clean diff --git a/dist.do b/dist.do deleted file mode 100644 index c7f21f1..0000000 --- a/dist.do +++ /dev/null @@ -1,3 +0,0 @@ -. ./config.rc -redo clean -redo "cpt-$VERSION.tar.xz" diff --git a/docs/default.do b/docs/default.do new file mode 100644 index 0000000..2742edb --- /dev/null +++ b/docs/default.do @@ -0,0 +1,24 @@ +. 
../config.rc + +# Extensionless name of file +fn="${1%.*}" + +case "$1" in + all) redo-ifchange info ;; + info) redo-ifchange cpt.info ;; + *.info) + redo-ifchange "$fn.texi" + $MAKEINFO "$fn.texi" -o "$3" + ;; + *.texi) + [ -f "$fn.org" ] || exit 99 + redo-ifchange "$fn.org" + $EMACS "$fn.org" --batch -f org-texinfo-export-to-texinfo + mv "$1" "$3" + ;; + *) + echo "Unknown target $1" + exit 99 +esac + +PHONY all info html -- cgit v1.2.3 From 96a1f4d2a70e1631564c00fa33e7f11c5e331573 Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 19:59:49 +0000 Subject: cpt: update redo build files FossilOrigin-Name: de567d5766f6af7e5659f2afbef6a566b386c63659f0f8b292f905e38b53c272 --- config.rc | 51 +++++++++++++++++++++++++++++---------------------- default.do | 6 ++++-- install.do | 16 ++++++++++++++++ src/test.do | 4 ++++ uninstall.do | 9 +++++++++ 5 files changed, 62 insertions(+), 24 deletions(-) create mode 100644 install.do create mode 100644 src/test.do create mode 100644 uninstall.do diff --git a/config.rc b/config.rc index b6dc81e..f09b44c 100644 --- a/config.rc +++ b/config.rc @@ -14,6 +14,31 @@ setv() { esac } +setv VERSION = 5.1.1 + +# Paths +setv PREFIX = /usr/local +setv BINDIR = "${PREFIX}/bin" +setv SHAREDIR = "${PREFIX}/share" +setv DOCDIR = "${SHAREDIR}/doc" +setv CPTDOC = "${DOCDIR}/cpt" +setv MANPREFIX = "${SHAREDIR}/man" +setv MAN1 = "${MANPREFIX}/man1" + +# Flags +setv CFLAGS = -std=c99 -Wpedantic -Wall -Os +setv CFLAGS += -D_XOPEN_SOURCE=700 +setv LDFLAGS = -s -static +setv LIBS = -lc + +setv CC = cc +setv LD = "${CC}" + +# Documentation tools +setv EMACS = emacs +setv MAKEINFO = makeinfo + +# Helper functions redo_clean() { # Clean function for various redo implementations [ -r .do_built ] && { @@ -38,29 +63,11 @@ PHONY() { trap 'case " $PHONY " in *" $1 "*) rm -f $3; esac' EXIT INT } -setv VERSION = 5.1.1 - -# Paths -setv PREFIX = /usr/local -setv BINDIR = "${PREFIX}/bin" -setv SHAREDIR = "${PREFIX}/share" -setv DOCDIR = "${SHAREDIR}/doc" -setv CPTDOC = "${DOCDIR}/cpt" -setv MANPREFIX = "${SHAREDIR}/man" -setv MAN1 = "${MANPREFIX}/man1" - -# Flags -setv CFLAGS = -std=c99 -Wpedantic -Wall -Os -setv CFLAGS += -D_XOPEN_SOURCE=700 -setv LDFLAGS = -s -static -setv LIBS = -lc - -setv CC = cc -setv LD = "${CC}" +getbin() { + # Function to get all executables + find src contrib bin -name 'cpt-*' ! -name '*.in' ! -name '*.[coh]' +} -# Documentation tools -setv EMACS = emacs -setv MAKEINFO = makeinfo # Phony targets PHONY all dist clean install uninstall test diff --git a/default.do b/default.do index d8c1cd9..f0ebcf4 100644 --- a/default.do +++ b/default.do @@ -25,9 +25,11 @@ case "$1" in "cpt-$VERSION.tar.xz") redo docs/cpt.info rm -rf -- "cpt-$VERSION" - find . -type f ! -name '.*' ! -path './.*' | + mkdir -p "cpt-$VERSION" + { git ls-tree -r HEAD --name-only && echo docs/cpt.info ;} | while read -r file; do - mkdir -p "cpt-$VERSION/${file%/*}" + [ "${file##*/*}" ] || + mkdir -p "cpt-$VERSION/${file%/*}" cp "$file" "cpt-$VERSION/$file" done tar cf "cpt-$VERSION.tar" "cpt-$VERSION" diff --git a/install.do b/install.do new file mode 100644 index 0000000..cc11bd4 --- /dev/null +++ b/install.do @@ -0,0 +1,16 @@ +. ./config.rc +redo all + +# Install executables. +mkdir -p "${DESTDIR}${BINDIR}" +getbin | while read -r file; do + cp "$file" "${DESTDIR}${BINDIR}/${file##*/}" + chmod 755 "${DESTDIR}${BINDIR}/${file##*/}" +done + +# Install manual pages. 
+mkdir -p "${DESTDIR}${MAN1}" +for man in man/*.1; do + cp "$man" "${DESTDIR}${MAN1}/${man##*/}" + chmod 644 "${DESTDIR}${MAN1}/${man##*/}" +done diff --git a/src/test.do b/src/test.do new file mode 100644 index 0000000..9d32aef --- /dev/null +++ b/src/test.do @@ -0,0 +1,4 @@ +. ../config.rc +redo-ifchange cpt-lib +shellcheck -x -f gcc ./cpt* ../contrib/* +PHONY diff --git a/uninstall.do b/uninstall.do new file mode 100644 index 0000000..d2f4585 --- /dev/null +++ b/uninstall.do @@ -0,0 +1,9 @@ +. ./config.rc + +getbin | while read -r file; do + rm -f "${DESTDIR}${BINDIR}/${file##*/}" +done + +for man in man/*.1; do + rm -f "${DESTDIR}${MAN1}/${man##*/}" +done -- cgit v1.2.3 From 16dd8abfa7f34ea3b6de4e22a1957f21dcf5d83d Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 20:00:33 +0000 Subject: cpt: remove Makefile and config.mk FossilOrigin-Name: 939a0899a2d62edcf207a8679988a0486a06c56fd1b92efc0af2e0b0146a5553 --- Makefile | 69 --------------------------------------------------------------- config.mk | 25 ----------------------- 2 files changed, 94 deletions(-) delete mode 100644 Makefile delete mode 100644 config.mk diff --git a/Makefile b/Makefile deleted file mode 100644 index 4e0880d..0000000 --- a/Makefile +++ /dev/null @@ -1,69 +0,0 @@ -# See LICENSE for copyright information -include config.mk - -SRC = bin/cpt-readlink.c bin/cpt-stat.c -OBJ = ${SRC:.c=.o} -BIN = ${SRC:.c=} - -.SUFFIXES: -.SUFFIXES: .o .c .org .texi .info - -.org.texi: - ${EMACS} $< --batch -f org-texinfo-export-to-texinfo - -.texi.info: - ${MAKEINFO} $< -o $@ - -all: ${BIN} - -.c: - ${CC} ${CFLAGS} ${LDFLAGS} -o $@ $< ${LIBS} - -clean: - rm -f ${BIN} ${OBJ} - -test: ${BIN} - bin/cpt-stat bin - bin/cpt-stat Makefile - bin/cpt-readlink /bin/sh - ${MAKE} -C src test - -install-bin: ${BIN} - for bin in ${BIN}; do \ - install -Dm755 $${bin} ${DESTDIR}${BINDIR}/$${bin##*/}; done - -install-src: - for bin in src/*; do \ - install -Dm755 $${bin} ${DESTDIR}${BINDIR}/$${bin##*/}; done - -install-contrib: - for bin in contrib/*; do \ - install -Dm755 $${bin} ${DESTDIR}${BINDIR}/$${bin##*/}; done - -install-contrib-static: - mkdir -p ${DESTDIR}${BINDIR} - for bin in contrib/*; do \ - sed '/\. cpt-lib/r src/cpt-lib' $${bin} | \ - sed '/\. cpt-lib/d' > ${DESTDIR}${BINDIR}/$${bin##*/}; \ - chmod 755 ${DESTDIR}${BINDIR}/$${bin##*/}; done - -install-src-static: - mkdir -p ${DESTDIR}${BINDIR} - for bin in src/*; do \ - sed '/\. cpt-lib/r src/cpt-lib' $${bin} | \ - sed '/\. 
cpt-lib/d' > ${DESTDIR}${BINDIR}/$${bin##*/}; \ - chmod 755 ${DESTDIR}${BINDIR}/$${bin##*/}; done - -install-doc: - for man in man/*.1; do install -Dm644 $${man} ${DESTDIR}${MAN1}/$${man##*/}; done - -install: install-bin install-src install-contrib install-doc -install-static: install-bin install-src-static install-contrib-static install-doc - -uninstall: - for bin in ${BIN} src/* contrib/*; do \ - rm -f ${DESTDIR}${BINDIR}/$${bin##*/}; done - for man in man/*; do rm -f ${DESTDIR}${MAN1}/$${man##*/}; done - - -.PHONY: all install-bin install-src install-contrib install-doc install-src-static install-contrib-static install uninstall test clean diff --git a/config.mk b/config.mk deleted file mode 100644 index ba76118..0000000 --- a/config.mk +++ /dev/null @@ -1,25 +0,0 @@ -# See LICENSE for copyright information -VERSION = 5.1.1 - -# Paths -PREFIX = /usr/local -BINDIR = ${PREFIX}/bin -SHAREDIR = ${PREFIX}/share -DOCDIR = ${SHAREDIR}/doc -CPTDOC = ${DOCDIR}/cpt -MANPREFIX = ${SHAREDIR}/man -MAN1 = ${MANPREFIX}/man1 - -# Flags -CFLAGS = -std=c99 -Wpedantic -Wall -Os -CFLAGS += -D_XOPEN_SOURCE=700 -LDFLAGS = -s -static -LIBS = -lc - -# C compiler and linker -CC = cc -LD = ${CC} - -# Documentation tools -EMACS = emacs -MAKEINFO = makeinfo -- cgit v1.2.3 From a2896e8f699945c6141328931d2e70a5f75af5d8 Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 20:06:00 +0000 Subject: docs: change texinfo build function FossilOrigin-Name: 292f67b613434aeb4c338dc906245848d8c80410f9afb4945a5e62b7c8ca3880 --- docs/default.do | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/docs/default.do b/docs/default.do index 2742edb..5f8ee48 100644 --- a/docs/default.do +++ b/docs/default.do @@ -11,10 +11,12 @@ case "$1" in $MAKEINFO "$fn.texi" -o "$3" ;; *.texi) - [ -f "$fn.org" ] || exit 99 + [ -f "$fn.org" ] || exit 0 redo-ifchange "$fn.org" - $EMACS "$fn.org" --batch -f org-texinfo-export-to-texinfo - mv "$1" "$3" + cp "$fn.org" "$3.org" + $EMACS "$3.org" --batch -f org-texinfo-export-to-texinfo + rm -f "$3.org" + mv "$3.texi" "$3" ;; *) echo "Unknown target $1" -- cgit v1.2.3 From 3c84ff9e8dddf976235fe50df6e69ec75ef7320b Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 21:17:05 +0000 Subject: src: remove Makefile FossilOrigin-Name: 2b735d94662c1f120603b8077e880d07eb76f26f155e2957ce60d6057c938509 --- src/Makefile | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 src/Makefile diff --git a/src/Makefile b/src/Makefile deleted file mode 100644 index aae4e59..0000000 --- a/src/Makefile +++ /dev/null @@ -1,4 +0,0 @@ -test: - shellcheck -x -f gcc ./cpt* ../contrib/* - -.PHONY: test -- cgit v1.2.3 From 2b62486c4e07a6fd10a4900d125b5c55da5ecd91 Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 21:17:25 +0000 Subject: src: be more specific for calling shellcheck on scripts FossilOrigin-Name: fbdd6c240bc5eae6522926ccb776cd79ab87c1f2e422fd4f15ba8c23fbbbe5b4 --- src/test.do | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/test.do b/src/test.do index 9d32aef..76771c0 100644 --- a/src/test.do +++ b/src/test.do @@ -1,4 +1,5 @@ . ../config.rc redo-ifchange cpt-lib -shellcheck -x -f gcc ./cpt* ../contrib/* +exec >&2 +find . ../contrib -name 'cpt-*' ! 
-name '*.*' -exec shellcheck -x -f gcc {} + PHONY -- cgit v1.2.3 From 9aab40bc5d33350c5bfb3a6d86a783b9c243ec68 Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 21:18:47 +0000 Subject: config.rc: better helper functions FossilOrigin-Name: 616fa2758bb01ad6f314baa41abb361b53194e62407bdc7ecd2320be41073334 --- config.rc | 27 +++++++++++++++++++-------- 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/config.rc b/config.rc index f09b44c..2f0733b 100644 --- a/config.rc +++ b/config.rc @@ -39,6 +39,8 @@ setv EMACS = emacs setv MAKEINFO = makeinfo # Helper functions +target=$1 basename=$2 dest=$3 + redo_clean() { # Clean function for various redo implementations [ -r .do_built ] && { @@ -52,15 +54,24 @@ redo_clean() { [ "$REDO_BASE" ] || rm -rf -- .redo } +targcheck() { + # Usage: targcheck [target...] + # + # Check if current target is one of the given arguments of this function. + # Returns 0 if target is one of the arguments, returns 1 if not. + case " $* " in *" $target "*) return 0; esac; return 1 +} + PHONY() { - # Function that resembles the .PHONY: target on the classic 'make' build - # system. - [ "$1" ] || { - trap 'rm -f $3' EXIT INT - return 0 - } - setv PHONY += "$@" - trap 'case " $PHONY " in *" $1 "*) rm -f $3; esac' EXIT INT + # Usage: PHONY [[target...]] + # + # Function that resembles the .PHONY: target on the classic 'make' system. + # You can either use it without an argument on a single target, or specify + # multiple targets. + if [ -z "$1" ] || targcheck "$@"; then + # shellcheck disable=2064 + trap "rm -f $dest" EXIT INT + fi } getbin() { -- cgit v1.2.3 From 957dd999d5b9d56e29a355efeeb284b296a70b81 Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 21:20:16 +0000 Subject: default.do: remove second PHONY call, add PHONY call to src/clean FossilOrigin-Name: 3d8973d0ab4bd8fe91cd1f5de2d993dbd232c73b725fd9a901ce2dabdcc15842 --- default.do | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/default.do b/default.do index f0ebcf4..9407c6e 100644 --- a/default.do +++ b/default.do @@ -42,10 +42,9 @@ case "$1" in ;; src/clean) rm -f src/cpt-lib + PHONY ;; *) echo "Unknown target $1" exit 99 esac - -PHONY all dist test clean src/clean -- cgit v1.2.3 From 388586b7ceac1470e741b2967e5ba33c8d99ef7b Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 21:20:58 +0000 Subject: bin: add tests FossilOrigin-Name: ec194605cd8c21140304faf47b67f419967b7606155d7afb0e6d9f83c7622d46 --- bin/test.do | 11 +++++++++++ default.do | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 bin/test.do diff --git a/bin/test.do b/bin/test.do new file mode 100644 index 0000000..4794751 --- /dev/null +++ b/bin/test.do @@ -0,0 +1,11 @@ +. ../config.rc +redo all +exec >&2 + +./cpt-readlink . +./cpt-readlink .. 
+./cpt-readlink /bin +./cpt-stat /bin +./cpt-stat cpt-readlink.o + +PHONY diff --git a/default.do b/default.do index 9407c6e..6e2d555 100644 --- a/default.do +++ b/default.do @@ -38,7 +38,7 @@ case "$1" in mv "$1" "$3" ;; test) - redo src/test + redo src/test bin/test ;; src/clean) rm -f src/cpt-lib -- cgit v1.2.3 From 41859732b57e182a675650584530ea85c7bf1407 Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 21:21:21 +0000 Subject: GitHub Workflows: add minimal 'do' shell script for use in tests FossilOrigin-Name: de5cc9af20b3b0b7921469eb7bbdbe8ba0950583f627480fa2ada969e6407bf4 --- .github/workflows/main.yml | 2 +- minimal/do | 446 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 447 insertions(+), 1 deletion(-) create mode 100755 minimal/do diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 836d891..0e61433 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -10,4 +10,4 @@ jobs: steps: - uses: actions/checkout@v1 - name: Run tests. - run: make test + run: ./minimal/do test diff --git a/minimal/do b/minimal/do new file mode 100755 index 0000000..f38a2a7 --- /dev/null +++ b/minimal/do @@ -0,0 +1,446 @@ +#!/bin/sh +# +# A minimal alternative to djb redo that doesn't support incremental builds. +# For the full version, visit http://github.com/apenwarr/redo +# +# The author disclaims copyright to this source file and hereby places it in +# the public domain. (2010 12 14; updated 2019 02 24) +# +USAGE=" +usage: do [-d] [-x] [-v] [-c] + -d print extra debug messages (mostly about dependency checks) + -v run .do files with 'set -v' + -x run .do files with 'set -x' + -c clean up all old targets before starting + + Note: do is an implementation of redo that does *not* check dependencies. + It will never rebuild a target it has already built, unless you use -c. +" + +# CDPATH apparently causes unexpected 'cd' output on some platforms. +unset CDPATH + +# By default, no output coloring. +green="" +bold="" +plain="" + +if [ -n "$TERM" -a "$TERM" != "dumb" ] && tty <&2 >/dev/null 2>&1; then + green="$(printf '\033[32m')" + bold="$(printf '\033[1m')" + plain="$(printf '\033[m')" +fi + +# The 'seq' command is not available on all platforms. +_seq() { + local x=0 max="$1" + while [ "$x" -lt "$max" ]; do + x=$((x + 1)) + echo "$x" + done +} + +# Split $1 into a dir part ($_dirsplit_dir) and base filename ($_dirsplit_base) +_dirsplit() { + _dirsplit_base=${1##*/} + _dirsplit_dir=${1%$_dirsplit_base} +} + +# Like /usr/bin/dirname, but avoids a fork and uses _dirsplit semantics. +qdirname() ( + _dirsplit "$1" + dir=${_dirsplit_dir%/} + echo "${dir:-.}" +) + +_dirsplit "$0" +REDO=$(cd "$(pwd -P)" && + cd "${_dirsplit_dir:-.}" && + echo "$PWD/$_dirsplit_base") +export REDO +_cmd=$_dirsplit_base + +DO_TOP= +if [ -z "$DO_BUILT" ]; then + export _do_opt_debug= + export _do_opt_exec= + export _do_opt_verbose= + export _do_opt_clean= +fi +while getopts 'dxvcj:h?' 
_opt; do + case $_opt in + d) _do_opt_debug=1 ;; + x) _do_opt_exec=x ;; + v) _do_opt_verbose=v ;; + c) _do_opt_clean=1 ;; + j) ;; # silently ignore, for compat with real redo + \?|h|*) printf "%s" "$USAGE" >&2 + exit 99 + ;; + esac +done +shift "$((OPTIND - 1))" +_debug() { + [ -z "$_do_opt_debug" ] || echo "$@" >&2 +} + +if [ -z "$DO_BUILT" -a "$_cmd" != "redo-whichdo" ]; then + DO_TOP=1 + if [ "$#" -eq 0 ] && [ "$_cmd" = "do" -o "$_cmd" = "redo" ]; then + set all # only toplevel redo has a default target + fi + export DO_STARTDIR="$(pwd -P)" + # If starting /bin/pwd != $PWD, this will fix it. + # That can happen when $PWD contains symlinks that the shell is + # trying helpfully (but unsuccessfully) to hide from the user. + cd "$DO_STARTDIR" || exit 99 + export DO_BUILT="$PWD/.do_built" + if [ -z "$_do_opt_clean" -a -e "$DO_BUILT" ]; then + echo "do: Incremental mode. Use -c for clean rebuild." >&2 + fi + : >>"$DO_BUILT" + sort -u "$DO_BUILT" >"$DO_BUILT.new" + while read f; do + [ -n "$_do_opt_clean" ] && printf "%s\0%s.did\0" "$f" "$f" + printf "%s.did.tmp\0" "$f" + done <"$DO_BUILT.new" | + xargs -0 rm -f 2>/dev/null + mv "$DO_BUILT.new" "$DO_BUILT" + export DO_PATH="$DO_BUILT.dir" + export PATH="$DO_PATH:$PATH" + rm -rf "$DO_PATH" + mkdir "$DO_PATH" + for d in redo redo-ifchange redo-whichdo; do + ln -s "$REDO" "$DO_PATH/$d" + done + for d in redo-ifcreate redo-stamp redo-always redo-ood \ + redo-targets redo-sources; do + echo "#!/bin/sh" >"$DO_PATH/$d" + chmod a+rx "$DO_PATH/$d" + done +fi + + +# Chop the "file" part off a /path/to/file pathname. +# Note that if the filename already ends in a /, we just remove the slash. +_updir() +{ + local v="${1%/*}" + [ "$v" != "$1" ] && echo "$v" + # else "empty" which means we went past the root +} + + +# Returns true if $1 starts with $2. +_startswith() +{ + [ "${1#"$2"}" != "$1" ] +} + + +# Returns true if $1 ends with $2. +_endswith() +{ + [ "${1%"$2"}" != "$1" ] +} + + +# Prints $1 if it's absolute, or $2/$1 if $1 is not absolute. +_abspath() +{ + local here="$2" there="$1" + if _startswith "$1" "/"; then + echo "$1" + else + echo "$2/$1" + fi +} + + +# Prints $1 as a path relative to $PWD (not starting with /). +# If it already doesn't start with a /, doesn't change the string. +_relpath() +{ + local here="$2" there="$1" out= hadslash= + #echo "RP start '$there' hs='$hadslash'" >&2 + _startswith "$there" "/" || { echo "$there" && return; } + [ "$there" != "/" ] && _endswith "$there" "/" && hadslash=/ + here=${here%/}/ + while [ -n "$here" ]; do + #echo "RP out='$out' here='$here' there='$there'" >&2 + [ "${here%/}" = "${there%/}" ] && there= && break; + [ "${there#$here}" != "$there" ] && break + out=../$out + _dirsplit "${here%/}" + here=$_dirsplit_dir + done + there=${there#$here} + if [ -n "$there" ]; then + echo "$out${there%/}$hadslash" + else + echo "${out%/}$hadslash" + fi +} + + +# Prints a "normalized relative" path, with ".." resolved where possible. +# For example, a/b/../c will be reduced to just a/c. +_normpath() +( + local path="$1" relto="$2" out= isabs= + #echo "NP start '$path'" >&2 + if _startswith "$path" "/"; then + isabs=1 + else + path="${relto%/}/$path" + fi + set -f + IFS=/ + for d in ${path%/}; do + #echo "NP out='$out' d='$d'" >&2 + if [ "$d" = ".." 
]; then + out=$(_updir "${out%/}")/ + else + out=$out$d/ + fi + done + #echo "NP out='$out' (done)" >&2 + out=${out%/} + if [ -n "$isabs" ]; then + echo "${out:-/}" + else + _relpath "${out:-/}" "$relto" + fi +) + + +# Prints a "real" path, with all symlinks resolved where possible. +_realpath() +{ + local path="$1" relto="$2" isabs= rest= + if _startswith "$path" "/"; then + isabs=1 + else + path="${relto%/}/$path" + fi + ( + for d in $(_seq 100); do + #echo "Trying: $PWD--$path" >&2 + if cd -P "$path" 2>/dev/null; then + # success + pwd=$(pwd -P) + #echo " chdir ok: $pwd--$rest" >&2 + np=$(_normpath "${pwd%/}/$rest" "$relto") + if [ -n "$isabs" ]; then + echo "$np" + else + _relpath "$np" "$relto" + fi + break + fi + _dirsplit "${path%/}" + path=$_dirsplit_dir + rest="$_dirsplit_base/$rest" + done + ) +} + + +# List the possible names for default*.do files in dir $1 matching the target +# pattern in $2. We stop searching when we find the first one that exists. +_find_dofiles_pwd() +{ + local dodir="$1" dofile="$2" + _startswith "$dofile" "default." || dofile=${dofile#*.} + while :; do + dofile=default.${dofile#default.*.} + echo "$dodir$dofile" + [ -e "$dodir$dofile" ] && return 0 + [ "$dofile" = default.do ] && break + done + return 1 +} + + +# List the possible names for default*.do files in $PWD matching the target +# pattern in $1. We stop searching when we find the first name that works. +# If there are no matches in $PWD, we'll search in .., and so on, to the root. +_find_dofiles() +{ + local target="$1" dodir= dofile= newdir= + _debug "find_dofile: '$PWD' '$target'" + dofile="$target.do" + echo "$dofile" + [ -e "$dofile" ] && return 0 + + # Try default.*.do files, walking up the tree + _dirsplit "$dofile" + dodir=$_dirsplit_dir + dofile=$_dirsplit_base + [ -n "$dodir" ] && dodir=${dodir%/}/ + [ -e "$dodir$dofile" ] && return 0 + for i in $(_seq 100); do + [ -n "$dodir" ] && dodir=${dodir%/}/ + #echo "_find_dofiles: '$dodir' '$dofile'" >&2 + _find_dofiles_pwd "$dodir" "$dofile" && return 0 + newdir=$(_realpath "${dodir}.." "$PWD") + [ "$newdir" = "$dodir" ] && break + dodir=$newdir + done + return 1 +} + + +# Print the last .do file returned by _find_dofiles. +# If that file exists, returns 0, else 1. +_find_dofile() +{ + local files="$(_find_dofiles "$1")" + rv=$? + #echo "files='$files'" >&2 + [ "$rv" -ne 0 ] && return $rv + echo "$files" | { + while read -r linex; do line=$linex; done + printf "%s\n" "$line" + } +} + + +# Actually run the given $dofile with the arguments in $@. +# Note: you should always run this in a subshell. +_run_dofile() +{ + export DO_DEPTH="$DO_DEPTH " + export REDO_TARGET="$PWD/$target" + local line1 + set -e + read line1 <"$PWD/$dofile" || true + cmd=${line1#"#!/"} + if [ "$cmd" != "$line1" ]; then + set -$_do_opt_verbose$_do_opt_exec + exec /$cmd "$PWD/$dofile" "$@" + else + set -$_do_opt_verbose$_do_opt_exec + # If $dofile is empty, "." might not change $? at + # all, so we clear it first with ":". + :; . "$PWD/$dofile" + fi +} + + +# Find and run the right .do file, starting in dir $1, for target $2, +# providing a temporary output file as $3. Renames the temp file to $2 when +# done. +_do() +{ + local dir="$1" target="$1$2" tmp="$1$2.redo.tmp" tdir= + local dopath= dodir= dofile= ext= + if [ "$_cmd" = "redo" ] || + ( [ ! -e "$target" -o -d "$target" ] && + [ ! -e "$target.did" ] ); then + printf '%sdo %s%s%s%s\n' \ + "$green" "$DO_DEPTH" "$bold" "$target" "$plain" >&2 + dopath=$(_find_dofile "$target") + if [ ! 
-e "$dopath" ]; then + echo "do: $target: no .do file ($PWD)" >&2 + return 1 + fi + _dirsplit "$dopath" + dodir=$_dirsplit_dir dofile=$_dirsplit_base + if _startswith "$dofile" "default."; then + ext=${dofile#default} + ext=${ext%.do} + else + ext= + fi + target=$PWD/$target + tmp=$PWD/$tmp + cd "$dodir" || return 99 + target=$(_relpath "$target" "$PWD") || return 98 + tmp=$(_relpath "$tmp" "$PWD") || return 97 + base=${target%$ext} + tdir=$(qdirname "$target") + [ ! -e "$DO_BUILT" ] || [ ! -w "$tdir/." ] || + : >>"$target.did.tmp" + # $qtmp is a temporary file used to capture stdout. + # Since it might be accidentally deleted as a .do file + # does its work, we create it, then open two fds to it, + # then immediately delete the name. We use one fd to + # redirect to stdout, and the other to read from after, + # because there's no way to fseek(fd, 0) in sh. + qtmp=$DO_PATH/do.$$.tmp + ( + rm -f "$qtmp" + ( _run_dofile "$target" "$base" "$tmp" >&3 3>&- 4<&- ) + rv=$? + if [ $rv != 0 ]; then + printf "do: %s%s\n" "$DO_DEPTH" \ + "$target: got exit code $rv" >&2 + rm -f "$tmp.tmp" "$tmp.tmp2" "$target.did" + return $rv + fi + echo "$PWD/$target" >>"$DO_BUILT" + if [ ! -e "$tmp" ]; then + # if $3 wasn't created, copy from stdout file + cat <&4 >$tmp + # if that's zero length too, forget it + [ -s "$tmp" ] || rm -f "$tmp" + fi + ) 3>$qtmp 4<$qtmp # can't use "|| return" here... + # ...because "|| return" would mess up "set -e" inside the () + # on some shells. Running commands in "||" context, even + # deep inside, will stop "set -e" from functioning. + rv=$? + [ "$rv" = 0 ] || return "$rv" + mv "$tmp" "$target" 2>/dev/null + [ -e "$target.did.tmp" ] && + mv "$target.did.tmp" "$target.did" || + : >>"$target.did" + else + _debug "do $DO_DEPTH$target exists." >&2 + fi +} + + +# Implementation of the "redo" command. +_redo() +{ + local i startdir="$PWD" dir base + set +e + for i in "$@"; do + i=$(_abspath "$i" "$startdir") + ( + cd "$DO_STARTDIR" || return 99 + i=$(_realpath "$(_relpath "$i" "$PWD")" "$PWD") + _dirsplit "$i" + dir=$_dirsplit_dir base=$_dirsplit_base + _do "$dir" "$base" + ) + [ "$?" = 0 ] || return 1 + done +} + + +# Implementation of the "redo-whichdo" command. +_whichdo() +{ + _find_dofiles "$1" +} + + +case $_cmd in + do|redo|redo-ifchange) _redo "$@" ;; + redo-whichdo) _whichdo "$1" ;; + do.test) ;; + *) printf "do: '%s': unexpected redo command" "$_cmd" >&2; exit 99 ;; +esac +[ "$?" = 0 ] || exit 1 + +if [ -n "$DO_TOP" ]; then + if [ -n "$_do_opt_clean" ]; then + echo "do: Removing stamp files..." >&2 + [ ! 
-e "$DO_BUILT" ] || + while read f; do printf "%s.did\0" "$f"; done <"$DO_BUILT" | + xargs -0 rm -f 2>/dev/null + fi +fi -- cgit v1.2.3 From 059560eab4f93e8c6cfc1193aca204923999b4dc Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 21:23:21 +0000 Subject: .gitignore: update FossilOrigin-Name: 49d9da0713812747e282deef29eeb87b6ac7619676f625b46dd881ef9876cdf0 --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index e477f1d..45ab818 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,7 @@ ### Binaries/Objects ### +cpt-lib cpt-stat cpt-readlink -getopt *.o ### Emacs ### -- cgit v1.2.3 From 925cdf2010fad8f60de35b765939264cb72af097 Mon Sep 17 00:00:00 2001 From: merakor Date: Sun, 20 Dec 2020 21:23:57 +0000 Subject: README: update FossilOrigin-Name: 22c3642bb16ab93eb946f2b8b0c63cfcf8d83f6a86458096525d4f4c0cc22943 --- README | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/README b/README index 0e83582..6479e35 100644 --- a/README +++ b/README @@ -19,25 +19,24 @@ Dependencies To build and use cpt, you need the following software. -- C compiler [make] +MAKE DEPENDS +- C compiler +- redo (the repository contains minimal/do) + +RUNTIME DEPENDS - rsync - curl -- getopt [provided by cpt if not available] - POSIX base utilities [coreutils, busybox, sbase, etc.] - tar [GNU tar, busybox, toybox, libarchive, etc.] -Build configuration can be done from the 'config.mk' file. If you have getopt -on your system, add SYSTEM_GETOPT=1 option to your 'config.mk'. - Directory Structure -------------------------------------------------------------------------------- - / -- cpt, README, Makefile, LICENSE, CHANGELOG + / -- README, LICENSE, CHANGELOG bin/ -- for C programs. contrib/ -- for Shell scripts that wrap around cpt. - doc/ -- for documentation. - getopt-ul -- for cpt provided util-linux standalone getopt(1). + docs/ -- for documentation. man/ -- for manual pages. src/ -- for the tools that make up the package manager. -- cgit v1.2.3 From ed0fd37a7dd2b9683dbc45964334defe6c0e5982 Mon Sep 17 00:00:00 2001 From: merakor Date: Mon, 21 Dec 2020 09:07:42 +0000 Subject: default.do: run docs/all instead of docs/cpt.info FossilOrigin-Name: 5fb5e5902106aa153d1d68dcf6ab19640f47dc07804c8ab487ba1549ec349fbb --- default.do | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/default.do b/default.do index 6e2d555..c16f5f8 100644 --- a/default.do +++ b/default.do @@ -4,7 +4,7 @@ fn="${1%.*}" case "$1" in - all) redo-ifchange src/cpt-lib bin/all docs/cpt.info ;; + all) redo-ifchange src/cpt-lib bin/all docs/all ;; dist) redo clean redo "cpt-$VERSION.tar.xz" -- cgit v1.2.3 From 263daddaa753ecc2aa7fc202d0bb1acbe8f43f74 Mon Sep 17 00:00:00 2001 From: merakor Date: Mon, 21 Dec 2020 09:08:09 +0000 Subject: docs/default: change targets FossilOrigin-Name: 779edf420267002781b77fe90d1a9301cb02aab72c6f5f5c51a5e27d439573f1 --- docs/default.do | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/default.do b/docs/default.do index 5f8ee48..a66ed9b 100644 --- a/docs/default.do +++ b/docs/default.do @@ -4,8 +4,9 @@ fn="${1%.*}" case "$1" in - all) redo-ifchange info ;; - info) redo-ifchange cpt.info ;; + all) redo info ;; + allclean) redo ../clean; rm -f cpt.texi ;; + info) redo-ifchange cpt.info cpt.texi cpt.org ;; *.info) redo-ifchange "$fn.texi" $MAKEINFO "$fn.texi" -o "$3" -- cgit v1.2.3