From 3252b625b751a2de794ae4f8e48b80b4ff09dfdc Mon Sep 17 00:00:00 2001
From: Rob Landley
Date: Tue, 30 May 2006 19:19:45 +0000
Subject: Since we have less than 100k of usage.h data anyway, using bzip -9
 is silly.

That says use 900k chunks when compressing, which needs about 4 megs of
data structures to undo the Burrows-Wheeler transform.  Switching it down
to bzip -1 (100k chunks) should have no impact on the compression (since
it still all fits in one chunk) but should reduce runtime decompression
memory requirements to something like 500k.  Still larger than gunzip,
but not egregiously so.
---
 scripts/usage_compressed | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/scripts/usage_compressed b/scripts/usage_compressed
index 59e93e669..8e2228af2 100644
--- a/scripts/usage_compressed
+++ b/scripts/usage_compressed
@@ -6,7 +6,7 @@ test "$loc" || loc=.
 test -x "$loc/usage" || exit 1
 
 echo 'static const char packed_usage[] = '
-"$loc"/usage | bzip2 -9 | od -v -t x1 \
+"$loc"/usage | bzip2 -1 | od -v -t x1 \
 | $SED -e 's/^[^ ]*//' -e 's/ \(..\)/\\x\1/g' -e 's/^\(.*\)$/"\1"/' || exit 1
 echo ';'
 sz=`"$loc"/usage | wc -c` || exit 1
-- 
cgit v1.2.3
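
A quick sanity check of the commit message's claims, not part of the patch
itself.  The bzip2 manual estimates decompression memory at roughly
100k + 4 x (block size), which gives ~3.7M for -9 (900k blocks) and ~500k
for -1 (100k blocks), matching the numbers above.  The size claim can be
verified directly; these commands are illustrative and assume a built tree
where the usage helper exists alongside the script:

    # The usage text is well under bzip2's smallest (100k) block size,
    # so -1 and -9 both compress it as a single block.
    $ scripts/usage | wc -c

    # Compressed sizes should come out identical at both levels
    # (only the block-size digit in the bzip2 header differs).
    $ scripts/usage | bzip2 -1 | wc -c
    $ scripts/usage | bzip2 -9 | wc -c
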