From 5e476bab9c6db89dfadc94a8c4fbf3082339fecf Mon Sep 17 00:00:00 2001
From: Denis Vlasenko
Date: Tue, 15 Jul 2008 21:29:44 +0000
Subject: libbb: document plans to speed up line-based input

---
 libbb/get_line_from_file.c | 15 +++++++++++++--
 1 file changed, 13 insertions(+), 2 deletions(-)

(limited to 'libbb/get_line_from_file.c')

diff --git a/libbb/get_line_from_file.c b/libbb/get_line_from_file.c
index 7b65ced8d..56761f941 100644
--- a/libbb/get_line_from_file.c
+++ b/libbb/get_line_from_file.c
@@ -68,12 +68,24 @@ char* FAST_FUNC xmalloc_fgetline(FILE *file)
 	return c;
 }
 
+#if 0
 /* Faster routines (~twice as fast). +170 bytes. Unused as of 2008-07.
  *
  * NB: they stop at NUL byte too.
  * Performance is important here. Think "grep 50gigabyte_file"...
- * Iironically, grep can't use it because of NUL issue.
+ * Ironically, grep can't use it because of NUL issue.
  * We sorely need C lib to provide fgets which reports size!
+ *
+ * Update:
+ * Actually, uclibc and glibc have it. man getline. It's GNUism,
+ * but very useful one (if it's as fast as this code).
+ * TODO:
+ * - currently, sed and sort use bb_get_chunk_from_file and heavily
+ *   depend on its "stop on \n or \0" behavior, and STILL they fail
+ *   to handle all cases with embedded NULs correctly. So:
+ * - audit sed and sort; convert them to getline FIRST.
+ * - THEN ditch bb_get_chunk_from_file, replace it with getline.
+ * - provide getline implementation for non-GNU systems.
  */
 
 static char* xmalloc_fgets_internal(FILE *file, int *sizep)
@@ -118,7 +130,6 @@ char* FAST_FUNC xmalloc_fgetline_fast(FILE *file)
 	return r; /* not xrealloc(r, sz + 1)! */
 }
 
-#if 0
 char* FAST_FUNC xmalloc_fgets(FILE *file)
 {
 	int sz;
-- 
cgit v1.2.3
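
For reference, the TODO item above about providing a getline implementation for
non-GNU systems could look roughly like the sketch below. This is an
illustrative assumption, not code from busybox or from this patch: the name
fallback_getline and the initial buffer size of 128 bytes are invented here,
and only the documented getline() contract is assumed (grow the caller-supplied
buffer as needed, return the number of bytes read including the trailing
newline, or -1 on EOF/error).

/*
 * Minimal, hypothetical getline() fallback for libcs without the GNU
 * extension. It reports the line length explicitly, so lines containing
 * embedded NUL bytes are not silently shortened the way strlen()-based
 * callers of fgets() would shorten them.
 */
#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>	/* ssize_t */

static ssize_t fallback_getline(char **lineptr, size_t *n, FILE *stream)
{
	size_t len = 0;
	int c;

	if (!lineptr || !n || !stream)
		return -1;

	if (*lineptr == NULL || *n == 0) {
		*n = 128;
		*lineptr = malloc(*n);
		if (*lineptr == NULL)
			return -1;
	}

	while ((c = getc(stream)) != EOF) {
		/* +2: room for this byte plus the terminating NUL */
		if (len + 2 > *n) {
			char *p = realloc(*lineptr, *n * 2);
			if (p == NULL)
				return -1;
			*lineptr = p;
			*n *= 2;
		}
		(*lineptr)[len++] = (char)c;
		if (c == '\n')
			break;
	}

	if (len == 0)
		return -1; /* EOF (or error) before any data was read */

	(*lineptr)[len] = '\0';
	return (ssize_t)len;
}

Because the length is returned instead of being rediscovered with strlen(),
callers such as the sed and sort conversions mentioned in the TODO could keep
handling lines with embedded NULs, which is the property the comment says a
plain fgets() lacks.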