author     Kaz Kylheku <kaz@kylheku.com>   2022-04-15 17:41:23 -0700
committer  Kaz Kylheku <kaz@kylheku.com>   2022-04-15 17:41:23 -0700
commit     8beafd5dc2b663c26502816e3f72adb3de4655ff (patch)
tree       89d1bc88805d8958473b972274929dab1185deb9
parent     4663bb3b6feeb2bdbe18391336b5b7dd2ededf5e (diff)
cons: document and test uniq, new uniqual function.
-rw-r--r--   cppawk-cons.1                51
-rw-r--r--   cppawk-include/cons-priv.h   17
-rw-r--r--   cppawk-include/cons.h         1
-rw-r--r--   testcases-cons               30
4 files changed, 98 insertions, 1 deletion
diff --git a/cppawk-cons.1 b/cppawk-cons.1
index fa286b9..6d701f6 100644
--- a/cppawk-cons.1
+++ b/cppawk-cons.1
@@ -92,7 +92,8 @@ cons \- Lisp-like data representation and control flow macros
 iota(\fIx\fP, \fIy\fP[, \fId\fP]) \fI// numbers from x to y, incrementing by d\fP
- uniq(\fIx\fP) \fI// list x deduplicated\fP
+ uniq(\fIx\fP) \fI// deduplicate x\fP
+ uniqual(\fIx\fP) \fI// deduplicate x with equal equality\fP
mapcar(\fIf\fP, \fIx\fP) \fI// map list through function f\fP
@@ -1766,6 +1767,54 @@ will be attained.
iota(2.5, 2.5, -1) -> (2.5)
.ft R
+.SS Functions \fIuniq\fP and \fIuniqual\fP
+.bk
+Syntax:
+
+.ft B
+ uniq(\fIx\fP)
+ uniqual(\fIx\fP)
+.ft R
+
+The
+.B uniq
+and
+.B uniqual
+functions return a list formed by removing the duplicates from list
+.IR x .
+
+Whenever any item appears in
+.I x
+more than once, the resulting list will have only the first occurrence of
+that item; the subsequent occurrences do not appear in the returned list.
+
+The
+.B uniq
+function identifies duplicates using native Awk equality:
+the raw representations of the objects serve as keys into an
+associative array.
+
+The
+.B uniqual
+function uses the
+.B equal
+function's notion of equality.
+
+.B Examples:
+
+.ft B
+ uniq(nil) -> nil
+ uniq(list(1, 2, 1, 3, 2, 4, 2, 1, 5, 6, 5)) -> (1 2 3 4 5 6)
+ uniqual(nil) -> nil
+ uniqual(list(1, 2, 1, 3, 2, 4, 2, 1, 5, 6, 5)) -> (1 2 3 4 5 6)
+ uniq(list(1, 1.0)) -> (1)
+ uniq(list(1, "1.0")) -> (1 1.0)
+ uniqual(list(1, 1.0)) -> (1)
+ uniqual(list(1, "1.0")) -> (1)
+ uniq(list(box_str("abc"), "abc")) -> ("abc" "abc")
+ uniqual(list(box_str("abc"), "abc")) -> ("abc")
+.ft R
+
.SH "SEE ALSO"
cppawk(1)
diff --git a/cppawk-include/cons-priv.h b/cppawk-include/cons-priv.h
index 97d410d..4482fa6 100644
--- a/cppawk-include/cons-priv.h
+++ b/cppawk-include/cons-priv.h
@@ -843,6 +843,23 @@ function __uniq(__lst,
return __list_end(__out)
}
+function __uniqual(__lst,
+ __out, __item, __itemq, __seen)
+{
+ __out = __list_begin();
+
+ __dolist(__item, __lst) {
+ __itemq = __equalize(__item)
+ if (__itemq in __seen)
+ continue
+ __seen[__itemq]
+ __out = __list_add(__out, __item)
+ }
+
+ return __list_end(__out)
+}
+
+
#if __have_indirect_functions
function __mapcar(__fun, __lst,
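
For contrast with __uniqual, here is a rough sketch of how the native-equality
__uniq described in cppawk-cons.1 might look. The name __uniq_sketch and the
body below are assumptions for illustration only; this commit adds __uniqual,
and the actual __uniq in cons-priv.h is not shown here. The idea is the same
shape as __uniqual, but the __seen array is keyed on the item's raw Awk value
rather than on its __equalize'd form.

    # Hypothetical sketch -- not the actual __uniq from cons-priv.h.
    function __uniq_sketch(__lst,
                           __out, __item, __seen)
    {
        __out = __list_begin();

        __dolist(__item, __lst) {
            # Native Awk dedup: the item's raw value is the array key, so
            # 1 and 1.0 both become the key "1", while 1 and "1.0" stay
            # distinct (cf. the examples in cppawk-cons.1).
            if (__item in __seen)
                continue
            __seen[__item]      # referencing the element marks it as seen
            __out = __list_add(__out, __item)
        }

        return __list_end(__out)
    }
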
diff --git a/cppawk-include/cons.h b/cppawk-include/cons.h
index 2cf579f..b91877d 100644
--- a/cppawk-include/cons.h
+++ b/cppawk-include/cons.h
@@ -83,6 +83,7 @@
#define butlast __butlast
#define iota __iota
#define uniq __uniq
+#define uniqual __uniqual
#define mapcar __mapcar
#define mappend __mappend
diff --git a/testcases-cons b/testcases-cons
index 9962970..0e0b2db 100644
--- a/testcases-cons
+++ b/testcases-cons
@@ -673,3 +673,33 @@ BEGIN {
nil
(2.5)
(2.5)
+--
+36:
+$cppawk '
+#include <cons.h>
+
+BEGIN {
+ print sexp(uniq(nil))
+ print sexp(uniq(list(1, 2, 1, 3, 2, 4, 2, 1, 5, 6, 5)))
+ print sexp(uniqual(nil))
+ print sexp(uniqual(list(1, 2, 1, 3, 2, 4, 2, 1, 5, 6, 5)))
+ print sexp(uniq(list(1, 1.0)))
+ print sexp(uniq(list(1, "1.0")))
+ print sexp(uniqual(list(1, 1.0)))
+ print sexp(uniqual(list(1, "1.0")))
+ print sexp(uniq(list(box_str("abc"), "abc")))
+ print sexp(uniqual(list(box_str("abc"), "abc")))
+ print sexp(uniq(list(cons(1, 2), cons(3, 4), cons(1, 2), cons(3, 5))))
+}'
+:
+nil
+(1 2 3 4 5 6)
+nil
+(1 2 3 4 5 6)
+(1)
+(1 1.0)
+(1)
+(1)
+("abc" "abc")
+("abc")
+((1 . 2) (3 . 4) (3 . 5))