summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSVN-Git Migration <python-modules-team@lists.alioth.debian.org>2015-10-08 08:33:59 -0700
committerSVN-Git Migration <python-modules-team@lists.alioth.debian.org>2015-10-08 08:33:59 -0700
commit0b68c6d00a2c84a5db13ba5fa304a6eadc4279f2 (patch)
tree1af54f1d5bdd9e7eb77d75e875c25bb7b6fbb7b4
Imported Upstream version 1.3.4
-rw-r--r--LICENSE28
-rw-r--r--MANIFEST.in15
-rw-r--r--PKG-INFO202
-rw-r--r--README.rst179
-rw-r--r--_blist.c7684
-rw-r--r--_btuple.py92
-rw-r--r--_sorteddict.py142
-rw-r--r--_sortedlist.py647
-rw-r--r--blist.egg-info/PKG-INFO202
-rw-r--r--blist.egg-info/SOURCES.txt30
-rw-r--r--blist.egg-info/dependency_links.txt1
-rw-r--r--blist.egg-info/not-zip-safe1
-rw-r--r--blist.egg-info/top_level.txt5
-rw-r--r--blist.h248
-rw-r--r--blist.py8
-rw-r--r--distribute_setup.py481
-rwxr-xr-xprototype/blist.py2116
-rw-r--r--setup.cfg5
-rwxr-xr-xsetup.py46
-rwxr-xr-xspeed_test.py340
-rw-r--r--test/__init__.py0
-rw-r--r--test/btuple_tests.py161
-rw-r--r--test/list_tests.py558
-rw-r--r--test/mapping_tests.py678
-rw-r--r--test/seq_tests.py332
-rw-r--r--test/sorteddict_tests.py81
-rw-r--r--test/sortedlist_tests.py614
-rw-r--r--test/test_list.py39
-rw-r--r--test/test_set.py1536
-rw-r--r--test/test_support.py415
-rw-r--r--test/unittest.py840
-rwxr-xr-xtest_blist.py368
32 files changed, 18094 insertions, 0 deletions
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..b51c68f
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,28 @@
+Copyright 2007-2010 Stutzbach Enterprises, LLC
+(daniel@stutzbachenterprises.com)
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ 3. The name of the author may not be used to endorse or promote
+ products derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..ae9099c
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,15 @@
+include _blist.c
+include blist.py
+include _sorteddict.py
+include _sortedlist.py
+include _btuple.py
+include setup.py
+include test_blist.py
+include test/*.py
+include README.rst
+include LICENSE
+include prototype/blist.py
+include distribute_setup.py
+include speed_test.py
+include blist.rst
+include blist.h
diff --git a/PKG-INFO b/PKG-INFO
new file mode 100644
index 0000000..a43d00d
--- /dev/null
+++ b/PKG-INFO
@@ -0,0 +1,202 @@
+Metadata-Version: 1.1
+Name: blist
+Version: 1.3.4
+Summary: a list-like type with better asymptotic performance and similar performance on small lists
+Home-page: http://stutzbachenterprises.com/blist/
+Author: Stutzbach Enterprises, LLC
+Author-email: daniel@stutzbachenterprises.com
+License: BSD
+Description: blist: a list-like type with better performance
+ ===============================================
+
+        The ``blist`` is a drop-in replacement for the Python list that provides
+ better performance when modifying large lists. The blist package also
+ provides ``sortedlist``, ``sortedset``, ``weaksortedlist``,
+ ``weaksortedset``, ``sorteddict``, and ``btuple`` types.
+
+ Full documentation is at the link below:
+
+ http://stutzbachenterprises.com/blist-doc/
+
+ Python's built-in list is a dynamically-sized array; to insert or
+        remove an item from the beginning or middle of the list, it has to
+ move most of the list in memory, i.e., O(n) operations. The blist
+ uses a flexible, hybrid array/tree structure and only needs to move a
+ small portion of items in memory, specifically using O(log n)
+ operations.
+
+ For small lists, the blist and the built-in list have virtually
+ identical performance.
+
+ To use the blist, you simply change code like this:
+
+ >>> items = [5, 6, 2]
+ >>> more_items = function_that_returns_a_list()
+
+ to:
+
+ >>> from blist import blist
+ >>> items = blist([5, 6, 2])
+ >>> more_items = blist(function_that_returns_a_list())
+
+ Here are some of the use cases where the blist asymptotically
+ outperforms the built-in list:
+
+ ========================================== ================ =========
+ Use Case blist list
+ ========================================== ================ =========
+ Insertion into or removal from a list O(log n) O(n)
+ Taking slices of lists O(log n) O(n)
+ Making shallow copies of lists O(1) O(n)
+ Changing slices of lists O(log n + log k) O(n+k)
+ Multiplying a list to make a sparse list O(log k) O(kn)
+        Maintain a sorted list with bisect.insort  O(log**2 n)     O(n)
+ ========================================== ================ =========
+
+        So that you can see the performance of the blist in more detail, several
+        performance graphs are available at the following link:
+ http://stutzbachenterprises.com/blist/
+
+ Example usage:
+
+ >>> from blist import *
+ >>> x = blist([0]) # x is a blist with one element
+ >>> x *= 2**29 # x is a blist with > 500 million elements
+ >>> x.append(5) # append to x
+ >>> y = x[4:-234234] # Take a 500 million element slice from x
+ >>> del x[3:1024] # Delete a few thousand elements from x
+
+ Other data structures
+ ---------------------
+
+ The blist package provides other data structures based on the blist:
+
+ - sortedlist
+ - sortedset
+ - weaksortedlist
+        - weaksortedset
+ - sorteddict
+ - btuple
+
+ These additional data structures are only available in Python 2.6 or
+ higher, as they make use of Abstract Base Classes.
+
+ The sortedlist is a list that's always sorted. It's iterable and
+        indexable like a Python list, but to modify a sortedlist you use the
+        same methods you would use on a Python set (add, discard, or remove).
+
+ >>> from blist import sortedlist
+ >>> my_list = sortedlist([3,7,2,1])
+ >>> my_list
+ sortedlist([1, 2, 3, 7])
+ >>> my_list.add(5)
+ >>> my_list[3]
+ 5
+ >>>
+
+ The sortedlist constructor takes an optional "key" argument, which may
+ be used to change the sort order just like the sorted() function.
+
+ >>> from blist import sortedlist
+ >>> my_list = sortedlist([3,7,2,1], key=lambda i: -i)
+        sortedlist([7, 3, 2, 1])
+ >>>
+
+ The sortedset is a set that's always sorted. It's iterable and
+ indexable like a Python list, but modified like a set. Essentially,
+ it's just like a sortedlist except that duplicates are ignored.
+
+ >>> from blist import sortedset
+ >>> my_set = sortedset([3,7,2,2])
+        sortedset([2, 3, 7])
+ >>>
+
+ The weaksortedlist and weaksortedset are weakref variations of the
+ sortedlist and sortedset.
+
+ The sorteddict works just like a regular dict, except the keys are
+ always sorted. The sorteddict should not be confused with Python
+ 2.7's OrderedDict type, which remembers the insertion order of the
+ keys.
+
+ >>> from blist import sorteddict
+ >>> my_dict = sorteddict({1: 5, 6: 8, -5: 9})
+ >>> my_dict.keys()
+ [-5, 1, 6]
+ >>>
+
+ The btuple is a drop-in replacement for the built-in tuple. Compared
+ to the built-in tuple, the btuple offers the following advantages:
+
+ - Constructing a btuple from a blist takes O(1) time.
+ - Taking a slice of a btuple takes O(n) time, where n is the size of
+ the original tuple. The size of the slice does not matter.
+
+ >>> from blist import blist, btuple
+ >>> x = blist([0]) # x is a blist with one element
+ >>> x *= 2**29 # x is a blist with > 500 million elements
+ >>> y = btuple(x) # y is a btuple with > 500 million elements
+
+ Installation instructions
+ -------------------------
+
+ Python 2.5 or higher is required. If building from the source
+ distribution, the Python header files are also required. In either
+ case, just run:
+
+ python setup.py install
+
+ If you're running Linux and see a bunch of compilation errors from
+ GCC, you probably do not have the Python header files installed.
+ They're usually located in a package called something like
+ "python2.6-dev".
+
+ The blist package will be installed in the 'site-packages' directory of
+ your Python installation. (Unless directed elsewhere; see the
+ "Installing Python Modules" section of the Python manuals for details
+ on customizing installation locations, etc.).
+
+ If you downloaded the source distribution and wish to run the
+ associated test suite, you can also run:
+
+ python setup.py test
+
+ which will verify the correct installation and functioning of the
+ package. The tests require Python 2.6 or higher.
+
+ Feedback
+ --------
+
+ We're eager to hear about your experiences with the blist. You can
+ email me at daniel@stutzbachenterprises.com. Alternately, bug reports
+ and feature requests may be reported on our bug tracker at:
+ http://github.com/DanielStutzbach/blist/issues
+
+ How we test
+ -----------
+
+        In addition to the tests included in the source distribution, we
+ perform the following to add extra rigor to our testing process:
+
+ 1. We use a "fuzzer": a program that randomly generates list
+ operations, performs them using both the blist and the built-in
+ list, and compares the results.
+
+ 2. We use a modified Python interpreter where we have replaced the
+ array-based built-in list with the blist. Then, we run all of
+ the regular Python unit tests.
+
+Keywords: blist list b+tree btree fast copy-on-write sparse array sortedlist sorted sortedset weak weaksortedlist weaksortedset sorteddict btuple
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: C
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Provides: blist
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000..32e569c
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,179 @@
+blist: a list-like type with better performance
+===============================================
+
+The ``blist`` is a drop-in replacement for the Python list that provides
+better performance when modifying large lists. The blist package also
+provides ``sortedlist``, ``sortedset``, ``weaksortedlist``,
+``weaksortedset``, ``sorteddict``, and ``btuple`` types.
+
+Full documentation is at the link below:
+
+http://stutzbachenterprises.com/blist-doc/
+
+Python's built-in list is a dynamically-sized array; to insert or
+remove an item from the beginning or middle of the list, it has to
+move most of the list in memory, i.e., O(n) operations. The blist
+uses a flexible, hybrid array/tree structure and only needs to move a
+small portion of items in memory, specifically using O(log n)
+operations.
+
+For small lists, the blist and the built-in list have virtually
+identical performance.
+
+To use the blist, you simply change code like this:
+
+>>> items = [5, 6, 2]
+>>> more_items = function_that_returns_a_list()
+
+to:
+
+>>> from blist import blist
+>>> items = blist([5, 6, 2])
+>>> more_items = blist(function_that_returns_a_list())
+
+Here are some of the use cases where the blist asymptotically
+outperforms the built-in list:
+
+========================================== ================ =========
+Use Case blist list
+========================================== ================ =========
+Insertion into or removal from a list O(log n) O(n)
+Taking slices of lists O(log n) O(n)
+Making shallow copies of lists O(1) O(n)
+Changing slices of lists O(log n + log k) O(n+k)
+Multiplying a list to make a sparse list O(log k) O(kn)
+Maintain a sorted list with bisect.insort  O(log**2 n)     O(n)
+========================================== ================ =========
+
+So that you can see the performance of the blist in more detail, several
+performance graphs are available at the following link:
+http://stutzbachenterprises.com/blist/
+
+Example usage:
+
+>>> from blist import *
+>>> x = blist([0]) # x is a blist with one element
+>>> x *= 2**29 # x is a blist with > 500 million elements
+>>> x.append(5) # append to x
+>>> y = x[4:-234234] # Take a 500 million element slice from x
+>>> del x[3:1024] # Delete a few thousand elements from x
+
+Other data structures
+---------------------
+
+The blist package provides other data structures based on the blist:
+
+- sortedlist
+- sortedset
+- weaksortedlist
+- weaksortedset
+- sorteddict
+- btuple
+
+These additional data structures are only available in Python 2.6 or
+higher, as they make use of Abstract Base Classes.
+
+The sortedlist is a list that's always sorted. It's iterable and
+indexable like a Python list, but to modify a sortedlist you use the
+same methods you would use on a Python set (add, discard, or remove).
+
+>>> from blist import sortedlist
+>>> my_list = sortedlist([3,7,2,1])
+>>> my_list
+sortedlist([1, 2, 3, 7])
+>>> my_list.add(5)
+>>> my_list[3]
+5
+>>>
+
+The sortedlist constructor takes an optional "key" argument, which may
+be used to change the sort order just like the sorted() function.
+
+>>> from blist import sortedlist
+>>> my_list = sortedlist([3,7,2,1], key=lambda i: -i)
+sortedlist([7, 3, 2, 1])
+>>>
+
+The sortedset is a set that's always sorted. It's iterable and
+indexable like a Python list, but modified like a set. Essentially,
+it's just like a sortedlist except that duplicates are ignored.
+
+>>> from blist import sortedset
+>>> my_set = sortedset([3,7,2,2])
+sortedset([2, 3, 7])
+>>>
+
+The weaksortedlist and weaksortedset are weakref variations of the
+sortedlist and sortedset.
+
+The sorteddict works just like a regular dict, except the keys are
+always sorted. The sorteddict should not be confused with Python
+2.7's OrderedDict type, which remembers the insertion order of the
+keys.
+
+>>> from blist import sorteddict
+>>> my_dict = sorteddict({1: 5, 6: 8, -5: 9})
+>>> my_dict.keys()
+[-5, 1, 6]
+>>>
+
+The btuple is a drop-in replacement for the built-in tuple. Compared
+to the built-in tuple, the btuple offers the following advantages:
+
+- Constructing a btuple from a blist takes O(1) time.
+- Taking a slice of a btuple takes O(n) time, where n is the size of
+ the original tuple. The size of the slice does not matter.
+
+>>> from blist import blist, btuple
+>>> x = blist([0]) # x is a blist with one element
+>>> x *= 2**29 # x is a blist with > 500 million elements
+>>> y = btuple(x) # y is a btuple with > 500 million elements
+
+Installation instructions
+-------------------------
+
+Python 2.5 or higher is required. If building from the source
+distribution, the Python header files are also required. In either
+case, just run:
+
+ python setup.py install
+
+If you're running Linux and see a bunch of compilation errors from
+GCC, you probably do not have the Python header files installed.
+They're usually located in a package called something like
+"python2.6-dev".
+
+The blist package will be installed in the 'site-packages' directory of
+your Python installation. (Unless directed elsewhere; see the
+"Installing Python Modules" section of the Python manuals for details
+on customizing installation locations, etc.).
+
+If you downloaded the source distribution and wish to run the
+associated test suite, you can also run:
+
+ python setup.py test
+
+which will verify the correct installation and functioning of the
+package. The tests require Python 2.6 or higher.
+
+Feedback
+--------
+
+We're eager to hear about your experiences with the blist. You can
+email me at daniel@stutzbachenterprises.com. Alternately, bug reports
+and feature requests may be reported on our bug tracker at:
+http://github.com/DanielStutzbach/blist/issues
+
+How we test
+-----------
+
+In addition to the tests included in the source distribution, we
+perform the following to add extra rigor to our testing process:
+
+ 1. We use a "fuzzer": a program that randomly generates list
+ operations, performs them using both the blist and the built-in
+ list, and compares the results.
+
+ 2. We use a modified Python interpreter where we have replaced the
+ array-based built-in list with the blist. Then, we run all of
+ the regular Python unit tests.
diff --git a/_blist.c b/_blist.c
new file mode 100644
index 0000000..7efb681
--- /dev/null
+++ b/_blist.c
@@ -0,0 +1,7684 @@
+/* Copyright 2007-2010 Stutzbach Enterprises, LLC
+ * daniel@stutzbachenterprises.com
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * 3. The name of the author may not be used to endorse or promote
+ * products derived from this software without specific prior written
+ * permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+ * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+ * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+ * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+ * POSSIBILITY OF SUCH DAMAGE.
+ */
+
+/**********************************************************************
+ * *
+ * PLEASE READ blist.rst BEFORE MODIFYING THIS CODE *
+ * *
+ **********************************************************************/
+
+
+#include <Python.h>
+#include <stddef.h>
+
+#if !defined(__STDC_VERSION__) || __STDC_VERSION__ < 199901L
+#define restrict
+#endif
+
+#if PY_MAJOR_VERSION == 2
+
+#ifndef PY_UINT32_T
+#if (defined UINT32_MAX || defined uint32_t)
+#define HAVE_UINT32_T 1
+#define PY_UINT32_T uint32_t
+#elif defined(MS_WINDOWS)
+#if SIZEOF_INT == 4
+#define HAVE_UINT32_T 1
+#define PY_UINT32_T unsigned int
+#elif SIZEOF_LONG == 4
+#define HAVE_UINT32_T 1
+#define PY_UINT32_T unsigned long
+#endif
+#endif
+#endif
+
+#ifndef PY_UINT64_T
+#if (defined UINT64_MAX || defined uint64_t)
+#define HAVE_UINT64_T 1
+#define PY_UINT64_T uint64_t
+#elif defined(MS_WINDOWS)
+#if SIZEOF_LONG_LONG == 8
+#define HAVE_UINT64_T 1
+#define PY_UINT64_T unsigned PY_LONG_LONG
+#endif
+#endif
+#endif
+
+#ifndef PY_INT32_T
+#if (defined INT32_MAX || defined int32_t)
+#define HAVE_INT32_T 1
+#define PY_INT32_T int32_t
+#elif defined(MS_WINDOWS)
+#if SIZEOF_INT == 4
+#define HAVE_INT32_T 1
+#define PY_INT32_T int
+#elif SIZEOF_LONG == 4
+#define HAVE_INT32_T 1
+#define PY_INT32_T long
+#endif
+#endif
+#endif
+
+#ifndef PY_INT64_T
+#if (defined INT64_MAX || defined int64_t)
+#define HAVE_INT64_T 1
+#define PY_INT64_T int64_t
+#elif defined(MS_WINDOWS)
+#if SIZEOF_LONG_LONG == 8
+#define HAVE_INT64_T 1
+#define PY_INT64_T PY_LONG_LONG
+#endif
+#endif
+#endif
+
+/* This macro is defined in Python 3. We need it since calling
+ * PyObject_GC_UnTrack twice is unsafe. */
+/* True if the object is currently tracked by the GC. */
+#define _PyObject_GC_IS_TRACKED(o) \
+ ((_Py_AS_GC(o))->gc.gc_refs != _PyGC_REFS_UNTRACKED)
+
+#if PY_MINOR_VERSION < 6
+/* Backward compatibility with Python 2.5 */
+#define PyUnicode_FromString PyString_FromString
+#define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt)
+#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
+#define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size)
+#define PyVarObject_HEAD_INIT(type, size) \
+ PyObject_HEAD_INIT(type) size,
+#define PyUnicode_FromFormat PyString_FromFormat
+#endif
+
+#elif PY_MAJOR_VERSION == 3
+/* Backward compatibility with Python 3 */
+#define PyInt_FromSsize_t PyLong_FromSsize_t
+#define PyInt_CheckExact PyLong_CheckExact
+#define PyInt_AsLong PyLong_AsLong
+#define PyInt_AsSsize_t PyLong_AsSsize_t
+#define PyInt_FromLong PyLong_FromLong
+#endif
+
+#ifndef BLIST_IN_PYTHON
+#include "blist.h"
+#endif
+
+#define BLIST_PYAPI(type) static type
+
+typedef struct {
+ PyBList *lst;
+ int i;
+} point_t;
+
+typedef struct {
+ int depth;
+ PyBList *leaf;
+ int i;
+ point_t stack[MAX_HEIGHT];
+} iter_t;
+
+typedef struct {
+ PyObject_HEAD
+ iter_t iter;
+} blistiterobject;
+
+/* Empty BList reuse scheme to save calls to malloc and free */
+#define MAXFREELISTS 80
+static PyBList *free_lists[MAXFREELISTS];
+static int num_free_lists = 0;
+
+static PyBList *free_ulists[MAXFREELISTS];
+static int num_free_ulists = 0;
+
+static blistiterobject *free_iters[MAXFREELISTS];
+static int num_free_iters = 0;
+
+typedef struct sortwrapperobject
+{
+ union {
+ unsigned long k_ulong;
+#ifdef BLIST_FLOAT_RADIX_SORT
+ PY_UINT64_T k_uint64;
+#endif
+ } fkey;
+ PyObject *key;
+ PyObject *value;
+} sortwrapperobject;
+
+#define PyBList_Check(op) (PyObject_TypeCheck((op), &PyBList_Type) || (PyObject_TypeCheck((op), &PyRootBList_Type)))
+#define PyRootBList_Check(op) (PyObject_TypeCheck((op), &PyRootBList_Type))
+#define PyRootBList_CheckExact(op) (Py_TYPE((op)) == &PyRootBList_Type)
+#define PyBList_CheckExact(op) ((op)->ob_type == &PyBList_Type || (op)->ob_type == &PyRootBList_Type)
+#define PyBListIter_Check(op) (PyObject_TypeCheck((op), &PyBListIter_Type) || (PyObject_TypeCheck((op), &PyBListReverseIter_Type)))
+
+#define INDEX_LENGTH(self) (((self)->n-1) / INDEX_FACTOR + 1)
+
+/************************************************************************
+ * Utility functions for copying and moving children.
+ */
+
+/* copy n children from index k2 of other to index k of self */
+BLIST_LOCAL(void)
+copy(PyBList *self, int k, PyBList *other, int k2, int n)
+{
+ PyObject **restrict src = &other->children[k2];
+ PyObject **restrict dst = &self->children[k];
+ PyObject **stop = &other->children[k2+n];
+
+ assert(self != other);
+
+ while (src < stop)
+ *dst++ = *src++;
+}
+
+/* like copy(), but incrementing references */
+BLIST_LOCAL(void)
+copyref(PyBList *self, int k, PyBList *other, int k2,int n) {
+ PyObject **restrict src = &other->children[k2];
+ PyObject **restrict dst = &self->children[k];
+ PyObject **stop = &src[n];
+
+ while (src < stop) {
+ Py_INCREF(*src);
+ *dst++ = *src++;
+ }
+}
+
+/* like copy(), but incrementing references but check for NULL */
+BLIST_LOCAL(void)
+xcopyref(PyBList *self, int k, PyBList *other,int k2,int n) {
+ PyObject **restrict src = &other->children[k2];
+ PyObject **restrict dst = &self->children[k];
+ PyObject **stop = &src[n];
+
+ while (src < stop) {
+ Py_XINCREF(*src);
+ *dst++ = *src++;
+ }
+}
+
+/* Move children starting at k to the right by n */
+BLIST_LOCAL(void)
+shift_right(PyBList *self, int k, int n)
+{
+ PyObject **src = &self->children[self->num_children-1];
+ PyObject **dst = &self->children[self->num_children-1 + n];
+ PyObject **stop = &self->children[k];
+
+ if (self->num_children == 0)
+ return;
+
+ assert(k >= 0);
+ assert(k <= LIMIT);
+ assert(n + self->num_children <= LIMIT);
+
+ while (src >= stop)
+ *dst-- = *src--;
+}
+
+/* Move children starting at k to the left by n */
+BLIST_LOCAL(void)
+shift_left(PyBList *self, int k, int n)
+{
+ PyObject **src = &self->children[k];
+ PyObject **dst = &self->children[k - n];
+ PyObject **stop = &self->children[self->num_children];
+
+ assert(k - n >= 0);
+ assert(k >= 0);
+ assert(k <= LIMIT);
+ assert(self->num_children -n >= 0);
+
+ while (src < stop)
+ *dst++ = *src++;
+
+#ifdef Py_DEBUG
+ while (dst < stop)
+ *dst++ = NULL;
+#endif
+}
+
+BLIST_LOCAL(void)
+balance_leafs(PyBList *restrict leaf1, PyBList *restrict leaf2)
+{
+ assert(leaf1->leaf);
+ assert(leaf2->leaf);
+ if (leaf1->num_children + leaf2->num_children <= LIMIT) {
+ copy(leaf1, leaf1->num_children, leaf2, 0,leaf2->num_children);
+ leaf1->num_children += leaf2->num_children;
+ leaf1->n += leaf2->num_children;
+ leaf2->num_children = 0;
+ leaf2->n = 0;
+ } else if (leaf1->num_children < HALF) {
+ int needed = HALF - leaf1->num_children;
+
+ copy(leaf1, leaf1->num_children, leaf2, 0, needed);
+ leaf1->num_children += needed;
+ leaf1->n += needed;
+ shift_left(leaf2, needed, needed);
+ leaf2->num_children -= needed;
+ leaf2->n -= needed;
+ } else if (leaf2->num_children < HALF) {
+ int needed = HALF - leaf2->num_children;
+
+ shift_right(leaf2, 0, needed);
+ copy(leaf2, 0, leaf1, leaf1->num_children-needed, needed);
+ leaf1->num_children -= needed;
+ leaf1->n -= needed;
+ leaf2->num_children += needed;
+ leaf2->n += needed;
+ }
+}
+
+/************************************************************************
+ * Macros for O(1) iteration over a BList via Depth-First-Search. If
+ * the root is also a leaf, it will skip the memory allocation and
+ * just use a for loop.
+ */
+
+/* Iteration over part of the list */
+#define ITER2(lst, item, start, stop, block) {\
+ iter_t _it; int _use_iter; \
+ if (lst->leaf) { \
+ Py_ssize_t _i; _use_iter = 0; \
+ for (_i = (start); _i < lst->num_children && _i < (stop); _i++) { \
+ item = lst->children[_i]; \
+ block; \
+ } \
+ } else { \
+ Py_ssize_t _remaining = (stop) - (start);\
+ PyBList *_p; _use_iter = 1;\
+ iter_init2(&_it, (lst), (start)); \
+ _p = _it.leaf; \
+ while (_p != NULL && _remaining--) { \
+ if (_it.i < _p->num_children) { \
+ item = _p->children[_it.i++]; \
+ } else { \
+ item = iter_next(&_it); \
+ _p = _it.leaf; \
+ if (item == NULL) break; \
+ } \
+ block; \
+ } \
+ iter_cleanup(&_it); \
+ } \
+}
+
+/* Iteration over the whole list */
+#define ITER(lst, item, block) {\
+ if ((lst)->leaf) { \
+ iter_t _it; \
+ Py_ssize_t _i; const int _use_iter = 0;\
+ for (_i = 0; _i < (lst)->num_children; _i++) { \
+ item = (lst)->children[_i]; \
+ block; \
+ } ITER_CLEANUP(); \
+ } else { \
+ iter_t _it; \
+ PyBList *_p; \
+ const int _use_iter = 1; \
+ iter_init(&_it, (lst)); \
+ _p = _it.leaf; \
+ while (_p) { \
+ if (_it.i < _p->num_children) { \
+ item = _p->children[_it.i++]; \
+ } else { \
+ item = iter_next(&_it); \
+ _p = _it.leaf; \
+ if (item == NULL) break; \
+ } \
+ block; \
+ } \
+ ITER_CLEANUP(); \
+ } \
+}
+
+/* Call this before when leaving the ITER via return or goto. */
+#define ITER_CLEANUP() if (_use_iter) iter_cleanup(&_it)
+
+/* Forward declarations */
+PyTypeObject PyBList_Type;
+PyTypeObject PyRootBList_Type;
+PyTypeObject PyBListIter_Type;
+PyTypeObject PyBListReverseIter_Type;
+static void ext_init(PyBListRoot *root);
+static void ext_mark(PyBList *broot, Py_ssize_t offset, int value);
+static void ext_mark_set_dirty(PyBList *broot, Py_ssize_t i, Py_ssize_t j);
+static void ext_mark_set_dirty_all(PyBList *broot);
+
+/* also hard-coded in blist.h */
+#define DIRTY (-1)
+#define CLEAN (-2)
+#define CLEAN_RW (-3) /* Only valid for dirty_root */
+
+static PyObject *_indexerr = NULL;
+void set_index_error(void)
+{
+ if (_indexerr == NULL)
+ _indexerr = PyUnicode_FromString("list index out of range");
+ PyErr_SetObject(PyExc_IndexError, _indexerr);
+}
+
+/************************************************************************
+ * Debugging forward declarations
+ */
+
+#ifdef Py_DEBUG
+
+static int blist_unstable = 0;
+static int blist_in_code = 0;
+static int blist_danger = 0;
+#define DANGER_GC_BEGIN { int _blist_unstable = blist_unstable, _blist_in_code = blist_in_code; blist_unstable = 0; blist_in_code = 0; blist_danger++
+#define DANGER_GC_END assert(!blist_unstable); assert(!blist_in_code); blist_unstable = _blist_unstable; blist_in_code = _blist_in_code; assert(blist_danger); blist_danger--; } while (0)
+#define DANGER_BEGIN DANGER_GC_BEGIN; assert(!gc_pause_count)
+#define DANGER_END assert(!gc_pause_count); DANGER_GC_END
+
+#else
+
+#define DANGER_BEGIN while(0)
+#define DANGER_END while(0)
+#define DANGER_GC_BEGIN while(0)
+#define DANGER_GC_END while(0)
+
+#endif
+
+/************************************************************************
+ * Functions we wish CPython's API provided :-)
+ */
+
+#ifdef Py_DEBUG
+static int gc_pause_count = 0;
+#endif
+
+#ifdef BLIST_IN_PYTHON
+#define gc_pause() (0)
+#define gc_unpause(previous) do {} while (0)
+#else
+static PyObject * (*pgc_disable)(PyObject *self, PyObject *noargs);
+static PyObject * (*pgc_enable)(PyObject *self, PyObject *noargs);
+static PyObject * (*pgc_isenabled)(PyObject *self, PyObject *noargs);
+
+BLIST_LOCAL(void)
+gc_unpause(int previous)
+{
+#ifdef Py_DEBUG
+ assert(gc_pause_count > 0);
+ gc_pause_count--;
+#endif
+ if (previous) {
+ PyObject *rv = pgc_enable(NULL, NULL);
+ Py_DECREF(rv);
+ }
+}
+
+BLIST_LOCAL(int)
+gc_pause(void)
+{
+ int rv;
+ PyObject *enabled = pgc_isenabled(NULL, NULL);
+ rv = (enabled == Py_True);
+ Py_DECREF(enabled);
+ if (rv) {
+ PyObject *none = pgc_disable(NULL, NULL);
+ Py_DECREF(none);
+ }
+#ifdef Py_DEBUG
+ gc_pause_count++;
+#endif
+ return rv;
+}
+#endif
+
+BLIST_LOCAL(int)
+do_eq(PyObject *v, PyObject *w)
+{
+ richcmpfunc f;
+ PyObject *res;
+ int rv;
+
+ if (Py_EnterRecursiveCall(" in cmp"))
+ return -1;
+
+ if (v->ob_type != w->ob_type &&
+ PyType_IsSubtype(w->ob_type, v->ob_type) &&
+ (f = w->ob_type->tp_richcompare) != NULL) {
+ res = (*f)(w, v, Py_EQ);
+ if (res != Py_NotImplemented) {
+ ob_to_int:
+ if (res == Py_False)
+ rv = 0;
+ else if (res == Py_True)
+ rv = 1;
+ else if (res == NULL) {
+ Py_LeaveRecursiveCall();
+ return -1;
+ } else
+ rv = PyObject_IsTrue(res);
+ Py_DECREF(res);
+ Py_LeaveRecursiveCall();
+ return rv;
+ }
+ Py_DECREF(res);
+ }
+ if ((f = v->ob_type->tp_richcompare) != NULL) {
+ res = (*f)(v, w, Py_EQ);
+ if (res != Py_NotImplemented)
+ goto ob_to_int;
+ Py_DECREF(res);
+ }
+ if ((f = w->ob_type->tp_richcompare) != NULL) {
+ res = (*f)(w, v, Py_EQ);
+ if (res != Py_NotImplemented)
+ goto ob_to_int;
+ Py_DECREF(res);
+ }
+
+ Py_LeaveRecursiveCall();
+#if PY_MAJOR_VERSION < 3
+ rv = PyObject_Compare(v, w);
+ if (PyErr_Occurred())
+ return -1;
+ if (rv == 0)
+ return 1;
+#endif
+ return 0;
+}
+
+/* If fast_type == v->ob_type == w->ob_type, then we can assume:
+ * 1) tp_richcompare != NULL
+ * 2) tp_richcompare will not recurse
+ * 3) tp_richcompare(v, w, op) == tp_richcompare(w, v, symmetric_op)
+ * 4) tp_richcompare(v, w, op) can return only Py_True or Py_False
+ *
+ * These assumptions hold for built-in, immutable, non-container types.
+ */
+static int
+fast_eq_richcompare(PyObject *v, PyObject *w, PyTypeObject *fast_type)
+{
+ if (v == w) return 1;
+ if (v->ob_type == fast_type && w->ob_type == fast_type) {
+ PyObject *res = v->ob_type->tp_richcompare(v, w, Py_EQ);
+ Py_DECREF(res);
+
+ return res == Py_True;
+ } else {
+ int rv;
+ DANGER_BEGIN;
+ rv = do_eq(v, w);
+ DANGER_END;
+ return rv;
+ }
+}
+
+#if PY_MAJOR_VERSION < 3
+/* Python 2 equality fast path using tp_compare for matching fast_type */
+static int
+fast_eq_compare(PyObject *v, PyObject *w, PyTypeObject *fast_type)
+{
+        if (v == w) return 1;
+        if (v->ob_type == w->ob_type && v->ob_type == fast_type)
+                return v->ob_type->tp_compare(v, w) == 0;
+        else {
+                int rv;
+                DANGER_BEGIN;
+                rv = PyObject_RichCompareBool(v, w, Py_EQ);
+                DANGER_END;
+                return rv;
+        }
+}
+#endif
+
+/* Less-than test optimized for fast_type; same immortal-result
+ * reasoning as fast_eq_richcompare for the post-DECREF compare. */
+static int
+fast_lt_richcompare(PyObject *v, PyObject *w, PyTypeObject *fast_type)
+{
+        if (v->ob_type == w->ob_type && v->ob_type == fast_type) {
+                PyObject *res = v->ob_type->tp_richcompare(v, w, Py_LT);
+                Py_DECREF(res);
+
+                return res == Py_True;
+        } else {
+                int rv;
+                DANGER_BEGIN;
+                rv = PyObject_RichCompareBool(v, w, Py_LT);
+                DANGER_END;
+                return rv;
+        }
+}
+
+#if PY_MAJOR_VERSION < 3
+
+/* Python 2 less-than fast path using tp_compare for matching fast_type */
+static int
+fast_lt_compare(PyObject *v, PyObject *w, PyTypeObject *fast_type)
+{
+        if (v->ob_type == w->ob_type && v->ob_type == fast_type)
+                return v->ob_type->tp_compare(v, w) < 0;
+        else {
+                int rv;
+                DANGER_BEGIN;
+                rv = PyObject_RichCompareBool(v, w, Py_LT);
+                DANGER_END;
+                return rv;
+        }
+}
+
+typedef int fast_compare_t(PyObject *v, PyObject *w, PyTypeObject *fast_type);
+typedef struct fast_compare_data
+{
+ PyTypeObject *fast_type;
+ fast_compare_t *comparer;
+} fast_compare_data_t;
+
+/* Python 2: pick a fast comparison strategy for ob and operator op.
+ * int/long get the tp_compare path; other built-in immutable scalar
+ * types get the tp_richcompare path.  fast_type stays NULL when no
+ * fast path is safe for this (type, op) pair. */
+BLIST_LOCAL(fast_compare_data_t)
+_check_fast_cmp_type(PyObject *ob, int op)
+{
+        fast_compare_data_t rv = { NULL, NULL };
+
+        if (ob->ob_type == &PyInt_Type
+            || ob->ob_type == &PyLong_Type) {
+                rv.fast_type = ob->ob_type;
+                if (op == Py_EQ)
+                        rv.comparer = fast_eq_compare;
+                else if (op == Py_LT)
+                        rv.comparer = fast_lt_compare;
+                else
+                        rv.fast_type = NULL;
+        } else {
+                if (op == Py_EQ)
+                        rv.comparer = fast_eq_richcompare;
+                else if (op == Py_LT)
+                        rv.comparer = fast_lt_richcompare;
+                else
+                        return rv;
+
+                /* complex has no ordering, so it is fast only for (in)equality */
+                if ((ob->ob_type == &PyComplex_Type && (op == Py_EQ || op == Py_NE))
+                    || ob->ob_type == &PyFloat_Type
+                    || ob->ob_type == &PyLong_Type
+                    || ob->ob_type == &PyUnicode_Type
+                    || ob->ob_type == &PyString_Type) {
+                        rv.fast_type = ob->ob_type;
+                }
+        }
+
+        return rv;
+}
+
+#define check_fast_cmp_type(ob, op) \
+ (_check_fast_cmp_type((ob), (op)))
+#define fast_eq(v, w, name) \
+ (((name).comparer == fast_eq_compare) \
+ ? fast_eq_compare((v), (w), (name).fast_type) \
+ : fast_eq_richcompare((v), (w), (name).fast_type))
+#define fast_lt(v, w, name) \
+ (((name).comparer == fast_lt_compare) \
+ ? fast_lt_compare((v), (w), (name).fast_type) \
+ : fast_lt_richcompare((v), (w), (name).fast_type))
+
+static const fast_compare_data_t no_fast_lt = { NULL, fast_lt_richcompare };
+static const fast_compare_data_t no_fast_eq = { NULL, fast_eq_richcompare };
+
+#else
+
+typedef PyTypeObject *fast_compare_data_t;
+
+/* Python 3: return ob's type if it is a built-in immutable scalar type
+ * safe for the fast comparison assumptions above, else NULL.
+ * NOTE(review): PyLong_Type is tested twice; harmless but redundant. */
+BLIST_LOCAL(fast_compare_data_t)
+_check_fast_cmp_type(PyObject *ob, int op)
+{
+        if ((ob->ob_type == &PyComplex_Type && (op == Py_EQ || op == Py_NE))
+            || ob->ob_type == &PyFloat_Type
+            || ob->ob_type == &PyLong_Type
+            || ob->ob_type == &PyUnicode_Type
+            || ob->ob_type == &PyBytes_Type
+            || ob->ob_type == &PyLong_Type)
+                return ob->ob_type;
+        return NULL;
+}
+
+#define check_fast_cmp_type(ob, op) (_check_fast_cmp_type((ob), (op)))
+
+#define fast_eq(v, w, name) (fast_eq_richcompare((v), (w), (name)))
+#define fast_lt(v, w, name) (fast_lt_richcompare((v), (w), (name)))
+
+#define no_fast_lt (NULL)
+#define no_fast_eq (NULL)
+
+#endif
+
+/************************************************************************
+ * Utility functions for removing items from a BList
+ *
+ * Objects in Python can execute arbitrary code when garbage
+ * collected, which means they may make calls that modify the BList
+ * that we're deleting items from. Yuck.
+ *
+ * To avoid this in the general case, any function that removes items
+ * from a BList calls decref_later() on the object instead of
+ * Py_DECREF(). The objects are accumulated in a global list of
+ * objects pending for deletion. Just before the function returns to
+ * the interpreter, decref_flush() is called to actually decrement the
+ * reference counters.
+ *
+ * decref_later() can be passed PyBList objects to delete whole
+ * subtrees.
+ */
+
+static PyObject **decref_list = NULL;
+static Py_ssize_t decref_max = 0;
+static Py_ssize_t decref_num = 0;
+
+#define DECREF_BASE (2*128)
+
+/* Allocate the initial deferred-decref buffer.  Returns 0 on success,
+ * -1 on memory allocation failure. */
+int decref_init(void)
+{
+        decref_max = DECREF_BASE;
+        decref_list = (PyObject **) PyMem_New(PyObject *, decref_max);
+        if (decref_list == NULL)
+                return -1;
+        return 0;
+}
+
+/* Queue ob for decref at the next decref_flush(), growing the buffer
+ * as needed.  On allocation failure the pending error is set and ob is
+ * silently dropped (leaked) rather than decrefed unsafely. */
+static void _decref_later(PyObject *ob)
+{
+        if (decref_num == decref_max) {
+                PyObject **tmp = decref_list;
+                decref_max *= 2;
+
+                PyMem_Resize(decref_list, PyObject *, decref_max);
+                if (decref_list == NULL) {
+                        PyErr_NoMemory();
+                        /* restore the old buffer so future calls still work */
+                        decref_list = tmp;
+                        decref_max /= 2;
+                        return;
+                }
+        }
+
+        decref_list[decref_num++] = ob;
+}
+#define decref_later(ob) do { if (Py_REFCNT((ob)) > 1) { Py_DECREF((ob)); } else { _decref_later((ob)); } } while (0)
+
+/* NULL-tolerant variant of decref_later(), analogous to Py_XDECREF */
+static void xdecref_later(PyObject *ob)
+{
+        if (ob == NULL)
+                return;
+
+        decref_later(ob);
+}
+
+/* Like shift_left(), adding overwritten entries to the decref_later list */
+static void shift_left_decref(PyBList *self, int k, int n)
+{
+ register PyObject **src = &self->children[k];
+ register PyObject **dst = &self->children[k - n];
+ register PyObject **stop = &self->children[self->num_children];
+ register PyObject **dec;
+ register PyObject **dst_stop = &self->children[k];
+
+ if (decref_num + n > decref_max) {
+ while (decref_num + n > decref_max)
+ decref_max *= 2;
+ /* XXX Out of memory not handled */
+ PyMem_Resize(decref_list, PyObject *, decref_max);
+ }
+
+ dec = &decref_list[decref_num];
+
+ assert(n >= 0);
+ assert(k - n >= 0);
+ assert(k >= 0);
+ assert(k <= LIMIT);
+ assert(self->num_children - n >= 0);
+
+ while (src < stop && dst < dst_stop) {
+ if (*dst != NULL) {
+ if (Py_REFCNT(*dst) > 1) {
+ Py_DECREF(*dst);
+ } else {
+ *dec++ = *dst;
+ }
+ }
+ *dst++ = *src++;
+ }
+
+ while (src < stop) {
+ *dst++ = *src++;
+ }
+
+ while (dst < dst_stop) {
+ if (*dst != NULL) {
+ if (Py_REFCNT(*dst) > 1) {
+ Py_DECREF(*dst);
+ } else {
+ *dec++ = *dst;
+ }
+ }
+ dst++;
+ }
+
+#ifdef Py_DEBUG
+ src = &self->children[self->num_children - n];
+ while (src < stop)
+ *src++ = NULL;
+#endif
+
+ decref_num += dec - &decref_list[decref_num];
+}
+
+/* Drain the deferred-decref list, then shrink its buffer back to the
+ * base size.  Must only be called when the BList is in a consistent
+ * state, since the decrefs may run arbitrary Python code. */
+static void _decref_flush(void)
+{
+        while (decref_num) {
+                /* Py_DECREF() can cause arbitrary other operations on
+                 * BList, potentially even resulting in additional
+                 * calls to decref_later() and decref_flush()!
+                 *
+                 * Any changes to this function must be made VERY
+                 * CAREFULLY to handle this case.
+                 *
+                 * Invariant: whenever we call Py_DECREF, the
+                 * decref_list is in a coherent state.  It contains
+                 * only items that still need to be decrefed.
+                 * Furthermore, we can't cache anything about the
+                 * global variables.
+                 */
+
+                decref_num--;
+                DANGER_BEGIN;
+                Py_DECREF(decref_list[decref_num]);
+                DANGER_END;
+        }
+
+        if (decref_max > DECREF_BASE) {
+                /* Return memory to the system now.
+                 * We may never get another chance
+                 */
+
+                decref_max = DECREF_BASE;
+                PyMem_Resize(decref_list, PyObject *, decref_max);
+        }
+}
+
+/* Redefined in debug mode */
+#define decref_flush() (_decref_flush())
+#define SAFE_DECREF(x) Py_DECREF((x))
+#define SAFE_XDECREF(x) Py_XDECREF((x))
+
+/************************************************************************
+ * Debug functions
+ */
+
+#ifdef Py_DEBUG
+
+/* Debug-only sanity check of one node's structural invariants:
+ * leaf nodes have n == num_children and live children; interior nodes
+ * have HALF..LIMIT children each, with n equal to the children's sum. */
+static void check_invariants(PyBList *self)
+{
+        if (self->leaf) {
+                assert(self->n == self->num_children);
+                int i;
+
+                for (i = 0; i < self->num_children; i++) {
+                        PyObject *child = self->children[i];
+                        if (child != NULL)
+                                assert(Py_REFCNT(child) > 0);
+                }
+        } else {
+                int i;
+                Py_ssize_t total = 0;
+
+                assert(self->num_children > 0);
+
+                for (i = 0; i < self->num_children; i++) {
+                        assert(PyBList_Check(self->children[i]));
+                        assert(!PyRootBList_Check(self->children[i]));
+
+                        PyBList *child = (PyBList *) self->children[i];
+                        assert(child != self);
+                        total += child->n;
+                        assert(child->num_children <= LIMIT);
+                        assert(HALF <= child->num_children);
+                        /* check_invariants(child); */
+                }
+                assert(self->n == total);
+                assert(self->num_children > 1 || self->num_children == 0);
+
+        }
+
+        assert (Py_REFCNT(self) >= 1 || Py_TYPE(self) == &PyRootBList_Type
+                || (Py_REFCNT(self) == 0 && self->num_children == 0));
+}
+
+#define VALID_RW 1
+#define VALID_PARENT 2
+#define VALID_USER 4
+#define VALID_OVERFLOW 8
+#define VALID_COLLAPSE 16
+#define VALID_DECREF 32
+#define VALID_ROOT 64
+
+typedef struct
+{
+ PyBList *self;
+ int options;
+} debug_t;
+
+/* Debug-only function entry hook: validates the preconditions encoded
+ * in debug->options (see the VALID_* flags) before a BList operation. */
+static void debug_setup(debug_t *debug)
+{
+        Py_INCREF(Py_None);
+        blist_in_code++;
+
+        assert(PyBList_Check(debug->self));
+
+        if (debug->options & VALID_DECREF) {
+                assert(blist_in_code == 1);
+                assert(debug->options & VALID_USER);
+        }
+
+#if 0
+        /* Comment this test out since users can get references via
+         * the gc module */
+        if (debug->options & VALID_RW) {
+                assert(Py_REFCNT(debug->self) == 1
+                       || PyRootBList_Check(debug->self));
+        }
+#endif
+
+        if (debug->options & VALID_USER) {
+                debug->options |= VALID_ROOT;
+                assert(PyRootBList_Check(debug->self));
+                if (!debug->self->leaf)
+                        assert(((PyBListRoot *)debug->self)->last_n
+                               == debug->self->n);
+                assert(((PyBListRoot *)debug->self)->dirty_length
+                       || ((PyBListRoot *)debug->self)->dirty_root);
+
+                if (!blist_danger)
+                        assert(decref_num == 0);
+        }
+
+        if (debug->options & VALID_ROOT) {
+                debug->options |= VALID_PARENT;
+
+                assert(Py_REFCNT(debug->self) >= 1);
+        }
+
+        if (debug->options & (VALID_USER | VALID_PARENT)) {
+                check_invariants(debug->self);
+        }
+
+        if ((debug->options & VALID_USER) && (debug->options & VALID_RW)) {
+                assert(!blist_unstable);
+                blist_unstable = 1;
+        }
+}
+
+/* Debug-only function exit hook: re-validates the postconditions that
+ * debug_setup() established on entry. */
+static void debug_return(debug_t *debug)
+{
+        Py_DECREF(Py_None);
+        assert(blist_in_code);
+        blist_in_code--;
+
+#if 0
+        /* Comment this test out since users can get references via
+         * the gc module */
+        if (debug->options & VALID_RW) {
+                assert(Py_REFCNT(debug->self) == 1
+                       || PyRootBList_Check(debug->self));
+        }
+#endif
+
+        if (debug->options
+            & (VALID_PARENT|VALID_USER|VALID_OVERFLOW|VALID_COLLAPSE|VALID_ROOT))
+                check_invariants(debug->self);
+
+        if (debug->options & VALID_USER) {
+                if (!blist_danger)
+                        assert(decref_num == 0);
+                if (!debug->self->leaf)
+                        assert(((PyBListRoot *)debug->self)->last_n
+                               == debug->self->n);
+                assert(((PyBListRoot *)debug->self)->dirty_length
+                       || ((PyBListRoot *)debug->self)->dirty_root);
+        }
+
+        if ((debug->options & VALID_USER) && (debug->options & VALID_RW)) {
+                assert(blist_unstable);
+                blist_unstable = 0;
+        }
+}
+
+/* Debug exit hook for functions returning an overflow node: checks the
+ * returned node (when non-NULL) and passes ret through unchanged. */
+static PyObject *debug_return_overflow(debug_t *debug, PyObject *ret)
+{
+        if (debug->options & VALID_OVERFLOW) {
+                if (ret == NULL) {
+                        debug_return(debug);
+                        return ret;
+                }
+
+                assert(PyBList_Check((PyObject *) ret));
+                check_invariants((PyBList *) ret);
+        }
+
+        assert(!(debug->options & VALID_COLLAPSE));
+
+        debug_return(debug);
+
+        return ret;
+}
+
+/* Debug exit hook for functions returning a collapse count: asserts the
+ * count is non-negative and passes ret through unchanged. */
+static Py_ssize_t debug_return_collapse(debug_t *debug, Py_ssize_t ret)
+{
+        if (debug->options & VALID_COLLAPSE)
+                assert (((Py_ssize_t) ret) >= 0);
+
+        assert(!(debug->options & VALID_OVERFLOW));
+
+        debug_return(debug);
+
+        return ret;
+}
+
+#define invariants(self, options) debug_t _debug = { (PyBList *) (self), (options) }; \
+ debug_setup(&_debug)
+
+#define _blist(ret) (PyBList *) debug_return_overflow(&_debug, (PyObject *) (ret))
+#define _ob(ret) debug_return_overflow(&_debug, (ret))
+#define _int(ret) debug_return_collapse(&_debug, (ret))
+#define _void() do {assert(!(_debug.options & (VALID_OVERFLOW|VALID_COLLAPSE))); debug_return(&_debug);} while (0)
+#define _redir(ret) ((debug_return(&_debug), 1) ? (ret) : (ret))
+
+#undef Py_RETURN_NONE
+#define Py_RETURN_NONE return Py_INCREF(Py_None), _ob(Py_None)
+
+#undef decref_flush
+#define decref_flush() do { assert(_debug.options & VALID_DECREF); _decref_flush(); } while (0)
+
+/* Debug-only: verify that decrefing self cannot trigger deallocation
+ * of any user-visible object (all reachable leaves are externally
+ * referenced), recursing through children about to be freed. */
+static void safe_decref_check(PyBList *self)
+{
+        int i;
+
+        assert(PyBList_Check((PyObject *) self));
+
+        if (Py_REFCNT(self) > 1)
+                return;
+
+        if (self->leaf) {
+                for (i = 0; i < self->num_children; i++)
+                        assert(self->children[i] == NULL
+                               || Py_REFCNT(self->children[i]) > 1);
+                return;
+        }
+
+        for (i = 0; i < self->num_children; i++)
+                safe_decref_check((PyBList *)self->children[i]);
+}
+
+/* Debug-only wrapper around Py_DECREF on a BList node, asserting first
+ * that the decref cannot run arbitrary user code. */
+static void safe_decref(PyBList *self)
+{
+        assert(PyBList_Check((PyObject *) self));
+        safe_decref_check(self);
+
+        DANGER_GC_BEGIN;
+        Py_DECREF(self);
+        DANGER_GC_END;
+}
+
+#undef SAFE_DECREF
+#undef SAFE_XDECREF
+#define SAFE_DECREF(self) (safe_decref((PyBList *)(self)))
+#define SAFE_XDECREF(self) if ((self) == NULL) ; else SAFE_DECREF((self))
+
+#else /* !Py_DEBUG */
+
+#define check_invariants(self)
+#define invariants(self, options)
+#define _blist(ret) (ret)
+#define _ob(ret) (ret)
+#define _int(ret) (ret)
+#define _void()
+#define _redir(ret) (ret)
+
+#endif
+
+/* Append leaf to the array out[0..n), first topping up out[n-1] from
+ * leaf's children so nodes stay as full as possible.  Consumes leaf if
+ * it ends up empty.  Returns the new count n. */
+BLIST_LOCAL(int)
+append_and_squish(PyBList **out, int n, PyBList *leaf)
+{
+        if (n >= 1) {
+                PyBList *last = out[n-1];
+                if (last->num_children + leaf->num_children <= LIMIT) {
+                        /* Everything fits: merge leaf into last entirely */
+                        copy(last, last->num_children, leaf, 0,
+                             leaf->num_children);
+                        last->num_children += leaf->num_children;
+                        last->n += leaf->num_children;
+                        leaf->num_children = 0;
+                        leaf->n = 0;
+                } else {
+                        /* Fill last to LIMIT, keep the remainder in leaf */
+                        int moved = LIMIT - last->num_children;
+                        copy(last, last->num_children, leaf, 0, moved);
+                        shift_left(leaf, moved, moved);
+                        last->num_children = LIMIT;
+                        last->n = LIMIT;
+                        leaf->num_children -= moved;
+                        leaf->n -= moved;
+                }
+        }
+        if (!leaf->num_children)
+                SAFE_DECREF(leaf);
+        else
+                out[n++] = leaf;
+        return n;
+}
+
+/* Rebalance the last two nodes of out[0..n) so neither is underfull,
+ * dropping the final node if it ends up empty.  Returns the new n. */
+BLIST_LOCAL(int)
+balance_last_2(PyBList **out, int n)
+{
+        PyBList *last;
+
+        if (n >= 2)
+                balance_leafs(out[n-2], out[n-1]);
+        if (n >= 1) {
+                last = out[n-1];
+                if (!last->num_children) {
+                        SAFE_DECREF(last);
+                        n--;
+                }
+        }
+        return n;
+}
+
+/************************************************************************
+ * Back to BLists proper.
+ */
+
+/* Creates a new blist for internal use only */
+/* Allocate an empty leaf node, reusing one from the free list when
+ * possible.  Returns NULL with an exception set on memory failure. */
+static PyBList *blist_new(void)
+{
+        PyBList *self;
+
+        if (num_free_lists) {
+                /* Recycle a previously freed node */
+                self = free_lists[--num_free_lists];
+                _Py_NewReference((PyObject *) self);
+        } else {
+                DANGER_GC_BEGIN;
+                self = PyObject_GC_New(PyBList, &PyBList_Type);
+                DANGER_GC_END;
+                if (self == NULL)
+                        return NULL;
+                self->children = PyMem_New(PyObject *, LIMIT);
+                if (self->children == NULL) {
+                        PyObject_GC_Del(self);
+                        PyErr_NoMemory();
+                        return NULL;
+                }
+        }
+
+        self->leaf = 1; /* True */
+        self->num_children = 0;
+        self->n = 0;
+
+        PyObject_GC_Track(self);
+
+        return self;
+}
+
+/* As blist_new(), but the node is not tracked by the cycle collector.
+ * NOTE(review): a NULL return from blist_new() is passed straight to
+ * PyObject_GC_UnTrack here — callers appear to rely on non-failure. */
+static PyBList *blist_new_no_GC(void)
+{
+        PyBList *self = blist_new();
+        PyObject_GC_UnTrack(self);
+        return self;
+}
+
+/* Creates a blist for user use */
+/* Allocate an empty root node (with index extension), reusing one from
+ * the root free list when possible.  NULL on memory failure. */
+static PyBList *blist_root_new(void)
+{
+        PyBList *self;
+
+        if (num_free_ulists) {
+                self = free_ulists[--num_free_ulists];
+                _Py_NewReference((PyObject *) self);
+        } else {
+                DANGER_GC_BEGIN;
+                self = (PyBList *) PyObject_GC_New(PyBListRoot, &PyRootBList_Type);
+                DANGER_GC_END;
+                if (self == NULL)
+                        return NULL;
+                self->children = PyMem_New(PyObject *, LIMIT);
+                if (self->children == NULL) {
+                        PyObject_GC_Del(self);
+                        PyErr_NoMemory();
+                        return NULL;
+                }
+        }
+
+        self->leaf = 1; /* True */
+        self->n = 0;
+        self->num_children = 0;
+
+        /* Roots additionally carry the lookup-index extension */
+        ext_init((PyBListRoot *) self);
+
+        PyObject_GC_Track(self);
+
+        return self;
+}
+
+/* Remove links to some of our children, decrementing their refcounts */
+/* Drop children[i..j), shifting later children down and deferring the
+ * decrefs via shift_left_decref(). */
+static void blist_forget_children2(PyBList *self, int i, int j)
+{
+        int delta = j - i;
+
+        invariants(self, VALID_RW);
+
+        shift_left_decref(self, j, delta);
+        self->num_children -= delta;
+
+        _void();
+}
+
+/* Remove links to all children */
+#define blist_forget_children(self) \
+ (blist_forget_children2((self), 0, (self)->num_children))
+
+/* Remove link to one child */
+#define blist_forget_child(self, i) \
+ (blist_forget_children2((self), (i), (i)+1))
+
+/* Version for internal use defers Py_DECREF calls */
+/* Empty the node in place (internal clear; decrefs are deferred).
+ * Always returns 0. */
+static int blist_CLEAR(PyBList *self)
+{
+        invariants(self, VALID_RW|VALID_PARENT);
+
+        blist_forget_children(self);
+        self->n = 0;
+        self->leaf = 1;
+
+        return _int(0);
+}
+
+/* Make self into a copy of other */
+/* Make self into a copy of other (children are shared by reference). */
+BLIST_LOCAL(void)
+blist_become(PyBList *restrict self, PyBList *restrict other)
+{
+        invariants(self, VALID_RW);
+        assert(self != other);
+
+        Py_INCREF(other); /* "other" may be one of self's children */
+        blist_forget_children(self);
+        self->n = other->n;
+        xcopyref(self, 0, other, 0, other->num_children);
+        self->num_children = other->num_children;
+        self->leaf = other->leaf;
+
+        SAFE_DECREF(other);
+        _void();
+}
+
+/* Make self into a copy of other and empty other */
+/* Make self into a copy of other and empty other, by swapping the
+ * children arrays instead of copying references (O(1) in children). */
+BLIST_LOCAL(void)
+blist_become_and_consume(PyBList *restrict self, PyBList *restrict other)
+{
+        PyObject **tmp;
+
+        invariants(self, VALID_RW);
+        assert(self != other);
+        assert(Py_REFCNT(other) == 1 || PyRootBList_Check(other));
+
+        Py_INCREF(other);
+        blist_forget_children(self);
+        tmp = self->children;
+        self->children = other->children;
+        self->n = other->n;
+        self->num_children = other->num_children;
+        self->leaf = other->leaf;
+
+        /* Hand self's (now empty) array to other */
+        other->children = tmp;
+        other->n = 0;
+        other->num_children = 0;
+        other->leaf = 1;
+
+        SAFE_DECREF(other);
+        _void();
+}
+
+/* Create a copy of self */
+/* Return a new internal node that is a copy of self, or NULL on
+ * memory failure. */
+static PyBList *blist_copy(PyBList *restrict self)
+{
+        PyBList *copy;
+
+        copy = blist_new();
+        if (!copy) return NULL;
+        blist_become(copy, self);
+        return copy;
+}
+
+/* Create a copy of self, which is a root node */
+/* Return a new root node that is a copy of root node self, marking
+ * both indexes dirty since children are now shared.  NULL on failure. */
+static PyBList *blist_root_copy(PyBList *restrict self)
+{
+        PyBList *copy;
+
+        copy = blist_root_new();
+        if (!copy) return NULL;
+        blist_become(copy, self);
+        ext_mark(copy, 0, DIRTY);
+        ext_mark_set_dirty_all(self);
+        return copy;
+}
+
+/************************************************************************
+ * Useful internal utility functions
+ */
+
+/* We are searching for the child that contains leaf element i.
+ *
+ * Returns a 3-tuple: (the child object, our index of the child,
+ * the number of leaf elements before the child)
+ */
+/* We are searching for the child that contains leaf element i.
+ * On return, *child is the child object, *idx its position in
+ * self->children, and *before the number of leaf elements preceding
+ * it.  Scans from whichever end of the children array is closer.
+ * i == self->n falls through to the final child ("just append"). */
+static void blist_locate(PyBList *self, Py_ssize_t i,
+                         PyObject **child, int *idx, Py_ssize_t *before)
+{
+        invariants(self, VALID_PARENT);
+        assert (!self->leaf);
+
+        if (i <= self->n/2) {
+                /* Search from the left */
+                Py_ssize_t so_far = 0;
+                int k;
+                for (k = 0; k < self->num_children; k++) {
+                        PyBList *p = (PyBList *) self->children[k];
+                        if (i < so_far + p->n) {
+                                *child = (PyObject *) p;
+                                *idx = k;
+                                *before = so_far;
+                                _void();
+                                return;
+                        }
+                        so_far += p->n;
+                }
+        } else {
+                /* Search from the right */
+                Py_ssize_t so_far = self->n;
+                int k;
+                for (k = self->num_children-1; k >= 0; k--) {
+                        PyBList *p = (PyBList *) self->children[k];
+                        so_far -= p->n;
+                        if (i >= so_far) {
+                                *child = (PyObject *) p;
+                                *idx = k;
+                                *before = so_far;
+                                _void();
+                                return;
+                        }
+                }
+        }
+
+        /* Just append */
+        *child = self->children[self->num_children-1];
+        *idx = self->num_children-1;
+        *before = self->n - ((PyBList *)(*child))->n;
+
+        _void();
+}
+
+/* Find the current height of the tree.
+ *
+ * We could keep an extra few bytes in each node rather than
+ * figuring this out dynamically, which would reduce the
+ * asymptotic complexity of a few operations.  However, I
+ * suspect it's not worth the extra overhead of updating it all
+ * over the place.
+ */
+/* Return the height of the tree rooted at self (a leaf has height 1),
+ * walking down the rightmost spine. */
+BLIST_LOCAL(int)
+blist_get_height(PyBList *self)
+{
+        invariants(self, VALID_PARENT);
+        if (self->leaf)
+                return _int(1);
+        return _int(blist_get_height((PyBList *)
+                                     self->children[self->num_children - 1])
+                    + 1);
+}
+
+/* Copy-on-write helper: return a uniquely referenced version of the
+ * child at index pt (negative pt counts from the end), replacing a
+ * shared child with a private copy.  Returns NULL on memory failure.
+ * Any caller-held references to the old child become invalid. */
+BLIST_LOCAL(PyBList *)
+blist_prepare_write(PyBList *self, int pt)
+{
+        /* We are about to modify the child at index pt.  Prepare it.
+         *
+         * This function returns the child object.  If the caller has
+         * other references to the child, they must be discarded as they
+         * may no longer be valid.
+         *
+         * If the child's .refcount is 1, we simply return the
+         * child object.
+         *
+         * If the child's .refcount is greater than 1, we:
+         *
+         * - copy the child object
+         * - decrement the child's .refcount
+         * - replace self.children[pt] with the copy
+         * - return the copy
+         */
+
+        invariants(self, VALID_RW);
+        assert(!self->leaf);
+
+        if (pt < 0)
+                pt += self->num_children;
+        if (Py_REFCNT(self->children[pt]) > 1) {
+                PyBList *new_copy = blist_new();
+                if (!new_copy) return NULL;
+                blist_become(new_copy, (PyBList *) self->children[pt]);
+                SAFE_DECREF(self->children[pt]);
+                self->children[pt] = (PyObject *) new_copy;
+        }
+
+        return (PyBList *) _ob(self->children[pt]);
+}
+
+/* Macro version assumes that pt is non-negative */
+#define blist_PREPARE_WRITE(self, pt) (Py_REFCNT((self)->children[(pt)]) > 1 ? blist_prepare_write((self), (pt)) : (PyBList *) (self)->children[(pt)])
+
+/* Recompute self->n */
+/* Recompute self->n from the children (count for a leaf, sum of the
+ * children's n for an interior node). */
+BLIST_LOCAL(void)
+blist_adjust_n(PyBList *restrict self)
+{
+        int i;
+
+        invariants(self, VALID_RW);
+
+        if (self->leaf) {
+                self->n = self->num_children;
+                _void();
+                return;
+        }
+        self->n = 0;
+        for (i = 0; i < self->num_children; i++)
+                self->n += ((PyBList *)self->children[i])->n;
+
+        _void();
+}
+
+/* Non-default constructor. Create a node with specific children.
+ *
+ * We steal the reference counters from the caller.
+ */
+/* Split a full node in half: create a new sibling holding the upper
+ * HALF children (references stolen from sibling).  NULL on failure. */
+static PyBList *blist_new_sibling(PyBList *sibling)
+{
+        PyBList *restrict self = blist_new();
+        if (!self) return NULL;
+        assert(sibling->num_children == LIMIT);
+        copy(self, 0, sibling, HALF, HALF);
+        self->leaf = sibling->leaf;
+        self->num_children = HALF;
+        sibling->num_children = HALF;
+        blist_adjust_n(self);
+        return self;
+}
+
+/************************************************************************
+ * Bit twiddling utility function
+ */
+
+/* Return the highest set bit. E.g., for 0x00845894 return 0x00800000 */
+/* Return the highest set bit.  E.g., for 0x00845894 return 0x00800000.
+ * O(bits) reference implementation; also used to build the table. */
+static unsigned highest_set_bit_slow(unsigned x)
+{
+        unsigned rv = 0;
+        unsigned mask;
+        for (mask = 0x1; mask; mask <<= 1)
+                if (mask & x) rv = mask;
+        return rv;
+}
+
+#if SIZEOF_INT == 4
+static unsigned highest_set_bit_table[256];
+
+/* Precompute the highest-set-bit of every byte value (module init). */
+static void highest_set_bit_init(void)
+{
+        unsigned i;
+        for (i = 0; i < 256; i++)
+                highest_set_bit_table[i] = highest_set_bit_slow(i);
+}
+
+/* Table-driven highest set bit for 32-bit unsigned: locate the top
+ * non-zero byte, then look its contribution up and shift back. */
+static unsigned highest_set_bit(unsigned v)
+{
+        register unsigned tt, t;
+
+        if ((tt = v >> 16))
+                return (t = tt >> 8) ? highest_set_bit_table[t] << 24
+                        : highest_set_bit_table[tt] << 16;
+        else
+                return (t = v >> 8) ? highest_set_bit_table[t] << 8
+                        : highest_set_bit_table[v];
+}
+#else
+#define highest_set_bit_init() ()
+#define highest_set_bit(v) (highest_set_bit_slow((v)))
+#endif
+
+/************************************************************************
+ * Functions for the index extension used by the root node
+ */
+
+/* Initialize the index extension. Does not allocate any memory */
+/* Initialize the index extension.  Does not allocate any memory. */
+static void ext_init(PyBListRoot *root)
+{
+        root->index_list = NULL;
+        root->offset_list = NULL;
+        root->setclean_list = NULL;
+        root->index_allocated = 0;
+        root->dirty = NULL;
+        root->dirty_length = 0;
+        root->dirty_root = DIRTY;
+        root->free_root = -1;
+
+#ifdef Py_DEBUG
+        /* Snapshot of n, used to detect unindexed length changes */
+        root->last_n = root->n;
+#endif
+}
+
+/* Deallocate any memory used by the index extension */
+/* Deallocate any memory used by the index extension and reset it to
+ * the pristine (everything-dirty) state. */
+static void ext_dealloc(PyBListRoot *root)
+{
+        if (root->index_list) PyMem_Free(root->index_list);
+        if (root->offset_list) PyMem_Free(root->offset_list);
+        if (root->setclean_list) PyMem_Free(root->setclean_list);
+        if (root->dirty) PyMem_Free(root->dirty);
+        ext_init(root);
+}
+
+/* Find or create a new free node in "dirty" and return an index to it.
+ * amortized O(1) */
+/* Find or create a new free node in "dirty" and return an index to it.
+ * amortized O(1).  Returns -1 on memory failure.  Nodes occupy pairs
+ * of slots (left child, right child); negative slot values are the
+ * sentinels CLEAN/DIRTY or, on the free list, -1 terminators. */
+static Py_ssize_t ext_alloc(PyBListRoot *root)
+{
+        Py_ssize_t i, parent;
+
+        if (root->free_root < 0) {
+                /* Free list exhausted: grow the dirty array and chain
+                 * the new pairs onto the free list. */
+                int newl;
+                int i;
+
+                if (!root->dirty) {
+                        newl = 32;
+                        root->dirty = PyMem_New(Py_ssize_t, newl);
+                        root->dirty_root = DIRTY;
+                        if (!root->dirty) return -1;
+                } else {
+                        void *tmp;
+
+                        assert(root->dirty_length > 0);
+                        newl = root->dirty_length*2;
+                        tmp = root->dirty;
+                        PyMem_Resize(tmp, Py_ssize_t, newl);
+                        if (!tmp) {
+                                PyMem_Free(root->dirty);
+                                root->dirty = NULL;
+                                root->dirty_root = DIRTY;
+                                return -1;
+                        }
+                        root->dirty = tmp;
+                }
+
+                for (i = root->dirty_length; i < newl; i += 2) {
+                        root->dirty[i] = i+2;
+                        root->dirty[i+1] = -1;
+                }
+                root->dirty[newl-2] = -1;
+                root->free_root = root->dirty_length;
+                root->dirty_length = newl;
+                assert(root->free_root >= 0);
+                assert(root->free_root+1 < root->dirty_length);
+        }
+
+        /* Depth-first search for a node with fewer than 2 children.
+         * Guaranteed to terminate in O(log n) since any leaf node
+         * will suffice.
+         */
+
+        i = root->free_root;
+        parent = -1;
+        assert(i >= 0);
+        assert(i+1 < root->dirty_length);
+        /* NOTE(review): the assert(0) encodes that free-list nodes
+         * never have two live children, so this loop never iterates. */
+        while (root->dirty[i] >= 0 && root->dirty[i+1] >= 0) {
+                assert(0);
+                assert(i >= 0);
+                assert(i+1 < root->dirty_length);
+                parent = i;
+                i = root->dirty[i];
+        }
+
+        /* At this point, "i" is the node to be alloced.  "parent" is
+         * the node containing a pointer to "i" or -1 if free_root
+         * points to "i"
+         *
+         * parent's pointer to i is always the left-hand pointer
+         *
+         * i has at most one child
+         */
+
+        if (parent < 0) {
+                if (root->dirty[i] >= 0)
+                        root->free_root = root->dirty[i];
+                else
+                        root->free_root = root->dirty[i+1];
+        } else {
+                if (root->dirty[i] >= 0)
+                        root->dirty[parent] = root->dirty[i];
+                else
+                        root->dirty[parent] = root->dirty[i+1];
+        }
+
+        assert(i >= 0);
+        assert(i+1 < root->dirty_length);
+        return i;
+}
+
+/* Add each node in the tree rooted at loc to the free tree */
+/* Amortized O(1), since each node to be freed corresponds with
+ * an earlier lookup. Unamortized worst-case O(n)*/
+/* Add each node in the tree rooted at loc to the free tree. */
+/* Amortized O(1), since each node to be freed corresponds with
+ * an earlier lookup.  Unamortized worst-case O(n). */
+static void ext_free(PyBListRoot *root, Py_ssize_t loc)
+{
+        assert(loc >= 0);
+        assert(loc+1 < root->dirty_length);
+        if (root->dirty[loc] >= 0)
+                ext_free(root, root->dirty[loc]);
+        if (root->dirty[loc+1] >= 0)
+                ext_free(root, root->dirty[loc+1]);
+
+        /* Push loc on the free list (left slot = next, right = -1) */
+        root->dirty[loc] = root->free_root;
+        root->dirty[loc+1] = -1;
+        root->free_root = loc;
+        assert(root->free_root >= 0);
+        assert(root->free_root+1 < root->dirty_length);
+}
+
+/* Recursive worker for ext_mark(): descend the binary trie keyed by
+ * the bits of "offset", setting the subtree at/after offset to value.
+ * "bit" is the bit of offset examined at node "i".  When marking
+ * DIRTY, every right sibling along the path also becomes DIRTY
+ * (everything >= offset).  Sibling subtrees that end up equal are
+ * consolidated back into a single sentinel. */
+BLIST_LOCAL(void)
+ext_mark_r(PyBListRoot *root, Py_ssize_t offset, Py_ssize_t i,
+           int bit, int value)
+{
+        Py_ssize_t j, next;
+
+        if (!(offset & bit)) {
+                /* Take left fork */
+
+                if (value == DIRTY) {
+                        /* Mark right fork dirty */
+                        assert(i >= 0 && i+1 < root->dirty_length);
+                        if (root->dirty[i+1] >= 0)
+                                ext_free(root, root->dirty[i+1]);
+                        root->dirty[i+1] = DIRTY;
+                }
+                next = i;
+        } else {
+                /* Take right fork */
+                next = i+1;
+        }
+
+        assert(next >= 0 && next < root->dirty_length);
+
+        j = root->dirty[next];
+
+        if (j == value)
+                return;
+
+        if (bit == 1) {
+                /* Reached a trie leaf: store the value directly */
+                root->dirty[next] = value;
+                return;
+        }
+
+        if (j < 0) {
+                /* Uniform subtree: expand it into a real node whose
+                 * two children both carry the old sentinel value. */
+                Py_ssize_t nvalue = j;
+                Py_ssize_t tmp;
+                tmp = ext_alloc(root);
+                if (tmp < 0) {
+                        ext_dealloc(root);
+                        return;
+                }
+                root->dirty[next] = tmp;
+                j = root->dirty[next];
+                assert(j >= 0);
+                assert(j+1 < root->dirty_length);
+                root->dirty[j] = nvalue;
+                root->dirty[j+1] = nvalue;
+        }
+
+        ext_mark_r(root, offset, j, bit >> 1, value);
+
+        if (root->dirty
+            && (root->dirty[j] == root->dirty[j+1]
+                || (root->dirty[j] < 0
+                    && (((offset | (bit>>1)) & ~((bit>>1)-1))
+                        > (root->n-1) /INDEX_FACTOR)))) {
+                /* Both the same?  Consolidate */
+                ext_free(root, j);
+                root->dirty[next] = value;
+        }
+}
+
+/* If "value" is CLEAN, mark the list clean at exactly "offset".
+ * If "value" is DIRTY, mark the list dirty for all >= "offset".
+ */
+/* If "value" is CLEAN, mark the list clean at exactly "offset".
+ * If "value" is DIRTY, mark the list dirty for all >= "offset".
+ */
+static void ext_mark(PyBList *broot, Py_ssize_t offset, int value)
+{
+        int bit;
+
+        PyBListRoot *root = (PyBListRoot*) broot;
+        if (!root->n) {
+                /* Empty list: nothing to index */
+#ifdef Py_DEBUG
+                root->last_n = root->n;
+#endif
+                return;
+        }
+        if ((!offset && value == DIRTY) || root->n <= INDEX_FACTOR) {
+                /* Whole list dirty, or too small to index: collapse
+                 * the entire trie to the DIRTY sentinel. */
+                if (root->dirty_root >= 0)
+                        ext_free(root, root->dirty_root);
+                root->dirty_root = DIRTY;
+#ifdef Py_DEBUG
+                root->last_n = root->n;
+#endif
+                return;
+        }
+
+#ifdef Py_DEBUG
+        assert(root->last_n == root->n);
+#endif
+
+        if (root->dirty_root == value) return;
+
+        if (root->dirty_root < 0) {
+                /* Uniform trie: expand the root before descending */
+                Py_ssize_t nvalue = root->dirty_root;
+                root->dirty_root = ext_alloc(root);
+                if (root->dirty_root < 0) {
+                        ext_dealloc(root);
+                        return;
+                }
+                assert(root->dirty_root >= 0);
+                assert(root->dirty_root+1 < root->dirty_length);
+                root->dirty[root->dirty_root] = nvalue;
+                root->dirty[root->dirty_root+1] = nvalue;
+        }
+        offset /= INDEX_FACTOR;
+
+        bit = highest_set_bit((root->n-1) / INDEX_FACTOR);
+        ext_mark_r(root, offset, root->dirty_root, bit, value);
+        if (root->dirty &&
+            (root->dirty[root->dirty_root] ==root->dirty[root->dirty_root+1])){
+                /* Root's children agree: collapse back to a sentinel */
+                ext_free(root, root->dirty_root);
+                root->dirty_root = value;
+        }
+}
+
+/* Mark a section of the list dirty for set operations */
+/* Mark a section of the list dirty for set operations */
+static void ext_mark_set_dirty(PyBList *broot, Py_ssize_t i, Py_ssize_t j)
+{
+        /* XXX We could set only the values in the setclean_list, but
+         * that takes O(n) time.  Marking the nodes dirty for reading
+         * takes O(log n) time but will slow down future reads.
+         * Ideally there'd be a separate index_list for set
+         * operations */
+        ext_mark(broot, i, DIRTY);
+}
+
+/* Mark an entire list dirty for set operations */
+/* Mark an entire list dirty for set operations */
+static void ext_mark_set_dirty_all(PyBList *broot)
+{
+        ext_mark_set_dirty(broot, 0, broot->n);
+}
+
+#if 0
+/* These functions are unused, but useful for debugging. Do not remove. */
+
+/* (Debug aid, compiled out) Print the dirty trie rooted at i as a
+ * nested (left,right) expression; sentinels print as their value. */
+static void ext_print_r(PyBListRoot *root, Py_ssize_t i)
+{
+        printf("(");
+        if (root->dirty[i] < 0)
+                printf("%d", root->dirty[i]);
+        else
+                ext_print_r(root, root->dirty[i]);
+        printf(",");
+        if (root->dirty[i+1] < 0)
+                printf("%d", root->dirty[i+1]);
+        else
+                ext_print_r(root, root->dirty[i+1]);
+        printf(")");
+}
+
+/* (Debug aid, compiled out) Print the whole dirty trie from the root. */
+static void ext_print(PyBListRoot *root)
+{
+        if (root->dirty_root < 0)
+                printf("%d", root->dirty_root);
+        else
+                ext_print_r(root, root->dirty_root);
+        printf("\n");
+}
+#endif
+
+/* Find an arbitrary DIRTY node in "dirty" at or below node "i" which
+ * corresponds with "offset" into the list. "bit" corresponds with
+ * the bit tested to determine the right or left child of "i".
+ * Returns the offset corresponding with the DIRTY node
+ */
+/* Find an arbitrary DIRTY node in "dirty" at or below node "i" which
+ * corresponds with "offset" into the list.  "bit" corresponds with
+ * the bit tested to determine the right or left child of "i".
+ * Returns the offset corresponding with the DIRTY node
+ */
+static Py_ssize_t
+ext_find_dirty(PyBListRoot *root, Py_ssize_t offset, int bit, Py_ssize_t i)
+{
+        assert(root->dirty);
+        assert(i >= 0);
+        assert(bit);
+
+        if (root->dirty[i] == DIRTY)
+                return offset;
+        if (root->dirty[i] >= 0)
+                return ext_find_dirty(root, offset, bit >> 1, root->dirty[i]);
+
+        /* Left subtree is CLEAN, so a DIRTY node must be on the right */
+        if (root->dirty[i+1] == DIRTY)
+                return offset | bit;
+        assert(root->dirty[i+1] >= 0);
+        return ext_find_dirty(root, offset | bit, bit >> 1, root->dirty[i+1]);
+}
+
+/* Determine if "offset" is DIRTY, returning a Boolean value. Sets
+ * "dirty_offset" to an arbitrary DIRTY node located along the way,
+ * even if the requested offset is CLEAN. "dirty_offset" will be set
+ * to -1 if there are no DIRTY nodes. Worst-case O(log n)
+ */
+/* Determine if "offset" is DIRTY, returning a Boolean value.  Sets
+ * "dirty_offset" to an arbitrary DIRTY node located along the way,
+ * even if the requested offset is CLEAN.  "dirty_offset" will be set
+ * to -1 if there are no DIRTY nodes.  Worst-case O(log n)
+ */
+static int
+ext_is_dirty(PyBListRoot *root, Py_ssize_t offset, Py_ssize_t *dirty_offset)
+{
+        Py_ssize_t i, parent;
+        int bit;
+
+        if (root->dirty == NULL || root->dirty_root < 0) {
+                /* Trie is a single sentinel: uniformly CLEAN or DIRTY */
+                *dirty_offset = -1;
+                return root->dirty_root == DIRTY;
+        }
+        i = root->dirty_root;
+        parent = -1;
+        offset /= INDEX_FACTOR;
+        bit = highest_set_bit((root->n-1) / INDEX_FACTOR);
+
+#ifdef Py_DEBUG
+        assert(root->last_n == root->n);
+#endif
+
+        /* Walk down the trie following offset's bits until a sentinel */
+        do {
+                assert(bit);
+                parent = i;
+                if (!(offset & bit)) {
+                        assert (i >= 0 && i < root->dirty_length);
+                        i = root->dirty[i];
+                } else {
+                        assert (i >= 0 && i+1 < root->dirty_length);
+                        i = root->dirty[i+1];
+                }
+                bit >>= 1;
+        } while (i >= 0);
+
+        if (i != DIRTY) {
+                /* Offset is CLEAN; hunt for some DIRTY node via the
+                 * sibling subtree of the last node visited. */
+                if (!bit) bit = 1; else bit <<= 1;
+                *dirty_offset = INDEX_FACTOR *
+                        ext_find_dirty(root, (offset ^ bit) & ~(bit-1), bit,
+                                       parent);
+                assert(*dirty_offset >= 0);
+                assert(*dirty_offset < root->n);
+        }
+
+        return i == DIRTY;
+}
+
+/* The length of the BList may have changed. Adjust the lengths of
+ * the extension data structures as needed */
+/* The length of the BList may have changed.  Adjust the lengths of
+ * the extension data structures as needed.  Returns 0 on success,
+ * -1 on memory failure (leaving index_allocated at its old value). */
+static int
+ext_grow_index(PyBListRoot *root)
+{
+        Py_ssize_t oldl = root->index_allocated;
+        if (!root->index_allocated) {
+                /* First allocation: size exactly for the current n */
+                if (root->index_list) PyMem_Free(root->index_list);
+                if (root->offset_list) PyMem_Free(root->offset_list);
+                if (root->setclean_list) PyMem_Free(root->setclean_list);
+
+                root->index_list = NULL;
+                root->offset_list = NULL;
+                root->setclean_list = NULL;
+
+                root->index_allocated = (root->n-1) / INDEX_FACTOR + 1;
+                root->index_list = PyMem_New(PyBList *, root->index_allocated);
+                if (!root->index_list) {
+                fail:
+                        root->index_allocated = oldl;
+                        return -1;
+                }
+                root->offset_list = PyMem_New(Py_ssize_t, root->index_allocated);
+                if (!root->offset_list) goto fail;
+                root->setclean_list
+                        = PyMem_New(unsigned,SETCLEAN_LEN(root->index_allocated));
+                if (!root->setclean_list) goto fail;
+        } else {
+                /* Grow geometrically until the index covers n */
+                void *tmp;
+
+                do {
+                        root->index_allocated *= 2;
+                } while ((root->n-1) / INDEX_FACTOR + 1 > root->index_allocated);
+                tmp = root->index_list;
+                PyMem_Resize(tmp, PyBList *, root->index_allocated);
+                if (!tmp) goto fail;
+                root->index_list = tmp;
+
+                tmp = root->offset_list;
+                PyMem_Resize(tmp, Py_ssize_t, root->index_allocated);
+                if (!tmp) goto fail;
+                root->offset_list = tmp;
+
+                tmp = root->setclean_list;
+                PyMem_Resize(tmp, unsigned, SETCLEAN_LEN(root->index_allocated));
+                if (!tmp) goto fail;
+                root->setclean_list = tmp;
+        }
+        return 0;
+}
+
+#define SET_OK_NO 0 /* entries may not be written through the index */
+#define SET_OK_YES 1 /* writable only while each node's refcount is 1 */
+#define SET_OK_ALL 2 /* caller asserts every node is exclusively owned */
+
+/* Recursively populate index_list/offset_list (and the setclean_list
+ * bits, unless set_ok is SET_OK_ALL) for the subtree "self", whose
+ * first element sits at list offset "i". */
+BLIST_LOCAL(void)
+ext_index_r(PyBListRoot *root, PyBList *self, Py_ssize_t i, int set_ok)
+{
+ int j;
+ if (self != (PyBList *)root) {
+ assert(!(set_ok == SET_OK_ALL && Py_REFCNT(self) != 1));
+ /* A shared node (refcount > 1) must not be written in place. */
+ set_ok = set_ok && (Py_REFCNT(self) == 1);
+ }
+
+ if (self->leaf) {
+ /* Round i up to the first index entry covered by this leaf. */
+ Py_ssize_t ioffset = i / INDEX_FACTOR;
+ if (ioffset * INDEX_FACTOR < i) ioffset++;
+ do {
+ assert(ioffset < root->index_allocated);
+ root->index_list[ioffset] = self;
+ root->offset_list[ioffset] = i;
+ if (set_ok != SET_OK_ALL) {
+ if (Py_REFCNT(self) > 1 || !set_ok)
+ CLEAR_BIT(root->setclean_list,ioffset);
+ else
+ SET_BIT(root->setclean_list, ioffset);
+ }
+ } while (++ioffset * INDEX_FACTOR < i + self->n);
+ i += self->n;
+ } else {
+ for (j = 0; j < self->num_children; j++) {
+ PyBList *child = (PyBList *) self->children[j];
+ ext_index_r(root, child, i, set_ok);
+ i += child->n;
+ }
+ }
+}
+
+BLIST_LOCAL(void)
+ext_index_all_dirty_r(PyBListRoot *root, PyBList *self, Py_ssize_t end,
+                      Py_ssize_t child_index, Py_ssize_t child_n, int set_ok)
+{
+        /* Re-index every child of "self" starting at child_index, until
+         * the running element offset (child_n) reaches "end". */
+        Py_ssize_t k = child_index;
+
+        while (k < self->num_children && child_n < end) {
+                PyBList *kid = (PyBList *) self->children[k];
+                ext_index_r(root, kid, child_n, set_ok);
+                child_n += kid->n;
+                k++;
+        }
+}
+
+/* Walk the dirty tree, re-indexing only the offset ranges recorded as
+ * DIRTY.  (dirty_index, dirty_offset, dirty_length) describe the
+ * current dirty subtree; (self, child_index, child_n) track the
+ * corresponding position in the BList itself. */
+BLIST_LOCAL(void)
+ext_index_all_r(PyBListRoot *root,
+ Py_ssize_t dirty_index, Py_ssize_t dirty_offset, Py_ssize_t dirty_length,
+ PyBList *self, Py_ssize_t child_index, Py_ssize_t child_n,
+ int set_ok)
+{
+ if (dirty_index <= CLEAN) {
+ /* Entirely clean: nothing to re-index here. */
+ return;
+ } else if (dirty_index == DIRTY) {
+ /* Entirely dirty: re-index the whole range. */
+ ext_index_all_dirty_r(root, self, dirty_offset + dirty_length,
+ child_index, child_n, set_ok);
+ return;
+ }
+
+ if (!self->leaf) {
+ /* Skip children wholly before the dirty range, then descend
+ * into a single child if it covers the range (or is last). */
+ while (child_index < self->num_children) {
+ PyBList *child = (PyBList *) self->children[child_index];
+ if (child_n + child->n > dirty_offset)
+ break;
+ child_n += child->n;
+ child_index++;
+ }
+
+ if (child_index+1 == self->num_children
+ || (((PyBList *)self->children[child_index])->n + child_n
+ <= dirty_offset + dirty_length)) {
+ self = (PyBList *) self->children[child_index];
+ child_index = 0;
+ }
+ }
+
+ /* Recurse into both halves of the dirty subtree. */
+ dirty_length /= 2;
+ ext_index_all_r(root,
+ root->dirty[dirty_index], dirty_offset, dirty_length,
+ self, child_index, child_n, set_ok);
+ dirty_offset += dirty_length;
+ ext_index_all_r(root,
+ root->dirty[dirty_index+1], dirty_offset, dirty_length,
+ self, child_index, child_n, set_ok);
+}
+
+/* Make everything clean in O(n) time. Any operation that alters the
+ * list and already takes Omega(n) time should call one of these functions.
+ */
+BLIST_LOCAL(void)
+_ext_index_all(PyBListRoot *root, int set_ok_all)
+{
+ Py_ssize_t ioffset_max = (root->n-1) / INDEX_FACTOR + 1;
+ int set_ok;
+
+ /* NOTE(review): ext_grow_index()'s return value is ignored here; on
+ * allocation failure the index arrays stay undersized — verify the
+ * callers tolerate that before relying on this path. */
+ if (root->index_allocated < ioffset_max)
+ ext_grow_index(root);
+ if (set_ok_all) {
+ set_ok = SET_OK_ALL;
+ /* All-ones bitmap: every index entry is marked set-clean. */
+ memset(root->setclean_list, 255,
+ SETCLEAN_LEN(root->index_allocated) * sizeof(unsigned));
+ } else
+ set_ok = SET_OK_YES;
+
+ /* dirty_length starts as a power of two >= n (in elements). */
+ ext_index_all_r(root, root->dirty_root, 0,
+ highest_set_bit((root->n-1)) * 2,
+ (PyBList*)root, 0, 0, set_ok);
+
+#ifdef Py_DEBUG
+ root->last_n = root->n;
+#endif
+ if (root->dirty_root >= 0)
+ ext_free(root, root->dirty_root);
+ root->dirty_root = set_ok_all ? CLEAN_RW : CLEAN;
+}
+#define ext_index_all(root) do { if (!(root)->leaf) _ext_index_all((root), 0); } while (0)
+#define ext_index_set_all(root) do { if (!(root)->leaf) _ext_index_all((root), 1); } while (0)
+
+/* Discard the existing dirty tree, mark the entire list DIRTY, and
+ * rebuild the whole index from scratch. */
+BLIST_LOCAL_INLINE(void)
+_ext_reindex_all(PyBListRoot *root, int set_ok_all)
+{
+ if (root->dirty_root >= 0)
+ ext_free(root, root->dirty_root);
+ root->dirty_root = DIRTY;
+
+ _ext_index_all(root, set_ok_all);
+}
+
+#define ext_reindex_all(root) do { if (!(root)->leaf) _ext_reindex_all((root), 0); } while (0)
+#define ext_reindex_set_all(root) do { if (!(root)->leaf) _ext_reindex_all((root), 1); } while (0)
+
+/* We found a particular node at a certain offset. Add it to the
+ * index and mark it clean.
+ *
+ * "p" is the leaf holding elements [offset, offset + p->n); every
+ * index entry covered by that range is pointed at p.  "setclean"
+ * records whether writes through the index entry are permitted.
+ * On allocation failure the whole index is torn down. */
+BLIST_LOCAL(void)
+ext_mark_clean(PyBListRoot *root, Py_ssize_t offset, PyBList *p, int setclean)
+{
+ Py_ssize_t ioffset = offset / INDEX_FACTOR;
+
+ assert(offset < root->n);
+
+ /* Round up to the first index entry at or after "offset". */
+ while (ioffset * INDEX_FACTOR < offset)
+ ioffset++;
+ for (;ioffset * INDEX_FACTOR < offset + p->n; ioffset++) {
+ ext_mark((PyBList*)root, ioffset * INDEX_FACTOR, CLEAN);
+
+ if (ioffset >= root->index_allocated) {
+ int err = ext_grow_index(root);
+ /* Bug fix: ext_grow_index() returns -1 on failure, so
+ * the previous test (err < -1) was never true and a
+ * failed grow fell through to an out-of-bounds write
+ * on index_list below. */
+ if (err < 0) {
+ ext_dealloc(root);
+ return;
+ }
+ }
+
+ assert(ioffset >= 0);
+ assert(ioffset < root->index_allocated);
+ root->index_list[ioffset] = p;
+ root->offset_list[ioffset] = offset;
+
+ if (setclean)
+ SET_BIT(root->setclean_list, ioffset);
+ else
+ CLEAR_BIT(root->setclean_list, ioffset);
+ }
+}
+
+/* Lookup the node at offset i and mark it clean.
+ * Returns a borrowed reference to the element at index i. */
+static PyObject *ext_make_clean(PyBListRoot *root, Py_ssize_t i)
+{
+ PyObject *rv;
+ Py_ssize_t so_far;
+ Py_ssize_t offset = 0;
+ PyBList *p = (PyBList *)root;
+ Py_ssize_t j = i;
+ int k;
+ int setclean = 1;
+ /* Descend to the leaf containing index i, tracking the leaf's
+ * absolute offset.  Any shared node on the path makes the entry
+ * unsafe for in-place writes. */
+ do {
+ blist_locate(p, j, (PyObject **) &p, &k, &so_far);
+ if (Py_REFCNT(p) > 1)
+ setclean = 0;
+ offset += so_far;
+ j -= so_far;
+ } while (!p->leaf);
+
+ rv = p->children[j];
+ ext_mark_clean(root, offset, p, setclean);
+ return rv;
+}
+
+/************************************************************************
+ * Functions for manipulating the tree
+ */
+
+/* Child k has underflowed. Borrow from k+1 */
+static void blist_borrow_right(PyBList *self, int k)
+{
+ PyBList *restrict p = (PyBList *) self->children[k];
+ PyBList *restrict right;
+ unsigned total;
+ unsigned split;
+ unsigned migrate;
+
+ invariants(self, VALID_RW);
+
+ /* Rebalance so each sibling ends up with about half the children. */
+ right = blist_prepare_write(self, k+1);
+ total = p->num_children + right->num_children;
+ split = total / 2;
+ migrate = split - p->num_children;
+
+ assert(split >= HALF);
+ assert(total-split >= HALF);
+
+ /* Append the right sibling's leftmost children to p, then close
+ * the gap in the right sibling. */
+ copy(p, p->num_children, right, 0, migrate);
+ p->num_children += migrate;
+ shift_left(right, migrate, migrate);
+ right->num_children -= migrate;
+ blist_adjust_n(right);
+ blist_adjust_n(p);
+
+ _void();
+}
+
+/* Child k has underflowed. Borrow from k-1 */
+static void blist_borrow_left(PyBList *self, int k)
+{
+ PyBList *restrict p = (PyBList *) self->children[k];
+ PyBList *restrict left;
+ unsigned total;
+ unsigned split;
+ unsigned migrate;
+
+ invariants(self, VALID_RW);
+
+ /* Rebalance so each sibling ends up with about half the children. */
+ left = blist_prepare_write(self, k-1);
+ total = p->num_children + left->num_children;
+ split = total / 2;
+ migrate = split - p->num_children;
+
+ assert(split >= HALF);
+ assert(total-split >= HALF);
+
+ /* Make room at the front of p, then move the left sibling's
+ * rightmost children into the gap. */
+ shift_right(p, 0, migrate);
+ copy(p, 0, left, left->num_children - migrate, migrate);
+ p->num_children += migrate;
+ left->num_children -= migrate;
+ blist_adjust_n(left);
+ blist_adjust_n(p);
+
+ _void();
+}
+
+/* Child k has underflowed. Merge with k+1 */
+static void blist_merge_right(PyBList *self, int k)
+{
+ int i;
+ PyBList *restrict p = (PyBList *) self->children[k];
+ PyBList *restrict p2 = (PyBList *) self->children[k+1];
+
+ invariants(self, VALID_RW);
+
+ /* copy() does not take references, so INCREF each moved child
+ * before blist_forget_child() drops the old holder. */
+ copy(p, p->num_children, p2, 0, p2->num_children);
+ for (i = 0; i < p2->num_children; i++)
+ Py_INCREF(p2->children[i]);
+ p->num_children += p2->num_children;
+ blist_forget_child(self, k+1);
+ blist_adjust_n(p);
+
+ _void();
+}
+
+/* Child k has underflowed. Merge with k-1 */
+static void blist_merge_left(PyBList *self, int k)
+{
+ int i;
+ PyBList *restrict p = (PyBList *) self->children[k];
+ PyBList *restrict p2 = (PyBList *) self->children[k-1];
+
+ invariants(self, VALID_RW);
+
+ /* Prepend the left sibling's children to p.  copy() does not take
+ * references, so INCREF each moved child before forgetting k-1. */
+ shift_right(p, 0, p2->num_children);
+ p->num_children += p2->num_children;
+ copy(p, 0, p2, 0, p2->num_children);
+ for (i = 0; i < p2->num_children; i++)
+ Py_INCREF(p2->children[i]);
+ blist_forget_child(self, k-1);
+ blist_adjust_n(p);
+
+ _void();
+}
+
+/* Collapse the tree, if possible.
+ * Returns 1 if the tree lost a level, 0 otherwise. */
+BLIST_LOCAL(int)
+blist_collapse(PyBList *self)
+{
+ PyBList *p;
+ invariants(self, VALID_RW|VALID_COLLAPSE);
+
+ /* Only an interior node with exactly one child can collapse. */
+ if (self->num_children != 1 || self->leaf) {
+ blist_adjust_n(self);
+ return _int(0);
+ }
+
+ /* Absorb the lone child into self. */
+ p = blist_PREPARE_WRITE(self, 0);
+ blist_become_and_consume(self, p);
+ check_invariants(self);
+ return _int(1);
+}
+
+/* Check if children k-1, k, or k+1 have underflowed.
+ *
+ * If so, move things around until self is the root of a valid
+ * subtree again, possibly requiring collapsing the tree.
+ *
+ * Always calls self._adjust_n() (often via self.__collapse()).
+ */
+BLIST_LOCAL(int)
+blist_underflow(PyBList *self, int k)
+{
+ invariants(self, VALID_RW|VALID_COLLAPSE);
+
+ if (self->leaf) {
+ blist_adjust_n(self);
+ return _int(0);
+ }
+
+ if (k < self->num_children) {
+ PyBList *restrict p = blist_prepare_write(self, k);
+ int shrt = HALF - p->num_children;
+
+ /* Keep borrowing/merging until child k is no longer short. */
+ while (shrt > 0) {
+ /* Borrow only from a sibling rich enough to stay valid. */
+ if (k+1 < self->num_children
+ && ((PyBList *)self->children[k+1])->num_children >= HALF + shrt)
+ blist_borrow_right(self, k);
+ else if (k > 0
+ && (((PyBList *)self->children[k-1])->num_children
+ >= HALF + shrt))
+ blist_borrow_left(self, k);
+ else if (k+1 < self->num_children)
+ blist_merge_right(self, k);
+ else if (k > 0)
+ blist_merge_left(self, k--);
+ else /* No siblings for p */
+ return _int(blist_collapse(self));
+
+ p = blist_prepare_write(self, k);
+ shrt = HALF - p->num_children;
+ }
+ }
+
+ /* The merges above may have left a neighbor short; fix it too. */
+ if (k > 0 && ((PyBList *)self->children[k-1])->num_children < HALF) {
+ int collapse = blist_underflow(self, k-1);
+ if (collapse) return _int(collapse);
+ }
+
+ if (k+1 < self->num_children
+ && ((PyBList *)self->children[k+1])->num_children < HALF) {
+ int collapse = blist_underflow(self, k+1);
+ if (collapse) return _int(collapse);
+ }
+
+ return _int(blist_collapse(self));
+}
+
+/* Insert 'item', which may be a subtree, at index k. */
+BLIST_LOCAL(PyBList *)
+blist_insert_here(PyBList *self, int k, PyObject *item)
+{
+ /* Since the subtree may have fewer than half elements, we may
+ * need to merge it after insertion.
+ *
+ * This function may cause self to overflow. If it does, it will
+ * take the upper half of its children and put them in a new
+ * subtree and return the subtree. The caller is responsible for
+ * inserting this new subtree just to the right of self.
+ *
+ * Otherwise, it returns None.
+ */
+
+ PyBList *restrict sibling;
+
+ invariants(self, VALID_RW|VALID_OVERFLOW);
+ assert(k >= 0);
+
+ /* Common case: room available, no overflow needed. */
+ if (self->num_children < LIMIT) {
+ int collapse;
+
+ shift_right(self, k, 1);
+ self->num_children++;
+ self->children[k] = item;
+ collapse = blist_underflow(self, k);
+ assert(!collapse); (void) collapse;
+ return _blist(NULL);
+ }
+
+ /* Full: split off the upper half into a new sibling, then insert
+ * into whichever half the index falls in. */
+ sibling = blist_new_sibling(self);
+
+ if (k < HALF) {
+ int collapse;
+
+ shift_right(self, k, 1);
+ self->num_children++;
+ self->children[k] = item;
+ collapse = blist_underflow(self, k);
+ assert(!collapse); (void) collapse;
+ } else {
+ int collapse;
+
+ shift_right(sibling, k - HALF, 1);
+ sibling->num_children++;
+ sibling->children[k - HALF] = item;
+ collapse = blist_underflow(sibling, k - HALF);
+ assert(!collapse); (void) collapse;
+ blist_adjust_n(sibling);
+ }
+
+ blist_adjust_n(self);
+ check_invariants(self);
+ return _blist(sibling);
+}
+
+/* Recurse depth layers, then insert subtree on the left or right */
+BLIST_LOCAL(PyBList *)
+blist_insert_subtree(PyBList *self, int side, PyBList *subtree, int depth)
+{
+ /* This function may cause an overflow.
+ *
+ * depth == 0 means insert the subtree as a child of self.
+ * depth == 1 means insert the subtree as a grandchild, etc.
+ *
+ * side == 0 inserts at the left edge, side == -1 at the right.
+ */
+
+ PyBList *sibling;
+ invariants(self, VALID_RW|VALID_OVERFLOW);
+ assert(side == 0 || side == -1);
+
+ /* Account for the incoming elements up front. */
+ self->n += subtree->n;
+
+ if (depth) {
+ PyBList *restrict p = blist_prepare_write(self, side);
+ PyBList *overflow = blist_insert_subtree(p, side,
+ subtree, depth-1);
+ if (!overflow) return _blist(NULL);
+ /* The child split: its overflow goes just right of it. */
+ if (side == 0)
+ side = 1;
+ subtree = overflow;
+ }
+
+ if (side < 0)
+ side = self->num_children;
+
+ sibling = blist_insert_here(self, side, (PyObject *) subtree);
+
+ return _blist(sibling);
+}
+
+/* Handle the case where a user-visible node overflowed.
+ *
+ * Grows the tree by one level, making "overflow" the new right
+ * child.  Returns -1 if the tree grew, 0 otherwise (including the
+ * best-effort bail-out when no memory is available). */
+BLIST_LOCAL(int)
+blist_overflow_root(PyBList *self, PyBList *overflow)
+{
+ PyBList *child;
+
+ invariants(self, VALID_RW);
+
+ if (!overflow) return _int(0);
+ child = blist_new();
+ if (!child) {
+ decref_later((PyObject*)overflow);
+ return _int(0);
+ }
+ /* Push self's current contents down into a new child. */
+ blist_become_and_consume(child, self);
+ self->children[0] = (PyObject *)child;
+ self->children[1] = (PyObject *)overflow;
+ self->num_children = 2;
+ self->leaf = 0;
+ blist_adjust_n(self);
+ return _int(-1);
+}
+
+/* Concatenate two trees of potentially different heights.
+ * Both arguments must be exclusively owned; ownership transfers to
+ * the returned tree. */
+BLIST_LOCAL(PyBList *)
+blist_concat_blist(PyBList *left_subtree, PyBList *right_subtree,
+ int height_diff, int *padj)
+{
+ /* The parameters are the two trees, and the difference in their
+ * heights expressed as left_height - right_height.
+ *
+ * Returns a tuple of the new, combined tree, and an integer.
+ * The integer expresses the height difference between the new
+ * tree and the taller of the left and right subtrees. It will
+ * be 0 if there was no change, and 1 if the new tree is taller
+ * by 1.
+ */
+
+ int adj = 0;
+ PyBList *overflow;
+ PyBList *root;
+
+ assert(Py_REFCNT(left_subtree) == 1);
+ assert(Py_REFCNT(right_subtree) == 1);
+
+ if (height_diff == 0) {
+ int collapse;
+
+ /* Equal heights: make both trees children of a new root. */
+ root = blist_new();
+ if (!root) {
+ decref_later((PyObject*)left_subtree);
+ decref_later((PyObject*)right_subtree);
+ return NULL;
+ }
+ root->children[0] = (PyObject *) left_subtree;
+ root->children[1] = (PyObject *) right_subtree;
+ root->leaf = 0;
+ root->num_children = 2;
+ collapse = blist_underflow(root, 0);
+ if (!collapse)
+ collapse = blist_underflow(root, 1);
+ if (!collapse)
+ adj = 1;
+ overflow = NULL;
+ } else if (height_diff > 0) { /* Left is larger */
+ root = left_subtree;
+ overflow = blist_insert_subtree(root, -1, right_subtree,
+ height_diff - 1);
+ } else { /* Right is larger */
+ root = right_subtree;
+ overflow = blist_insert_subtree(root, 0, left_subtree,
+ -height_diff - 1);
+ }
+
+ /* blist_overflow_root returns -1 if the tree grew a level. */
+ adj += -blist_overflow_root(root, overflow);
+ if (padj) *padj = adj;
+
+ return root;
+}
+
+/* Concatenate two subtrees of potentially different heights.
+ *
+ * Depths are measured downward from the parent, not upward from the
+ * leaves.  On return, *pdepth (if non-NULL) holds the combined
+ * subtree's depth in the parent. */
+BLIST_LOCAL(PyBList *)
+blist_concat_subtrees(PyBList *left_subtree, int left_depth,
+                      PyBList *right_subtree, int right_depth, int *pdepth)
+{
+        PyBList *joined;
+        int deepest;
+
+        if (left_depth > right_depth)
+                deepest = left_depth;
+        else
+                deepest = right_depth;
+
+        /* blist_concat_blist wants left_height - right_height, which is
+         * the negated depth difference. */
+        joined = blist_concat_blist(left_subtree, right_subtree,
+                                    right_depth - left_depth, pdepth);
+        if (pdepth != NULL)
+                *pdepth = deepest - *pdepth;
+        return joined;
+}
+
+/* Concatenate two roots of potentially different heights.
+ *
+ * Heights are measured from each root down to its leaf nodes.  On
+ * return, *pheight (if non-NULL) holds the new root's height. */
+BLIST_LOCAL(PyBList *)
+blist_concat_roots(PyBList *left_root, int left_height,
+                   PyBList *right_root, int right_height, int *pheight)
+{
+        int taller = (left_height > right_height)
+                ? left_height : right_height;
+        PyBList *joined = blist_concat_blist(left_root, right_root,
+                                             left_height - right_height,
+                                             pheight);
+
+        /* blist_concat_blist reports only the height *change*; convert
+         * it to an absolute height. */
+        if (pheight != NULL)
+                *pheight = taller + *pheight;
+        return joined;
+}
+
+/* Concatenate two roots whose heights are not yet known, measuring
+ * them first. */
+BLIST_LOCAL(PyBList *)
+blist_concat_unknown_roots(PyBList *left_root, PyBList *right_root)
+{
+        int left_height = blist_get_height(left_root);
+        int right_height = blist_get_height(right_root);
+
+        return blist_concat_roots(left_root, left_height,
+                                  right_root, right_height, NULL);
+}
+
+/* Child at position k is too short by "depth". Fix it.
+ * Detaches the short subtree and re-inserts it into a neighbor at
+ * the proper depth, then repairs any resulting underflow. */
+BLIST_LOCAL(int)
+blist_reinsert_subtree(PyBList *self, int k, int depth)
+{
+ PyBList *subtree;
+
+ invariants(self, VALID_RW);
+
+ assert(Py_REFCNT(self->children[k]) == 1);
+ subtree = (PyBList *) self->children[k];
+ shift_left(self, k+1, 1);
+ self->num_children--;
+
+ if (self->num_children > k) {
+ /* Merge right */
+ PyBList *p = blist_prepare_write(self, k);
+ PyBList *overflow = blist_insert_subtree(p, 0,
+ subtree, depth-1);
+ if (overflow) {
+ shift_right(self, k+1, 1);
+ self->num_children++;
+ self->children[k+1] = (PyObject *) overflow;
+ }
+ } else {
+ /* Merge left */
+ PyBList *p = blist_prepare_write(self, k-1);
+ PyBList *overflow = blist_insert_subtree(p, -1,
+ subtree, depth-1);
+ if (overflow) {
+ shift_right(self, k, 1);
+ self->num_children++;
+ self->children[k] = (PyObject *) overflow;
+ }
+ }
+
+ return _int(blist_underflow(self, k));
+}
+
+/************************************************************************
+ * The main insert and deletion operations
+ */
+
+/* Recursive to find position i, and insert item just there.
+ * Takes a new reference to item.  Returns an overflow sibling (to be
+ * inserted right of self by the caller) or NULL. */
+BLIST_LOCAL(PyBList *)
+ins1(PyBList *self, Py_ssize_t i, PyObject *item)
+{
+ PyBList *ret;
+ PyBList *restrict p;
+ int k;
+ Py_ssize_t so_far;
+ PyBList *overflow;
+
+ invariants(self, VALID_RW|VALID_OVERFLOW);
+
+ if (self->leaf) {
+ Py_INCREF(item);
+
+ /* Speed up the common case */
+ if (self->num_children < LIMIT) {
+ shift_right(self, i, 1);
+ self->num_children++;
+ self->n++;
+ self->children[i] = item;
+ return _blist(NULL);
+ }
+
+ return _blist(blist_insert_here(self, i, item));
+ }
+
+ blist_locate(self, i, (PyObject **) &p, &k, &so_far);
+
+ /* Count the new element before recursing into child k. */
+ self->n += 1;
+ p = blist_prepare_write(self, k);
+ overflow = ins1(p, i - so_far, item);
+
+ if (!overflow) ret = NULL;
+ else ret = blist_insert_here(self, k+1, (PyObject *) overflow);
+
+ return _blist(ret);
+}
+
+/* Append all of other's elements to self.  Returns 0 on success,
+ * -1 on allocation failure. */
+BLIST_LOCAL(int)
+blist_extend_blist(PyBList *self, PyBList *other)
+{
+ PyBList *right, *left, *root;
+
+ /* Special case for speed */
+ invariants(self, VALID_RW);
+
+ /* Both small leaves: just copy the references across. */
+ if (self->leaf && other->leaf && self->n + other->n <= LIMIT) {
+ copyref(self, self->n, other, 0, other->n);
+ self->n += other->n;
+ self->num_children = self->n;
+ return _int(0);
+ }
+
+ /* Make not-user-visible roots for the subtrees */
+ right = blist_copy(other); /* XXX not checking return values */
+ left = blist_new();
+ if (left == NULL)
+ return _int(-1);
+ blist_become_and_consume(left, self);
+
+ if (left->leaf && right->leaf) {
+ /* Two leaves: balance them and hang both under self. */
+ balance_leafs(left, right);
+ self->children[0] = (PyObject *) left;
+ self->children[1] = (PyObject *) right;
+ self->num_children = 2;
+ self->leaf = 0;
+ blist_adjust_n(self);
+ return _int(0);
+ }
+
+ root = blist_concat_unknown_roots(left, right);
+ blist_become_and_consume(self, root);
+ SAFE_DECREF(root);
+ return _int(0);
+}
+
+/* Recursive version of __delslice__: delete elements [i, j). */
+static int blist_delslice(PyBList *self, Py_ssize_t i, Py_ssize_t j)
+{
+ /* This may cause self to collapse. It returns 0 if it did
+ * not. If a collapse occured, it returns a positive integer
+ * indicating how much shorter this subtree is compared to when
+ * _delslice() was entered.
+ *
+ * As a special exception, it may return 0 if the entire subtree
+ * is deleted.
+ *
+ * Additionally, this function may cause an underflow.
+ */
+
+ PyBList *restrict p, *restrict p2;
+ int k, k2, depth;
+ Py_ssize_t so_far, so_far2, low;
+ int collapse_left, collapse_right, deleted_k, deleted_k2;
+
+ invariants(self, VALID_RW | VALID_PARENT | VALID_COLLAPSE);
+ check_invariants(self);
+
+ if (j > self->n)
+ j = self->n;
+
+ if (i == j)
+ return _int(0);
+
+ if (self->leaf) {
+ blist_forget_children2(self, i, j);
+ self->n = self->num_children;
+ return _int(0);
+ }
+
+ if (i == 0 && j >= self->n) {
+ /* Delete everything. */
+ blist_CLEAR(self);
+ return _int(0);
+ }
+
+ /* Locate the children containing the first and last deleted
+ * elements. */
+ blist_locate(self, i, (PyObject **) &p, &k, &so_far);
+ blist_locate(self, j-1, (PyObject **) &p2, &k2, &so_far2);
+
+ if (k == k2) {
+ /* All of the deleted elements are contained under a single
+ * child of this node. Recurse and check for a short
+ * subtree and/or underflow
+ */
+
+ assert(so_far == so_far2);
+ p = blist_prepare_write(self, k);
+ depth = blist_delslice(p, i - so_far, j - so_far);
+ if (p->n == 0) {
+ SAFE_DECREF(p);
+ shift_left(self, k+1, 1);
+ self->num_children--;
+ return _int(blist_collapse(self));
+ }
+ if (!depth)
+ return _int(blist_underflow(self, k));
+ return _int(blist_reinsert_subtree(self, k, depth));
+ }
+
+ /* Deleted elements are in a range of child elements. There
+ * will be:
+ * - a left child (k) where we delete some (or all) of its children
+ * - a right child (k2) where we delete some (or all) of it children
+ * - children in between who are deleted entirely
+ */
+
+ /* Call _delslice recursively on the left and right */
+ p = blist_prepare_write(self, k);
+ collapse_left = blist_delslice(p, i - so_far, j - so_far);
+ p2 = blist_prepare_write(self, k2);
+ low = i-so_far2 > 0 ? i-so_far2 : 0;
+ collapse_right = blist_delslice(p2, low, j - so_far2);
+
+ deleted_k = 0; /* False */
+ deleted_k2 = 0; /* False */
+
+ /* Delete [k+1:k2] */
+ blist_forget_children2(self, k+1, k2);
+ k2 = k+1;
+
+ /* Delete k1 and k2 if they are empty */
+ if (!((PyBList *)self->children[k2])->n) {
+ decref_later((PyObject *) self->children[k2]);
+ shift_left(self, k2+1, 1);
+ self->num_children--;
+ deleted_k2 = 1; /* True */
+ }
+ if (!((PyBList *)self->children[k])->n) {
+ decref_later(self->children[k]);
+ shift_left(self, k+1, 1);
+ self->num_children--;
+ deleted_k = 1; /* True */
+ }
+
+ if (deleted_k && deleted_k2) /* # No messy subtrees. Good. */
+ return _int(blist_collapse(self));
+
+ /* The left and right may have collapsed and/or be in an
+ * underflow state. Clean them up. Work on fixing collapsed
+ * trees first, then worry about underflows.
+ */
+
+ if (!deleted_k && !deleted_k2 && collapse_left && collapse_right) {
+ /* Both exist and collapsed. Merge them into one subtree. */
+ PyBList *left, *right, *subtree;
+
+ left = (PyBList *) self->children[k];
+ right = (PyBList *) self->children[k+1];
+ shift_left(self, k+1, 1);
+ self->num_children--;
+ subtree = blist_concat_subtrees(left, collapse_left,
+ right, collapse_right,
+ &depth);
+ self->children[k] = (PyObject *) subtree;
+ } else if (deleted_k) {
+ /* Only the right potentially collapsed, point there. */
+ depth = collapse_right;
+ /* k already points to the old k2, since k was deleted */
+ } else if (!deleted_k2 && !collapse_left) {
+ /* Only the right potentially collapsed, point there. */
+ k = k + 1;
+ depth = collapse_right;
+ } else {
+ depth = collapse_left;
+ }
+
+ /* At this point, we have a potentially short subtree at k,
+ * with depth "depth".
+ */
+
+ if (!depth || self->num_children == 1) {
+ /* Doesn't need merging, or no siblings to merge with */
+ return _int(depth + blist_underflow(self, k));
+ }
+
+ /* We have a short subtree at k, and we have other children */
+ return _int(blist_reinsert_subtree(self, k, depth));
+}
+
+BLIST_LOCAL(PyObject *)
+blist_get1(PyBList *self, Py_ssize_t i)
+{
+ PyBList *p;
+ int k;
+ Py_ssize_t so_far;
+
+ invariants(self, VALID_PARENT);
+
+ if (self->leaf)
+ return _ob(self->children[i]);
+
+ blist_locate(self, i, (PyObject **) &p, &k, &so_far);
+ assert(i >= so_far);
+ return _ob(blist_get1(p, i - so_far));
+}
+
+BLIST_LOCAL(PyObject *)
+blist_pop_last_fast(PyBList *self)
+{
+ PyBList *p;
+
+ invariants(self, VALID_ROOT|VALID_RW);
+
+ for (p = self; !p->leaf;
+ p = (PyBList*)p->children[p->num_children-1]) {
+ if (p != self && Py_REFCNT(p) > 1)
+ goto cleanup_and_slow;
+ p->n--;
+ }
+
+ if ((Py_REFCNT(p) > 1 || p->num_children == HALF)
+ && self != p) {
+ PyBList *p2;
+ cleanup_and_slow:
+ for (p2 = self; p != p2;
+ p2 = (PyBList*)p2->children[p2->num_children-1])
+ p2->n++;
+ return _ob(NULL);
+ }
+ p->n--;
+ p->num_children--;
+
+ if ((self->n) % INDEX_FACTOR == 0)
+ ext_mark(self, 0, DIRTY);
+#ifdef Py_DEBUG
+ else
+ ((PyBListRoot*)self)->last_n--;
+#endif
+ return _ob(p->children[p->num_children]);
+}
+
+/* Delete the element at index i. */
+static void blist_delitem(PyBList *self, Py_ssize_t i)
+{
+ invariants(self, VALID_ROOT|VALID_RW);
+ /* Popping the last element has a cheap fast path. */
+ if (i == self->n-1) {
+ PyObject *v = blist_pop_last_fast(self);
+ if (v) {
+ decref_later(v);
+ _void();
+ return;
+ }
+ }
+
+ blist_delslice(self, i, i+1);
+ _void();
+}
+
+/* Remove the element at index i and return it to the caller with a
+ * new reference. */
+static PyObject *blist_delitem_return(PyBList *self, Py_ssize_t i)
+{
+        PyObject *item = blist_get1(self, i);
+
+        /* blist_get1 returns a borrowed reference; claim it before the
+         * deletion drops the list's own reference. */
+        Py_INCREF(item);
+        blist_delitem(self, i);
+        return item;
+}
+
+/************************************************************************
+ * BList iterator
+ */
+
+/* Initialize an iterator positioned at the first element of lst.
+ * Takes a reference to each node on the leftmost path. */
+static iter_t *iter_init(iter_t *iter, PyBList *lst)
+{
+ iter->depth = 0;
+
+ while(!lst->leaf) {
+ iter->stack[iter->depth].lst = lst;
+ /* .i records the *next* child to visit at this level. */
+ iter->stack[iter->depth++].i = 1;
+ Py_INCREF(lst);
+ lst = (PyBList *) lst->children[0];
+ }
+
+ iter->leaf = lst;
+ iter->i = 0;
+ iter->depth++;
+ Py_INCREF(lst);
+
+ return iter;
+}
+
+/* Initialize an iterator positioned at element "start" of lst.
+ * Takes a reference to each node on the path to that element. */
+static iter_t *iter_init2(iter_t *iter, PyBList *lst, Py_ssize_t start)
+{
+ iter->depth = 0;
+
+ assert(start >= 0);
+ while (!lst->leaf) {
+ PyBList *p;
+ int k;
+ Py_ssize_t so_far;
+
+ blist_locate(lst, start, (PyObject **) &p, &k, &so_far);
+ iter->stack[iter->depth].lst = lst;
+ /* .i records the *next* child to visit at this level. */
+ iter->stack[iter->depth++].i = k + 1;
+ Py_INCREF(lst);
+ lst = p;
+ start -= so_far;
+ }
+
+ iter->leaf = lst;
+ iter->i = start;
+ iter->depth++;
+ Py_INCREF(lst);
+
+ return iter;
+}
+
+/* Advance the iterator and return the next element (borrowed
+ * reference), or NULL when exhausted. */
+static PyObject *iter_next(iter_t *iter)
+{
+ PyBList *p;
+ int i;
+
+ p = iter->leaf;
+ if (p == NULL)
+ return NULL;
+
+ if (!p->leaf) {
+ /* If p is the root, it may have been a leaf when we began
+ * iterating, but turned into a non-leaf during iteration.
+ * Modifying the list during iteration results in undefined
+ * behavior, so just throw in the towel.
+ */
+
+ return NULL;
+ }
+
+ if (iter->i < p->num_children)
+ return p->children[iter->i++];
+
+ /* Current leaf exhausted: pop until a level with children left. */
+ iter->depth--;
+ do {
+ decref_later((PyObject *) p);
+ if (!iter->depth) {
+ iter->leaf = NULL;
+ return NULL;
+ }
+ p = iter->stack[--iter->depth].lst;
+ i = iter->stack[iter->depth].i;
+ } while (i >= p->num_children);
+
+ assert(iter->stack[iter->depth].lst == p);
+ iter->stack[iter->depth++].i = i+1;
+
+ /* Descend down the leftmost path of the next sibling. */
+ while (!p->leaf) {
+ p = (PyBList *) p->children[i];
+ Py_INCREF(p);
+ i = 0;
+ iter->stack[iter->depth].lst = p;
+ iter->stack[iter->depth++].i = i+1;
+ }
+
+ iter->leaf = iter->stack[iter->depth-1].lst;
+ iter->i = iter->stack[iter->depth-1].i;
+
+ return p->children[i];
+}
+
+/* Release every reference the iterator still holds: one per interior
+ * stack entry plus the current leaf. */
+static void iter_cleanup(iter_t *iter)
+{
+        int level = 0;
+
+        while (level < iter->depth - 1) {
+                decref_later((PyObject *) iter->stack[level].lst);
+                level++;
+        }
+        if (iter->depth != 0)
+                decref_later((PyObject *) iter->leaf);
+}
+
+/* tp_iter: create a forward iterator over a user-visible blist. */
+BLIST_PYAPI(PyObject *)
+py_blist_iter(PyObject *oseq)
+{
+ PyBList *seq;
+ blistiterobject *it;
+
+ if (!PyRootBList_Check(oseq)) {
+ PyErr_BadInternalCall();
+ return NULL;
+ }
+
+ seq = (PyBList *) oseq;
+
+ invariants(seq, VALID_USER);
+
+ /* Reuse a recycled iterator object when one is available. */
+ if (num_free_iters) {
+ it = free_iters[--num_free_iters];
+ _Py_NewReference((PyObject *) it);
+ } else {
+ DANGER_BEGIN;
+ it = PyObject_GC_New(blistiterobject, &PyBListIter_Type);
+ DANGER_END;
+ if (it == NULL)
+ return _ob(NULL);
+ }
+
+ if (seq->leaf) {
+ /* Speed up common case */
+ it->iter.leaf = seq;
+ it->iter.i = 0;
+ it->iter.depth = 1;
+ Py_INCREF(seq);
+ } else
+ iter_init(&it->iter, seq);
+
+ PyObject_GC_Track(it);
+ return _ob((PyObject *) it);
+}
+
+/* tp_dealloc for both iterator types: drop held references and
+ * recycle the object onto the freelist when possible. */
+static void blistiter_dealloc(PyObject *oit)
+{
+ blistiterobject *it;
+
+ assert(PyBListIter_Check(oit));
+ it = (blistiterobject *) oit;
+
+ PyObject_GC_UnTrack(it);
+ iter_cleanup(&it->iter);
+ /* Only exact PyBListIter_Type instances go on the freelist. */
+ if (num_free_iters < MAXFREELISTS
+ && (Py_TYPE(it) == &PyBListIter_Type))
+ free_iters[num_free_iters++] = it;
+ else
+ PyObject_GC_Del(it);
+ _decref_flush();
+}
+
+/* tp_traverse: let the GC visit the interior nodes on the iterator's
+ * stack and the current leaf. */
+static int blistiter_traverse(PyObject *oit, visitproc visit, void *arg)
+{
+        blistiterobject *self;
+        int level;
+
+        assert(PyBListIter_Check(oit));
+        self = (blistiterobject *) oit;
+
+        for (level = 0; level + 1 < self->iter.depth; level++)
+                Py_VISIT(self->iter.stack[level].lst);
+        if (self->iter.depth != 0)
+                Py_VISIT(self->iter.leaf);
+        return 0;
+}
+
+/* tp_iternext: return a new reference to the next element, or NULL
+ * when the iterator is exhausted. */
+static PyObject *blistiter_next(PyObject *oit)
+{
+ blistiterobject *it = (blistiterobject *) oit;
+ PyObject *obj;
+
+ /* Speed up common case */
+ PyBList *p;
+ p = it->iter.leaf;
+ if (p == NULL)
+ return NULL;
+ if (p->leaf && it->iter.i < p->num_children) {
+ obj = p->children[it->iter.i++];
+ Py_INCREF(obj);
+ return obj;
+ }
+
+ obj = iter_next(&it->iter);
+ if (obj != NULL)
+ Py_INCREF(obj);
+
+ _decref_flush();
+ return obj;
+}
+
+/* __length_hint__: number of elements remaining in the iterator. */
+BLIST_PYAPI(PyObject *)
+blistiter_len(blistiterobject *it)
+{
+ iter_t *iter = &it->iter;
+ int depth;
+ Py_ssize_t total = 0;
+
+ if (!iter->leaf)
+ return PyInt_FromLong(0);
+
+ /* Remaining elements in the current leaf... */
+ total += iter->leaf->n - iter->i;
+
+ /* ...plus everything under the not-yet-visited children at each
+ * level of the stack. */
+ for (depth = iter->depth-2; depth >= 0; depth--) {
+ point_t point = iter->stack[depth];
+ int j;
+ if (point.lst->leaf) continue;
+ assert(point.i > 0);
+ for (j = point.i; j < point.lst->num_children; j++) {
+ PyBList *child = (PyBList *) point.lst->children[j];
+ total += child->n;
+ }
+ }
+ /* NOTE(review): special case for a root that is itself a leaf but
+ * still has a stack entry — presumably the list shrank to a leaf
+ * mid-iteration; verify against iter_next's root-leaf handling. */
+ if (iter->depth > 1 && iter->stack[0].lst->leaf) {
+ int extra = iter->stack[0].lst->n - iter->stack[0].i;
+ if (extra > 0) total += extra;
+ }
+ return PyInt_FromLong(total);
+}
+
+PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
+
+/* Method table shared by the blist iterator type(s). */
+static PyMethodDef blistiter_methods[] = {
+ {"__length_hint__", (PyCFunction)blistiter_len, METH_NOARGS, length_hint_doc},
+ {NULL, NULL} /* sentinel */
+};
+
+/* Type object for the forward blist iterator. */
+PyTypeObject PyBListIter_Type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "blistiterator", /* tp_name */
+ sizeof(blistiterobject), /* tp_basicsize */
+ 0, /* tp_itemsize */
+ /* methods */
+ blistiter_dealloc, /* tp_dealloc */
+ 0, /* tp_print */
+ 0, /* tp_getattr */
+ 0, /* tp_setattr */
+ 0, /* tp_compare */
+ 0, /* tp_repr */
+ 0, /* tp_as_number */
+ 0, /* tp_as_sequence */
+ 0, /* tp_as_mapping */
+ 0, /* tp_hash */
+ 0, /* tp_call */
+ 0, /* tp_str */
+ PyObject_GenericGetAttr, /* tp_getattro */
+ 0, /* tp_setattro */
+ 0, /* tp_as_buffer */
+ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
+ 0, /* tp_doc */
+ blistiter_traverse, /* tp_traverse */
+ 0, /* tp_clear */
+ 0, /* tp_richcompare */
+ 0, /* tp_weaklistoffset */
+ PyObject_SelfIter, /* tp_iter */
+ blistiter_next, /* tp_iternext */
+ blistiter_methods, /* tp_methods */
+ 0, /* tp_members */
+};
+
+/************************************************************************
+ * BList reverse iterator
+ */
+
+/* Initialize a reverse iterator positioned just after element
+ * "start" (it yields start-1 first).  "stop" is the (exclusive)
+ * lower bound of the iteration. */
+BLIST_LOCAL(iter_t *)
+riter_init2(iter_t *iter, PyBList *lst, Py_ssize_t start, Py_ssize_t stop)
+{
+ iter->depth = 0;
+
+ assert(stop >= 0);
+ assert(start >= 0);
+ assert(start >= stop);
+ while (!lst->leaf) {
+ PyBList *p;
+ int k;
+ Py_ssize_t so_far;
+
+ blist_locate(lst, start-1, (PyObject **) &p, &k, &so_far);
+ iter->stack[iter->depth].lst = lst;
+ /* .i records the *next* child to visit, counting down. */
+ iter->stack[iter->depth++].i = k - 1;
+ Py_INCREF(lst);
+ lst = p;
+ start -= so_far;
+ }
+
+ iter->leaf = lst;
+ iter->i = start-1;
+ iter->depth++;
+ Py_INCREF(lst);
+
+ return iter;
+}
+#define riter_init(iter, lst) (riter_init2((iter), (lst), (lst)->n, 0))
+
+/* Step the reverse iterator and return the previous element
+ * (borrowed reference), or NULL when exhausted. */
+BLIST_LOCAL(PyObject *)
+iter_prev(iter_t *iter)
+{
+ PyBList *p;
+ int i;
+
+ p = iter->leaf;
+ if (p == NULL)
+ return NULL;
+
+ if (!p->leaf) {
+ /* If p is the root, it may have been a leaf when we began
+ * iterating, but turned into a non-leaf during iteration.
+ * Modifying the list during iteration results in undefined
+ * behavior, so just throw in the towel.
+ */
+
+ return NULL;
+ }
+
+ /* Clamp in case the leaf shrank since the position was saved. */
+ if (iter->i >= p->num_children && iter->i >= 0)
+ iter->i = p->num_children - 1;
+
+ if (iter->i >= 0)
+ return p->children[iter->i--];
+
+ /* Current leaf exhausted: pop until a level with children left. */
+ iter->depth--;
+ do {
+ decref_later((PyObject *) p);
+ if (!iter->depth) {
+ iter->leaf = NULL;
+ return NULL;
+ }
+ p = iter->stack[--iter->depth].lst;
+ i = iter->stack[iter->depth].i;
+
+ if (i >= p->num_children && i >= 0)
+ i = p->num_children - 1;
+ } while (i < 0);
+
+ assert(iter->stack[iter->depth].lst == p);
+ iter->stack[iter->depth++].i = i-1;
+
+ /* Descend down the rightmost path of the previous sibling. */
+ while (!p->leaf) {
+ p = (PyBList *) p->children[i];
+ Py_INCREF(p);
+ i = p->num_children-1;
+ iter->stack[iter->depth].lst = p;
+ iter->stack[iter->depth++].i = i-1;
+ }
+
+ iter->leaf = iter->stack[iter->depth-1].lst;
+ iter->i = iter->stack[iter->depth-1].i;
+
+ return p->children[i];
+}
+
+/* Implementation of reversed(blist): allocate and return a new
+ * reverse-iterator object over seq, or NULL on allocation failure. */
+BLIST_PYAPI(PyObject *)
+py_blist_reversed(PyBList *seq)
+{
+        blistiterobject *it;
+
+        invariants(seq, VALID_USER);
+
+        DANGER_BEGIN;
+        it = PyObject_GC_New(blistiterobject,
+                             &PyBListReverseIter_Type);
+        DANGER_END;
+        if (it == NULL)
+                return _ob(NULL);
+
+        if (seq->leaf) {
+                /* Speed up common case: single-leaf list needs no stack. */
+                it->iter.leaf = seq;
+                it->iter.i = seq->n-1;
+                it->iter.depth = 1;
+                Py_INCREF(seq);
+        } else
+                riter_init(&it->iter, seq);
+
+        PyObject_GC_Track(it);
+        return _ob((PyObject *) it);
+}
+
+/* tp_iternext for the reverse iterator: return a new reference to the
+ * next element (moving backwards), or NULL when exhausted. */
+static PyObject *blistiter_prev(PyObject *oit)
+{
+        blistiterobject *it = (blistiterobject *) oit;
+        PyObject *obj;
+
+        /* Speed up common case: still inside the current leaf. */
+        PyBList *p;
+        p = it->iter.leaf;
+        if (p == NULL)
+                return NULL;
+
+        if (it->iter.i >= p->num_children && it->iter.i >= 0)
+                it->iter.i = p->num_children - 1;
+
+        if (p->leaf && it->iter.i >= 0) {
+                obj = p->children[it->iter.i--];
+                Py_INCREF(obj);
+                return obj;
+        }
+
+        /* Slow path: advance across leaf boundaries. */
+        obj = iter_prev(&it->iter);
+        if (obj != NULL)
+                Py_INCREF(obj);
+
+        _decref_flush();
+        return obj;
+}
+
+/* __length_hint__ for the reverse iterator: count the elements not yet
+ * yielded, i.e. those remaining in the current leaf plus everything in
+ * subtrees to the left of the recorded path. */
+BLIST_PYAPI(PyObject *)
+blistriter_len(blistiterobject *it)
+{
+        iter_t *iter = &it->iter;
+        int depth;
+        Py_ssize_t total = 0;
+
+        total += iter->i + 1;
+
+        for (depth = iter->depth-2; depth >= 0; depth--) {
+                point_t point = iter->stack[depth];
+                int j;
+                if (point.lst->leaf) continue;
+                for (j = 0; j <= point.i; j++) {
+                        PyBList *child = (PyBList *) point.lst->children[j];
+                        total += child->n;
+                }
+        }
+        /* NOTE(review): stack[0] may itself be a leaf if the root was a
+         * leaf at iterator creation; its unread elements count too. */
+        if (iter->depth > 1 && iter->stack[0].lst->leaf) {
+                int extra = iter->stack[0].i + 1;
+                if (extra > 0) total += extra;
+        }
+        return PyInt_FromLong(total);
+}
+
+/* Method table for the reverse iterator type. */
+static PyMethodDef blistriter_methods[] = {
+        {"__length_hint__", (PyCFunction)blistriter_len, METH_NOARGS, length_hint_doc},
+        {NULL,          NULL}           /* sentinel */
+};
+
+/* Type object for reversed(blist) iterators; shares dealloc/traverse
+ * with the forward iterator, differing only in tp_iternext. */
+PyTypeObject PyBListReverseIter_Type = {
+        PyVarObject_HEAD_INIT(NULL, 0)
+        "blistreverseiterator",                 /* tp_name */
+        sizeof(blistiterobject),                /* tp_basicsize */
+        0,                                      /* tp_itemsize */
+        /* methods */
+        blistiter_dealloc,                      /* tp_dealloc */
+        0,                                      /* tp_print */
+        0,                                      /* tp_getattr */
+        0,                                      /* tp_setattr */
+        0,                                      /* tp_compare */
+        0,                                      /* tp_repr */
+        0,                                      /* tp_as_number */
+        0,                                      /* tp_as_sequence */
+        0,                                      /* tp_as_mapping */
+        0,                                      /* tp_hash */
+        0,                                      /* tp_call */
+        0,                                      /* tp_str */
+        PyObject_GenericGetAttr,                /* tp_getattro */
+        0,                                      /* tp_setattro */
+        0,                                      /* tp_as_buffer */
+        Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,/* tp_flags */
+        0,                                      /* tp_doc */
+        blistiter_traverse,                     /* tp_traverse */
+        0,                                      /* tp_clear */
+        0,                                      /* tp_richcompare */
+        0,                                      /* tp_weaklistoffset */
+        PyObject_SelfIter,                      /* tp_iter */
+        blistiter_prev,                         /* tp_iternext */
+        blistriter_methods,                     /* tp_methods */
+        0,                                      /* tp_members */
+};
+
+/************************************************************************
+ * A forest is an array of BList tree structures, which may be of
+ * different heights. It's a temporary utility structure for certain
+ * operations. Specifically, it allows us to compose or decompose a
+ * BList in O(n) time.
+ *
+ * The BList trees in the forest are stored in position order, left-to-right.
+ *
+ */
+
+typedef struct Forest
+{
+        Py_ssize_t num_leafs;   /* total number of leaf nodes appended */
+        Py_ssize_t num_trees;   /* trees currently stored in list[] */
+        Py_ssize_t max_trees;   /* allocated capacity of list[] */
+        PyBList **list;         /* trees, in position order left-to-right */
+} Forest;
+
+#if 0
+/* (Currently unused.)
+ * Remove the right-most element.  If it's a leaf, return it.
+ * Otherwise, add all of its children to the forest *in reverse
+ * order* and try again.  Assuming only one BList was added to the
+ * forest, the effect is to return all of the leafs one-at-a-time
+ * left-to-right. */
+BLIST_LOCAL(PyBList *)
+forest_get_leaf(Forest *forest)
+{
+        PyBList *node = forest->list[--forest->num_trees];
+        PyBList **list;
+        while (!node->leaf) {
+                int i;
+                /* Grow the tree array until the node's children fit. */
+                while (forest->num_trees + node->num_children
+                       > forest->max_trees) {
+                        list = forest->list;
+                        forest->max_trees *= 2;
+                        PyMem_Resize(list, PyBList*,forest->max_trees);
+                        if (list == NULL) {
+                                PyErr_NoMemory();
+                                return NULL;
+                        }
+                        forest->list = list;
+                }
+
+                for (i = node->num_children - 1; i >= 0; i--)
+                        forest->list[forest->num_trees++]
+                                = blist_PREPARE_WRITE(node, i);
+
+                node->num_children = 0;
+                SAFE_DECREF(node);
+                node = forest->list[--forest->num_trees];
+        }
+
+        return node;
+}
+#endif
+
+/* Free-list of previously-allocated forest tree arrays, recycled by
+ * forest_init()/forest_uninit() to avoid repeated PyMem churn. */
+#define MAX_FREE_FORESTS 20
+static PyBList **forest_saved[MAX_FREE_FORESTS];
+static unsigned forest_max_trees[MAX_FREE_FORESTS];
+static unsigned num_free_forests = 0;
+
+/* Initialize an empty forest, reusing a cached tree array when one is
+ * available.  Returns forest, or NULL (with MemoryError set) on
+ * allocation failure. */
+BLIST_LOCAL(Forest *)
+forest_init(Forest *forest)
+{
+        forest->num_trees = 0;
+        forest->num_leafs = 0;
+        if (num_free_forests) {
+                forest->list = forest_saved[--num_free_forests];
+                forest->max_trees = forest_max_trees[num_free_forests];
+        } else {
+                forest->max_trees = LIMIT; /* enough for O(LIMIT**2) items */
+                forest->list = PyMem_New(PyBList *, forest->max_trees);
+                if (forest->list == NULL)
+                        return (Forest *) PyErr_NoMemory();
+        }
+        return forest;
+}
+
+#if 0
+/* (Currently unused.)  Heap-allocate and initialize a forest. */
+BLIST_LOCAL(Forest *)
+forest_new(void)
+{
+        Forest *forest = PyMem_New(Forest, 1);
+        Forest *rv;
+        if (forest == NULL)
+                return (Forest *) PyErr_NoMemory();
+        rv = forest_init(forest);
+        if (rv == NULL)
+                PyMem_Free(forest);
+        return rv;
+}
+
+/* (Currently unused.)  Ensure the forest can hold new_max trees. */
+BLIST_LOCAL(void)
+forest_grow(Forest *forest, Py_ssize_t new_max)
+{
+        if (forest->max_trees > new_max) return;
+        /* XXX Check return value */
+        PyMem_Resize(forest->list, PyBList *, new_max);
+        forest->max_trees = new_max;
+}
+#endif
+
+/* Append a tree to the forest.  Steals the reference to "leaf".
+ * Whenever the number of appended leafs reaches a multiple of
+ * LIMIT**k, the top LIMIT trees are merged under a new parent, so the
+ * forest stays O(log n) trees deep.  Returns 0 on success, -1 (with
+ * MemoryError set) on failure. */
+BLIST_LOCAL(int)
+forest_append(Forest *forest, PyBList *leaf)
+{
+        Py_ssize_t power = LIMIT;
+
+        if (!leaf->num_children) {  /* Don't bother adding empty leaf nodes */
+                SAFE_DECREF(leaf);
+                return 0;
+        }
+
+        leaf->n = leaf->num_children;
+
+        if (forest->num_trees == forest->max_trees) {
+                PyBList **list = forest->list;
+
+                forest->max_trees <<= 1;
+                PyMem_Resize(list, PyBList *, forest->max_trees);
+                if (list == NULL) {
+                        PyErr_NoMemory();
+                        return -1;
+                }
+                forest->list = list;
+        }
+
+        forest->list[forest->num_trees++] = leaf;
+        forest->num_leafs++;
+
+        /* Collapse complete groups of LIMIT trees, one level at a time. */
+        while (forest->num_leafs % power == 0) {
+                struct PyBList *parent = blist_new();
+                int x;
+
+                if (parent == NULL) {
+                        PyErr_NoMemory();
+                        return -1;
+                }
+                parent->leaf = 0;
+                memcpy(parent->children,
+                       &forest->list[forest->num_trees - LIMIT],
+                       sizeof (PyBList *) * LIMIT);
+                parent->num_children = LIMIT;
+                forest->num_trees -= LIMIT;
+                /* Re-balance in case the last child is under-full. */
+                x = blist_underflow(parent, LIMIT - 1);
+                assert(!x); (void) x;
+
+                forest->list[forest->num_trees++] = parent;
+                power *= LIMIT;
+        }
+
+        return 0;
+}
+
+/* Build a tree bottom-up from an array of equal-height subtrees.
+ * Groups the children under parents LIMIT at a time, fixes up the last
+ * parent if it would be under-full, then recurses on the new level.
+ * On success returns 1 with the single root left in children[0];
+ * returns -1 on allocation failure.  Steals the child references. */
+BLIST_LOCAL(int)
+blist_init_from_child_array(PyBList **children, int num_children)
+{
+        int i, j, k;
+
+        assert(num_children);
+        if (num_children == 1)
+                return 1;
+
+        for (k = i = 0; i < num_children; i += LIMIT) {
+                PyBList *parent = blist_new();
+                int stop = (LIMIT < (num_children - i))
+                        ? LIMIT : (num_children - i);
+                if (parent == NULL)
+                        return -1;
+                parent->leaf = 0;
+                for (j = 0; j < stop; j++) {
+                        parent->children[j] = (PyObject *) children[i+j];
+                        assert(children[i+j]->num_children >= HALF);
+                        children[i+j] = NULL;
+                }
+                parent->num_children = j;
+                blist_adjust_n(parent);
+                children[k++] = parent;
+        }
+
+        if (k <= 1)
+                return k;
+
+        /* Restore the invariant that every node has >= HALF children by
+         * shifting some children from the second-to-last parent. */
+        if (children[k-1]->num_children < HALF) {
+                PyBList *left = children[k-2];
+                PyBList *right = children[k-1];
+                int needed = HALF - right->num_children;
+
+                shift_right(right, 0, needed);
+                copy(right, 0, left, LIMIT-needed, needed);
+                left->num_children -= needed;
+                right->num_children += needed;
+                blist_adjust_n(left);
+                blist_adjust_n(right);
+        }
+
+        return blist_init_from_child_array(children, k);
+}
+
+#if 0
+/* (Currently unused.)
+ * Like forest_append(), but handles the case where the previously
+ * added leaf is in an underflow state: tops up the previous leaf from
+ * the new one before appending. */
+BLIST_LOCAL(int)
+forest_append_safe(Forest *forest, PyBList *leaf)
+{
+        PyBList *last;
+
+        if (forest->num_trees == 0)
+                goto append;
+
+        last = forest->list[forest->num_trees-1];
+
+        if (!last->leaf || last->num_children >= HALF)
+                goto append;
+
+        if (last->num_children + leaf->num_children <= LIMIT) {
+                copy(last, last->num_children, leaf, 0, leaf->num_children);
+                last->num_children += leaf->num_children;
+                last->n += leaf->num_children;
+                leaf->num_children = 0;
+        } else {
+                int needed = HALF - last->num_children;
+
+                copy(last, last->num_children, leaf, 0, needed);
+                last->num_children += needed;
+                last->n += needed;
+                shift_left(leaf, needed, needed);
+                leaf->num_children -= needed;
+        }
+
+ append:
+        return forest_append(forest, leaf);
+}
+#endif
+
+/* Release the forest's trees (deferred DECREF) and recycle or free its
+ * tree array.  Only default-sized arrays are kept on the free-list. */
+BLIST_LOCAL(void)
+forest_uninit(Forest *forest)
+{
+        Py_ssize_t i;
+        for (i = 0; i < forest->num_trees; i++)
+                decref_later((PyObject *) forest->list[i]);
+        if (num_free_forests < MAX_FREE_FORESTS && forest->max_trees == LIMIT){
+                forest_saved[num_free_forests] = forest->list;
+                forest_max_trees[num_free_forests++] = forest->max_trees;
+        } else
+                PyMem_Free(forest->list);
+}
+
+/* Like forest_uninit(), but decrefs immediately instead of deferring;
+ * callers must already be in a reentrancy-safe (DANGER) context. */
+BLIST_LOCAL(void)
+forest_uninit_now(Forest *forest)
+{
+        Py_ssize_t i;
+        for (i = 0; i < forest->num_trees; i++)
+                Py_DECREF((PyObject *) forest->list[i]);
+        if (num_free_forests < MAX_FREE_FORESTS && forest->max_trees == LIMIT){
+                forest_saved[num_free_forests] = forest->list;
+                forest_max_trees[num_free_forests++] = forest->max_trees;
+        } else
+                PyMem_Free(forest->list);
+}
+
+#if 0
+/* (Currently unused.)  Uninitialize and free a heap-allocated forest. */
+BLIST_LOCAL(void)
+forest_delete(Forest *forest)
+{
+        forest_uninit(forest);
+        PyMem_Free(forest);
+}
+#endif
+
+/* Combine the forest into a final BList and delete the forest.
+ *
+ * forest_finish() assumes that only leaf nodes were passed to forest_append()
+ *
+ * Works through the base-LIMIT digits of num_leafs: each non-zero
+ * digit corresponds to a group of same-height trees at the top of the
+ * forest, which is merged under one node and concatenated onto the
+ * result.  Returns NULL (forest freed) on allocation failure. */
+static PyBList *forest_finish(Forest *forest)
+{
+        PyBList *out_tree = NULL; /* The final BList we are building */
+        int out_height = 0;       /* Its height */
+        int group_height = 1;     /* height of the next group from forest */
+
+        while(forest->num_trees) {
+                int n = forest->num_leafs % LIMIT;
+                PyBList *group;
+                int adj;
+
+                forest->num_leafs /= LIMIT;
+                group_height++;
+
+                if (!n) continue;  /* No nodes at this height */
+
+                /* Merge nodes of the same height into 1 node, and
+                 * merge it into our output BList.
+                 */
+                group = blist_new();
+                if (group == NULL) {
+                        forest_uninit(forest);
+                        xdecref_later((PyObject *) out_tree);
+                        return NULL;
+                }
+                group->leaf = 0;
+                memcpy(group->children,
+                       &forest->list[forest->num_trees - n],
+                       sizeof (PyBList *) * n);
+                group->num_children = n;
+                forest->num_trees -= n;
+                /* adj is 1 if the group collapsed a level during
+                 * re-balancing, 0 otherwise. */
+                adj = blist_underflow(group, n - 1);
+                if (out_tree == NULL) {
+                        out_tree = group;
+                        out_height = group_height - adj;
+                } else {
+                        out_tree = blist_concat_roots(group, group_height- adj,
+                                                      out_tree, out_height,
+                                                      &out_height);
+                }
+        }
+
+        forest_uninit(forest);
+
+        return out_tree;
+}
+
+/************************************************************************
+ * Functions that rely on forests.
+ */
+
+/* Initialize an empty BList from an array of PyObjects in O(n) time.
+ * Small arrays (<= LIMIT) are copied straight into the root; larger
+ * ones are chopped into LIMIT-sized leafs, fed to a forest, and the
+ * finished tree is adopted into self.  GC is paused so the many
+ * half-built nodes are not traversed.  Returns 0 or -1. */
+BLIST_LOCAL(int)
+blist_init_from_array(PyBList *self, PyObject **restrict src, Py_ssize_t n)
+{
+        int i;
+        PyBList *final, *cur;
+        PyObject **dst;
+        PyObject **stop = &src[n];
+        PyObject **next;
+        Forest forest;
+        int gc_previous;
+
+        invariants(self, VALID_ROOT|VALID_RW);
+
+        if (n <= LIMIT) {
+                /* Fast path: everything fits in the root leaf. */
+                dst = self->children;
+                while (src < stop) {
+                        Py_INCREF(*src);
+                        *dst++ = *src++;
+                }
+                self->num_children = n;
+                self->n = n;
+                return _int(0);
+        }
+
+        if (forest_init(&forest) == NULL)
+                return _int(-1);
+
+        gc_previous = gc_pause();
+
+        cur = blist_new();
+        if (cur == NULL)
+                goto error2;
+        dst = cur->children;
+
+        /* Fill leafs LIMIT elements at a time and append each to the
+         * forest; the final (possibly partial) leaf is handled below. */
+        while (src < stop) {
+                next = &src[LIMIT];
+                if (next > stop) next = stop;
+                while (src < next) {
+                        Py_INCREF(*src);
+                        *dst++ = *src++;
+                }
+                if (src == stop) break;
+
+                cur->num_children = LIMIT;
+                if (forest_append(&forest, cur) < 0)
+                        goto error;
+                cur = blist_new();
+                if (cur == NULL)
+                        goto error2;
+                dst = cur->children;
+        }
+
+        i = dst - cur->children;
+
+        if (i) {
+                cur->num_children = i;
+                if (forest_append(&forest, cur) < 0) {
+                error:
+                        Py_DECREF(cur);
+                error2:
+                        forest_uninit(&forest);
+                        gc_unpause(gc_previous);
+                        return _int(-1);
+                }
+        } else {
+                Py_DECREF(cur);
+        }
+
+        final = forest_finish(&forest);
+        blist_become_and_consume(self, final);
+
+        ext_reindex_set_all((PyBListRoot *) self);
+        SAFE_DECREF(final);
+
+        gc_unpause(gc_previous);
+
+        return _int(0);
+}
+
+/* Initialize an empty BList from a Python sequence in O(n) time.
+ * Fast paths: another BList (O(1) structure sharing), and exact
+ * tuples/lists (array copy).  Otherwise iterate the sequence, packing
+ * elements into leafs and building bottom-up via a forest.
+ * Returns 0 on success, -1 with an exception set (self is cleared). */
+BLIST_LOCAL(int)
+blist_init_from_seq(PyBList *self, PyObject *b)
+{
+        PyObject *it;
+        PyObject *(*iternext)(PyObject *);
+        PyBList *cur, *final;
+        Forest forest;
+
+        invariants(self, VALID_ROOT | VALID_RW);
+
+        if (PyBList_Check(b)) {
+                /* We can copy other BLists in O(1) time :-) */
+                blist_become(self, (PyBList *) b);
+                ext_mark(self, 0, DIRTY);
+                ext_mark_set_dirty_all((PyBList *) b);
+                return _int(0);
+        }
+
+        if (PyTuple_CheckExact(b)) {
+                PyTupleObject *t = (PyTupleObject *) b;
+                return _int(blist_init_from_array(self, t->ob_item,
+                                                  PyTuple_GET_SIZE(t)));
+        }
+#ifndef Py_BUILD_CORE
+        if (PyList_CheckExact(b)) {
+                PyListObject *l = (PyListObject *) b;
+                return _int(blist_init_from_array(self, l->ob_item,
+                                                  PyList_GET_SIZE(l)));
+        }
+#endif
+
+        DANGER_BEGIN;
+        it = PyObject_GetIter(b);
+        DANGER_END;
+        if (it == NULL)
+                return _int(-1);
+        iternext = *Py_TYPE(it)->tp_iternext;
+
+        /* Try common case of len(sequence) <= LIMIT */
+        for (self->num_children = 0; self->num_children < LIMIT;
+             self->num_children++) {
+                PyObject *item;
+
+                DANGER_BEGIN;
+                item = iternext(it);
+                DANGER_END;
+
+                if (item == NULL) {
+                        self->n = self->num_children;
+                        if (PyErr_Occurred()) {
+                                if (PyErr_ExceptionMatches(PyExc_StopIteration))
+                                        PyErr_Clear();
+                                else
+                                        goto error;
+                        }
+                        goto done;
+                }
+
+                self->children[self->num_children] = item;
+        }
+
+        /* No such luck, build bottom-up instead.  The sequence data
+         * so far goes in a leaf node. */
+
+        cur = blist_new();
+        if (cur == NULL)
+                goto error;
+        blist_become_and_consume(cur, self);
+
+        if (forest_init(&forest) == NULL) {
+                decref_later(it);
+                decref_later((PyObject *) cur);
+                return _int(-1);
+        }
+
+        if (0 > forest_append(&forest, cur))
+                goto error2;
+
+        cur = blist_new();
+        if (cur == NULL)
+                goto error2;
+
+        /* Drain the iterator, flushing each full leaf to the forest. */
+        while (1) {
+                PyObject *item;
+                DANGER_BEGIN;
+                item = iternext(it);
+                DANGER_END;
+                if (item == NULL) {
+                        if (PyErr_Occurred()) {
+                                if (PyErr_ExceptionMatches(PyExc_StopIteration))
+                                        PyErr_Clear();
+                                else
+                                        goto error2;
+                        }
+                        break;
+                }
+
+                if (cur->num_children == LIMIT) {
+                        if (forest_append(&forest, cur) < 0) goto error2;
+                        cur = blist_new();
+                        if (cur == NULL)
+                                goto error2;
+                }
+
+                cur->children[cur->num_children++] = item;
+        }
+
+        if (cur->num_children) {
+                if (forest_append(&forest, cur) < 0) goto error2;
+                cur->n = cur->num_children;
+        } else {
+                SAFE_DECREF(cur);
+        }
+
+        final = forest_finish(&forest);
+        blist_become_and_consume(self, final);
+        SAFE_DECREF(final);
+
+ done:
+        ext_reindex_set_all((PyBListRoot*)self);
+        decref_later(it);
+        return _int(0);
+
+ error2:
+        DANGER_BEGIN;
+        Py_XDECREF((PyObject *) cur);
+        forest_uninit_now(&forest);
+        DANGER_END;
+ error:
+        DANGER_BEGIN;
+        Py_DECREF(it);
+        DANGER_END;
+        blist_CLEAR(self);
+        return _int(-1);
+}
+
+/* Utility function for performing repr(): recursively replace each
+ * element of the (private working copy of the) tree with its repr()
+ * string, so the caller can then join the leaf strings.  Destructive;
+ * only safe on a temporary copy.  Returns 0 or -1. */
+BLIST_LOCAL(int)
+blist_repr_r(PyBList *self)
+{
+        int i;
+        PyObject *s;
+
+        invariants(self, VALID_RW|VALID_PARENT);
+
+        if (self->leaf) {
+                for (i = 0; i < self->num_children; i++) {
+                        if (Py_EnterRecursiveCall(" while getting the repr of a list"))
+                                return _int(-1);
+                        DANGER_BEGIN;
+                        s = PyObject_Repr(self->children[i]);
+                        DANGER_END;
+                        Py_LeaveRecursiveCall();
+                        if (s == NULL)
+                                return _int(-1);
+                        /* Replace the element with its repr string. */
+                        Py_DECREF(self->children[i]);
+                        self->children[i] = s;
+                }
+        } else {
+                for (i = 0; i < self->num_children; i++) {
+                        PyBList *child = blist_PREPARE_WRITE(self, i);
+                        int status = blist_repr_r(child);
+                        if (status < 0)
+                                return _int(status);
+                }
+        }
+
+        return _int(0);
+}
+
+/* Set element i of the list to v (reference stolen) by walking down
+ * from the root, copying shared nodes on the way (copy-on-write), and
+ * updating the root's index/dirty metadata.  Returns the old value
+ * (ownership transferred to the caller). */
+PyObject *
+ext_make_clean_set(PyBListRoot *root, Py_ssize_t i, PyObject *v)
+{
+        PyBList *p = (PyBList *) root;
+        PyBList *next;
+        int k;
+        Py_ssize_t so_far, offset = 0;
+        PyObject *old_value;
+        int did_mark = 0;
+
+        while (!p->leaf) {
+                blist_locate(p, i, (PyObject **) &next, &k, &so_far);
+                if (Py_REFCNT(next) <= 1)
+                        p = next;
+                else {
+                        /* Shared child: copy it, and mark the index
+                         * dirty once since the tree structure changed. */
+                        p = blist_PREPARE_WRITE(p, k);
+                        if (!did_mark) {
+                                ext_mark((PyBList *) root, offset, DIRTY);
+                                did_mark = 1;
+                        }
+                }
+                assert(i >= so_far);
+                i -= so_far;
+                offset += so_far;
+        }
+
+        if (!root->leaf)
+                ext_mark_clean(root, offset, p, 1);
+
+        old_value = p->children[i];
+        p->children[i] = v;
+        return old_value;
+}
+
+/* Slow path of item assignment: use the root's index cache to find the
+ * leaf holding element i and swap in v (reference stolen), falling
+ * back to ext_make_clean_set() when the cache entry is dirty or the
+ * leaf is not marked set-clean.  Returns the old value. */
+PyObject *
+blist_ass_item_return_slow(PyBListRoot *root, Py_ssize_t i, PyObject *v)
+{
+        Py_ssize_t dirty_offset, ioffset;
+        PyObject *rv;
+        assert(i >= 0);
+        assert(i < root->n);
+        invariants(root, VALID_RW);
+        ioffset = i / INDEX_FACTOR;
+
+        if (root->leaf || ext_is_dirty(root, i, &dirty_offset)
+            || !GET_BIT(root->setclean_list, ioffset)) {
+                rv = ext_make_clean_set(root, i, v);
+        } else {
+                Py_ssize_t offset = root->offset_list[ioffset];
+                PyBList *p = root->index_list[ioffset];
+                assert(i >= offset);
+                assert(p);
+                assert(p->leaf);
+                if (i < offset + p->n) {
+                good:
+                        /* If we're here, Py_REFCNT(p) == 1, most likely.
+                         * However, we can't assert() it since there are two
+                         * exceptions:
+                         * 1) The user may have acquired a ref via gc, or
+                         * 2) an iterator may have a reference
+                         *
+                         * If it's an iterator, we can go ahead and make the
+                         * change anyway since we're not changing the length
+                         * of the list.
+                         */
+                        rv = p->children[i - offset];
+                        p->children[i - offset] = v;
+                        if (dirty_offset >= 0)
+                                ext_make_clean(root, dirty_offset);
+                } else if (ext_is_dirty(root,i + INDEX_FACTOR,&dirty_offset)
+                           || !GET_BIT(root->setclean_list, ioffset+1)) {
+                        rv = ext_make_clean_set(root, i, v);
+                } else {
+                        /* Element i lives in the next indexed leaf. */
+                        ioffset++;
+                        assert(ioffset < root->index_allocated);
+                        offset = root->offset_list[ioffset];
+                        p = root->index_list[ioffset];
+                        assert(p);
+                        assert(p->leaf);
+                        assert(i < offset + p->n);
+
+                        goto good;
+                }
+        }
+
+        return _ob(rv);
+}
+
+/* Assign v to element i and return the old value (new reference to v
+ * is taken; old value's reference transfers to the caller).  Inline
+ * fast path for a root leaf; otherwise defer to the indexed path. */
+BLIST_LOCAL_INLINE(PyObject *)
+blist_ass_item_return(PyBList *self, Py_ssize_t i, PyObject *v)
+{
+        Py_INCREF(v);
+        if (self->leaf) {
+                PyObject *rv = self->children[i];
+                self->children[i] = v;
+                return rv;
+        }
+
+        return blist_ass_item_return2((PyBListRoot*)self, i, v);
+}
+
+#ifndef Py_BUILD_CORE
+/* Rich-compare a BList v against a built-in list w.  Scans for the
+ * first differing pair of elements and delegates the ordering to that
+ * pair; if one sequence is a prefix of the other, compares lengths.
+ * Returns a new reference or NULL on error. */
+BLIST_LOCAL(PyObject *)
+blist_richcompare_list(PyBList *v, PyListObject *w, int op)
+{
+        int cmp;
+        PyObject *ret, *item;
+        Py_ssize_t i;
+        int v_stopped = 0;
+        int w_stopped = 0;
+        fast_compare_data_t fast_cmp_type = no_fast_eq;
+
+        invariants(v, VALID_RW);
+
+        if (v->n != PyList_GET_SIZE(w) && (op == Py_EQ || op == Py_NE)) {
+                /* Shortcut: if the lengths differ, the lists differ */
+                PyObject *res;
+                if (op == Py_EQ) {
+                false:
+                        res = Py_False;
+                } else {
+                true:
+                        res = Py_True;
+                }
+                Py_INCREF(res);
+                return _ob(res);
+        }
+
+        /* Search for the first index where items are different */
+        i = 0;
+        ITER(v, item, {
+                if (i >= PyList_GET_SIZE(w)) {
+                        w_stopped = 1;
+                        break;
+                }
+                if (i == 0)
+                        fast_cmp_type = check_fast_cmp_type(item, Py_EQ);
+
+                cmp = fast_eq(item, w->ob_item[i], fast_cmp_type);
+
+                if (cmp < 0) {
+                        ITER_CLEANUP();
+                        return _ob(NULL);
+                } else if (!cmp) {
+                        if (op == Py_EQ) { ITER_CLEANUP(); goto false; }
+                        if (op == Py_NE) { ITER_CLEANUP(); goto true; }
+
+                        /* Last RichComparebool may have modified the list */
+                        if (i >= PyList_GET_SIZE(w)) {
+                                w_stopped = 1;
+                                break;
+                        }
+
+                        DANGER_BEGIN;
+                        ret = PyObject_RichCompare(item, w->ob_item[i], op);
+                        DANGER_END;
+                        ITER_CLEANUP();
+                        return ret;
+                }
+                i++;
+        });
+
+        if (!w_stopped) {
+                v_stopped = 1;
+                if (i >= PyList_GET_SIZE(w))
+                        w_stopped = 1;
+        }
+
+        /* No more items to compare -- compare sizes */
+        switch (op) {
+        case Py_LT: cmp = v_stopped && !w_stopped; break;
+        case Py_LE: cmp = v_stopped; break;
+        case Py_EQ: cmp = v_stopped == w_stopped; break;
+        case Py_NE: cmp = v_stopped != w_stopped; break;
+        case Py_GT: cmp = !v_stopped && w_stopped; break;
+        case Py_GE: cmp = w_stopped; break;
+        default:
+                /* cannot happen */
+                PyErr_BadInternalCall();
+                return _ob(NULL);
+        }
+
+        if (cmp) goto true;
+        else goto false;
+}
+#endif
+
+/* Given c = fast_eq(item1, item2) for the first differing pair, finish
+ * the comparison: c < 0 propagates an error; c == 0 means the items
+ * differ, so EQ/NE are decided immediately and other ops delegate to
+ * a full rich comparison of the pair.  c > 0 is unreachable here. */
+BLIST_LOCAL(PyObject *)
+blist_richcompare_item(int c, int op, PyObject *item1, PyObject *item2)
+{
+        PyObject *ret;
+
+        if (c < 0)
+                return NULL;
+        if (!c) {
+                if (op == Py_EQ)
+                        Py_RETURN_FALSE;
+                if (op == Py_NE)
+                        Py_RETURN_TRUE;
+                DANGER_BEGIN;
+                ret = PyObject_RichCompare(item1, item2, op);
+                DANGER_END;
+                return ret;
+        }
+
+        /* Impossible to get here */
+        assert(0);
+        return NULL;
+}
+
+/* One list is a prefix of the other: decide the comparison by length. */
+static PyObject *blist_richcompare_len(PyBList *v, PyBList *w, int op)
+{
+        /* No more items to compare -- compare sizes */
+        switch (op) {
+        case Py_LT: if (v->n <  w->n) Py_RETURN_TRUE; else Py_RETURN_FALSE;
+        case Py_LE: if (v->n <= w->n) Py_RETURN_TRUE; else Py_RETURN_FALSE;
+        case Py_EQ: if (v->n == w->n) Py_RETURN_TRUE; else Py_RETURN_FALSE;
+        case Py_NE: if (v->n != w->n) Py_RETURN_TRUE; else Py_RETURN_FALSE;
+        case Py_GT: if (v->n >  w->n) Py_RETURN_TRUE; else Py_RETURN_FALSE;
+        case Py_GE: if (v->n >= w->n) Py_RETURN_TRUE; else Py_RETURN_FALSE;
+        default: return NULL; /* cannot happen */
+        }
+}
+
+/* Rich-compare two BLists when at least one is a multi-level tree:
+ * walk both with iterators until the first differing pair, then
+ * delegate to blist_richcompare_item() (or compare lengths if one
+ * iterator runs out first). */
+BLIST_LOCAL(PyObject *)
+blist_richcompare_slow(PyBList *v, PyBList *w, int op)
+{
+        /* Search for the first index where items are different */
+        PyObject *item1, *item2;
+        iter_t it1, it2;
+        int c;
+        PyBList *leaf1, *leaf2;
+        fast_compare_data_t fast_cmp_type;
+
+        iter_init(&it1, v);
+        iter_init(&it2, w);
+
+        leaf1 = it1.leaf;
+        leaf2 = it2.leaf;
+        fast_cmp_type = check_fast_cmp_type(it1.leaf->children[0], Py_EQ);
+        do {
+                /* Inline fast path while inside the current leaf. */
+                if (it1.i < leaf1->num_children) {
+                        item1 = leaf1->children[it1.i++];
+                } else {
+                        item1 = iter_next(&it1);
+                        leaf1 = it1.leaf;
+                        if (item1 == NULL) {
+                        compare_len:
+                                iter_cleanup(&it1);
+                                iter_cleanup(&it2);
+                                return blist_richcompare_len(v, w, op);
+                        }
+                }
+
+                if (it2.i < leaf2->num_children) {
+                        item2 = leaf2->children[it2.i++];
+                } else {
+                        item2 = iter_next(&it2);
+                        leaf2 = it2.leaf;
+                        if (item2 == NULL)
+                                goto compare_len;
+                }
+
+                c = fast_eq(item1, item2, fast_cmp_type);
+        } while (c >= 1);
+
+        iter_cleanup(&it1);
+        iter_cleanup(&it2);
+        return blist_richcompare_item(c, op, item1, item2);
+}
+
+/* Rich-compare two BLists.  Handles length/emptiness shortcuts, then
+ * either compares two leafs directly or falls back to the iterator-
+ * based slow path for deeper trees. */
+BLIST_LOCAL(PyObject *)
+blist_richcompare_blist(PyBList *v, PyBList *w, int op)
+{
+        int i, c;
+        fast_compare_data_t fast_cmp_type;
+
+        if (v->n != w->n) {
+                /* Shortcut: if the lengths differ, the lists differ */
+                if (op == Py_EQ) {
+                        Py_RETURN_FALSE;
+                } else if (op == Py_NE) {
+                        Py_RETURN_TRUE;
+                }
+
+                if (!v->n) {
+                        /* Shortcut: first list empty, second un-empty. */
+                        switch (op) {
+                        case Py_LT: case Py_LE: Py_RETURN_TRUE;
+                        case Py_GT: case Py_GE: Py_RETURN_FALSE;
+                        default: return NULL; /* cannot happen */
+                        }
+                }
+        } else if (!v->n) {
+                /* Shortcut: two empty lists */
+                switch (op) {
+                case Py_NE: case Py_GT: case Py_LT: Py_RETURN_FALSE;
+                case Py_LE: case Py_EQ: case Py_GE: Py_RETURN_TRUE;
+                default: return NULL; /* cannot happen */
+                }
+        }
+
+        if (!v->leaf || !w->leaf)
+                return blist_richcompare_slow(v, w, op);
+
+        /* Due to the shortcuts above, we know that v->n > 0 */
+        fast_cmp_type = check_fast_cmp_type(v->children[0], Py_EQ);
+
+        for (i = 0; i < v->num_children && i < w->num_children; i++) {
+                c = fast_eq(v->children[i], w->children[i], fast_cmp_type);
+                if (c < 1)
+                        return blist_richcompare_item(c, op, v->children[i],
+                                                      w->children[i]);
+        }
+        return blist_richcompare_len(v, w, op);
+
+}
+
+/* Reverse a slice of a list in place, from lo up to (exclusive) hi.
+ * The loop is unrolled 16 swaps per iteration via Duff's Device. */
+BLIST_LOCAL_INLINE(void)
+reverse_slice(register PyObject **restrict lo, register PyObject **restrict hi)
+{
+        register PyObject *t;
+        assert(lo && hi);
+
+        /* slice of length 0 */
+        if (hi == lo) return;
+
+        /* Use Duff's Device
+         * http://en.wikipedia.org/wiki/Duff%27s_device
+         */
+
+        --hi;
+
+        switch ((hi - lo) & 31) {
+        case 31: do { t = *lo; *lo++ = *hi; *hi-- = t;
+        case 30: case 29: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 28: case 27: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 26: case 25: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 24: case 23: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 22: case 21: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 20: case 19: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 18: case 17: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 16: case 15: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 14: case 13: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 12: case 11: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 10: case 9: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 8: case 7: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 6: case 5: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 4: case 3: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 2: case 1: t = *lo; *lo++ = *hi; *hi-- = t;
+        case 0: ;
+        } while (lo < hi);
+        }
+}
+
+/* self *= 2: duplicate the list's contents in place.  Small nodes just
+ * copy their own children; larger ones go through the general
+ * concatenation path. */
+static void blist_double(PyBList *self)
+{
+        if (self->num_children > HALF) {
+                blist_extend_blist(self, self);
+                return;
+        }
+
+        copyref(self, self->num_children, self, 0, self->num_children);
+        self->num_children *= 2;
+        self->n *= 2;
+}
+
+/* self += other: extend self with the elements of any iterable.  A
+ * non-BList argument is first converted into a temporary BList so the
+ * O(log n) concatenation path can be used.  Returns 0 or -1. */
+BLIST_LOCAL(int)
+blist_extend(PyBList *self, PyObject *other)
+{
+        int err;
+        PyBList *bother = NULL;
+
+        invariants(self, VALID_PARENT|VALID_RW);
+
+        if (PyBList_Check(other)) {
+                err = blist_extend_blist(self, (PyBList *) other);
+                goto done;
+        }
+
+        bother = blist_root_new();
+        err = blist_init_from_seq(bother, other);
+        if (err < 0)
+                goto done;
+        err = blist_extend_blist(self, bother);
+        ext_mark(self, 0, DIRTY);
+
+ done:
+        SAFE_XDECREF(bother);
+        return _int(err);
+}
+
+/* Return self * n as a new BList.  Builds a node holding as many
+ * copies of self as fit (`fit`), then squares it repeatedly
+ * (binary-exponentiation style over the bits of n), appending a
+ * partial `remainder` copy at the end.  Returns NULL on error. */
+BLIST_LOCAL(PyObject *)
+blist_repeat(PyBList *self, Py_ssize_t n)
+{
+        Py_ssize_t mask;
+        PyBList *power = NULL, *rv, *remainder = NULL;
+        Py_ssize_t remainder_n = 0;
+
+        invariants(self, VALID_PARENT);
+
+        if (n <= 0 || self->n == 0)
+                return _ob((PyObject *) blist_root_new());
+
+        /* Overflow check for self->n * n. */
+        if ((self->n * n) / n != self->n)
+                return _ob(PyErr_NoMemory());
+
+        rv = blist_root_new();
+        if (rv == NULL)
+                return _ob(NULL);
+
+        if (n == 1) {
+                blist_become(rv, self);
+                ext_mark(rv, 0, DIRTY);
+                return _ob((PyObject *) rv);
+        }
+
+        if (self->num_children > HALF)
+                blist_become(rv, self);
+        else {
+                Py_ssize_t fit, fitn, so_far;
+
+                /* Pack `fit` copies of self into one node by repeated
+                 * self-copying (doubling). */
+                rv->leaf = self->leaf;
+                fit = LIMIT / self->num_children;
+                if (fit > n) fit = n;
+                fitn = fit * self->num_children;
+                xcopyref(rv, 0, self, 0, self->num_children);
+                so_far = self->num_children;
+                while (so_far*2 < fitn) {
+                        xcopyref(rv, so_far, rv, 0, so_far);
+                        so_far *= 2;
+                }
+                xcopyref(rv, so_far, rv, 0, (fitn - so_far));
+
+                rv->num_children = fitn;
+                rv->n = self->n * fit;
+                check_invariants(rv);
+
+                if (fit == n) {
+                        ext_mark(rv, 0, DIRTY);
+                        return _ob((PyObject *) rv);
+                }
+
+                remainder_n = n % fit;
+                n /= fit;
+
+                if (remainder_n) {
+                        remainder = blist_root_new();
+                        if (remainder == NULL)
+                                goto error;
+                        remainder->n = self->n * remainder_n;
+                        remainder_n *= self->num_children;
+                        remainder->leaf = self->leaf;
+                        xcopyref(remainder, 0, rv, 0, remainder_n);
+                        remainder->num_children = remainder_n;
+                        check_invariants(remainder);
+                }
+        }
+
+        if (n == 0)
+                goto do_remainder;
+
+        /* Binary exponentiation: double `power`, appending it to rv
+         * for each set bit of n. */
+        power = rv;
+        rv = blist_root_new();
+        if (rv == NULL) {
+                SAFE_XDECREF(remainder);
+        error:
+                SAFE_DECREF(power);
+                return _ob(NULL);
+        }
+
+        if (n & 1)
+                blist_become(rv, power);
+
+        for (mask = 2; mask <= n; mask <<= 1) {
+                blist_double(power);
+                if (mask & n)
+                        blist_extend_blist(rv, power);
+        }
+        SAFE_DECREF(power);
+
+ do_remainder:
+
+        if (remainder) {
+                blist_extend_blist(rv, remainder);
+                SAFE_DECREF(remainder);
+        }
+
+        check_invariants(rv);
+        ext_mark(rv, 0, DIRTY);
+        return _ob((PyObject *) rv);
+}
+
+/* Recursively copy-on-write every shared node in the subtree, so the
+ * whole tree becomes privately owned (safe for in-place mutation). */
+BLIST_LOCAL(void)
+linearize_rw_r(PyBList *self)
+{
+        int i;
+
+        invariants(self, VALID_PARENT|VALID_RW);
+
+        for (i = 0; i < self->num_children; i++) {
+                PyBList *restrict p = blist_PREPARE_WRITE(self, i);
+                if (!p->leaf)
+                        linearize_rw_r(p);
+        }
+
+        _void();
+        return;
+}
+
+/* Ensure the whole tree is privately owned and fully indexed
+ * (dirty_root == CLEAN_RW).  If the index is already CLEAN and every
+ * setclean bit is set, just upgrade the flag; otherwise do the full
+ * recursive copy-on-write pass and rebuild the index. */
+BLIST_LOCAL(void)
+linearize_rw(PyBListRoot *self)
+{
+        int i;
+
+        if (self->leaf || self->dirty_root == CLEAN_RW)
+                return;
+
+        if (self->dirty_root == CLEAN) {
+                Py_ssize_t n = SETCLEAN_LEN(INDEX_LENGTH(self));
+                for (i = 0; i < n; i++)
+                        if (self->setclean_list[i] != (unsigned) -1)
+                                goto slow;
+                memset(self->setclean_list, 255,
+                       SETCLEAN_LEN(INDEX_LENGTH(self)) * sizeof(unsigned));
+                self->dirty_root = CLEAN_RW;
+                return;
+        }
+
+slow:
+        linearize_rw_r((PyBList *)self);
+        ext_reindex_set_all(self);
+}
+
+/* Reverse the list in place.  A root leaf is just a slice reversal;
+ * otherwise the tree is linearized and the index_list is walked from
+ * both ends, swapping elements leaf-by-leaf with an unrolled
+ * (Duff's-device) swap loop.  Leaf boundaries are advanced as each
+ * side's leaf is exhausted; a final partial slice in the middle leaf
+ * is reversed at the end. */
+BLIST_LOCAL(void)
+blist_reverse(PyBListRoot *restrict self)
+{
+        int idx, ridx;
+        PyBList *restrict left, *restrict right;
+        register PyObject **restrict slice1;
+        register PyObject **restrict slice2;
+        int n1, n2;
+
+        invariants(self, VALID_ROOT|VALID_RW);
+
+        if (self->leaf) {
+                reverse_slice(self->children,
+                              &self->children[self->num_children]);
+                _void();
+                return;
+        }
+
+        linearize_rw(self);
+
+        /* A leaf spanning two index slots appears twice; skip the
+         * duplicate entry on each side. */
+        idx = 0;
+        left = self->index_list[idx];
+        if (left == self->index_list[idx+1])
+                idx++;
+        slice1 = &left->children[0];
+        n1 = left->num_children;
+
+        ridx = INDEX_LENGTH(self)-1;
+        right = self->index_list[ridx];
+        if (right == self->index_list[ridx-1])
+                ridx--;
+        slice2 = &right->children[right->num_children-1];
+        n2 = right->num_children;
+
+        while (idx < ridx) {
+                /* Swap n = min(n1, n2) elements, unrolled 32 per pass. */
+                int n = (n1 < n2) ? n1 : n2;
+                int count = (n+31) / 32;
+                switch (n & 31) {
+                        register PyObject *t;
+                case 0: do { t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 31:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 30:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 29:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 28:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 27:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 26:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 25:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 24:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 23:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 22:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 21:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 20:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 19:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 18:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 17:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 16:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 15:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 14:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 13:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 12:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 11:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 10:     t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 9:      t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 8:      t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 7:      t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 6:      t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 5:      t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 4:      t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 3:      t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 2:      t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                case 1:      t = *slice1; *slice1++ = *slice2; *slice2-- = t;
+                } while (--count > 0);
+                }
+
+                n1 -= n;
+                if (!n1) {
+                        idx++;
+                        left = self->index_list[idx];
+                        if (left == self->index_list[idx+1])
+                                idx++;
+                        slice1 = &left->children[0];
+                        n1 = left->num_children;
+                }
+
+                n2 -= n;
+                if (!n2) {
+                        ridx--;
+                        right = self->index_list[ridx];
+                        if (right == self->index_list[ridx-1])
+                                ridx--;
+                        slice2 = &right->children[right->num_children-1];
+                        n2 = right->num_children;
+                }
+        }
+
+        if (left == right && slice1 < slice2)
+                reverse_slice(slice1, slice2 + 1);
+
+        _void();
+        return;
+}
+
+/* Append v to the list.  Fast path: walk down the right-most spine,
+ * bumping each node's element count, and drop v into the last leaf.
+ * If any spine node is shared or the leaf is full, the optimistic
+ * count bumps are rolled back and the general insert path is used.
+ * Returns 0 on success, -1 on overflow of the list length. */
+BLIST_LOCAL(int)
+blist_append(PyBList *self, PyObject *v)
+{
+        PyBList *overflow;
+        PyBList *p;
+
+        invariants(self, VALID_ROOT|VALID_RW);
+
+        if (self->n == PY_SSIZE_T_MAX) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "cannot add more objects to list");
+                return _int(-1);
+        }
+
+        for (p = self; !p->leaf; p= (PyBList*)p->children[p->num_children-1]) {
+                if (p != self && Py_REFCNT(p) > 1)
+                        goto cleanup_and_slow;
+                p->n++;
+        }
+
+        if (p->num_children == LIMIT || (p != self && Py_REFCNT(p) > 1)) {
+                PyBList *p2;
+        cleanup_and_slow:
+                /* Undo the optimistic p->n++ done on the way down. */
+                for (p2 = self; p2 != p;
+                     p2 = (PyBList*)p2->children[p2->num_children-1])
+                        p2->n--;
+                goto slow;
+        }
+
+        p->children[p->num_children++] = v;
+        p->n++;
+        Py_INCREF(v);
+
+        if ((self->n-1) % INDEX_FACTOR == 0)
+                ext_mark(self, 0, DIRTY);
+#ifdef Py_DEBUG
+        else
+                ((PyBListRoot*)self)->last_n++;
+#endif
+        check_invariants(self);
+        return _int(0);
+
+ slow:
+        overflow = ins1(self, self->n, v);
+        if (overflow)
+                blist_overflow_root(self, overflow);
+        ext_mark(self, 0, DIRTY);
+
+        return _int(0);
+}
+
+/************************************************************************
+ * Sorting code
+ *
+ * Bits and pieces swiped from Python's listobject.c
+ *
+ * Invariant: In case of an error, any sort function returns the list
+ * to a valid state before returning. "Valid" means that each item
+ * originally in the list is still in the list. No removals, no
+ * additions, and no changes to the reference counters.
+ *
+ ************************************************************************/
+
+/* Undo wrap_leaf_array(): restore the first n children of the given
+ * leaf nodes from sortwrapperobjects back to their original values and
+ * drop the key references.  Re-enables the GC tracking that
+ * wrap_leaf_array() disabled (only done when leafs_n > 1). */
+static void
+unwrap_leaf_array(PyBList **leafs, int leafs_n, int n,
+                  sortwrapperobject *array)
+{
+        int i, j, k = 0;
+
+        for (i = 0; i < leafs_n; i++) {
+                PyBList *leaf = leafs[i];
+                if (leafs_n > 1 && !_PyObject_GC_IS_TRACKED(leafs[i]))
+                        PyObject_GC_Track(leafs[i]);
+                /* k counts wrappers unwound so far; stopping at n
+                 * supports unwinding a partially wrapped prefix after
+                 * an error in wrap_leaf_array(). */
+                for (j = 0; j < leaf->num_children && k < n; j++, k++) {
+                        sortwrapperobject *wrapper;
+                        wrapper = (sortwrapperobject *) leaf->children[j];
+                        leaf->children[j] = wrapper->value;
+                        DANGER_BEGIN;
+                        Py_DECREF(wrapper->key);
+                        DANGER_END;
+                }
+        }
+}
+
+#define KEY_ALL_DOUBLE 1
+#define KEY_ALL_LONG 2
+
+/* Replace each child of the given leaf nodes with a sortwrapperobject
+ * from array, pairing the child (value) with its sort key (the value
+ * itself, or keyfunc(value)).  Numeric keys are also encoded into
+ * order-preserving unsigned radix keys; *pkey_flags reports whether
+ * ALL keys were doubles and/or ALL fit in a long, so the caller can
+ * choose a radix sort.  GC tracking of the leafs is suspended while
+ * they hold wrapper pointers (when leafs_n > 1).
+ * Returns 0 on success; -1 if keyfunc raised (already unwrapped). */
+static int
+wrap_leaf_array(sortwrapperobject *restrict array,
+                PyBList **leafs, int leafs_n, int n,
+                PyObject *restrict keyfunc,
+                int *restrict pkey_flags)
+{
+        int i, j, k;
+        int key_flags;
+
+        key_flags = KEY_ALL_DOUBLE | KEY_ALL_LONG;
+
+        for (k = i = 0; i < leafs_n; i++) {
+                PyBList *restrict leaf = leafs[i];
+                if (leafs_n > 1)
+                        PyObject_GC_UnTrack(leaf);
+
+                for (j = 0; j < leaf->num_children; j++) {
+                        sortwrapperobject *restrict pair = &array[k];
+                        PyObject *restrict key, *value = leaf->children[j];
+                        PyTypeObject *type;
+                        if (keyfunc == NULL) {
+                                key = value;
+                                Py_INCREF(key);
+                        } else {
+                                DANGER_BEGIN;
+                                key = PyObject_CallFunctionObjArgs(
+                                        keyfunc, value, NULL);
+                                DANGER_END;
+                                if (key == NULL) {
+                                        /* Roll back the k wrappers
+                                         * installed so far. */
+                                        unwrap_leaf_array(leafs, leafs_n, k, array);
+                                        return -1;
+                                }
+                        }
+                        type = key->ob_type;
+#ifdef BLIST_FLOAT_RADIX_SORT
+                        if (type == &PyFloat_Type) {
+                                /* Flip the sign bit (all bits for
+                                 * negatives) so unsigned comparison of
+                                 * the bit image matches float order. */
+                                double d = PyFloat_AS_DOUBLE(key);
+                                PY_UINT64_T di, mask;
+                                memcpy(&di, &d, 8);
+                                mask = (-(PY_INT64_T) (di >> 63))
+                                        | (1ull << 63ull);
+                                pair->fkey.k_uint64 = di ^ mask;
+                                key_flags &= KEY_ALL_DOUBLE;
+                        } else
+#endif
+#if PY_MAJOR_VERSION < 3
+                        if (type == &PyInt_Type) {
+                                /* Bias by the sign bit so unsigned
+                                 * order matches signed order. */
+                                long i = PyInt_AS_LONG(key);
+                                unsigned long u = i;
+                                const unsigned long mask = 1ul << (sizeof(long)*8-1);
+                                pair->fkey.k_ulong = u ^ mask;
+                                key_flags &= KEY_ALL_LONG;
+                        } else
+#endif
+                        if (type == &PyLong_Type) {
+                                unsigned long x = PyLong_AsLong(key);
+                                if (x == (unsigned long) (long) -1
+                                    && PyErr_Occurred()) {
+                                        /* Too big for a long: give up
+                                         * on radix sorting. */
+                                        PyErr_Clear();
+                                        key_flags = 0;
+                                } else {
+                                        const unsigned long mask = 1ul << (sizeof(long)*8-1);
+                                        pair->fkey.k_ulong = x ^ mask;
+                                        key_flags &= KEY_ALL_LONG;
+                                }
+                        } else
+                                key_flags = 0;
+                        pair->key = key;
+                        pair->value = value;
+                        leaf->children[j] = (PyObject*) pair;
+                        k++;
+                }
+        }
+
+        assert(k == n);
+
+        *pkey_flags = key_flags;
+        return 0;
+}
+
+/* If COMPARE is NULL, calls PyObject_RichCompareBool with Py_LT, else calls
+ * islt. This avoids a layer of function call in the usual case, and
+ * sorting does many comparisons.
+ * Returns -1 on error, 1 if x < y, 0 if x >= y.
+ *
+ * In Python 3, COMPARE is always NULL.
+ */
+
+#define FAST_ISLT(X, Y, fast_cmp_type) \
+ (fast_lt(((sortwrapperobject *)(X))->key, \
+ ((sortwrapperobject *)(Y))->key, \
+ (fast_cmp_type)))
+
+#if PY_MAJOR_VERSION < 3
+#define ISLT(X, Y, COMPARE, fast_cmp_type) \
+ ((COMPARE) == NULL ? \
+ FAST_ISLT(X, Y, fast_cmp_type) : \
+ islt(X, Y, COMPARE))
+#else
+#define ISLT(X, Y, COMPARE, fast_cmp_type) \
+ (FAST_ISLT((X), (Y), (fast_cmp_type)))
+#endif
+
+#if PY_MAJOR_VERSION < 3
+/* XXX
+
+   Efficiency improvement:
+   Keep one PyTuple in a global spot and just change what it points to.
+   We can also skip all the INCREF/DECREF stuff then and just borrow
+   references
+*/
+/* Python 2 user-supplied cmp() path: call compare(x, y) and return 1
+ * if the result is negative (x < y), 0 otherwise, -1 on error. */
+static int islt(PyObject *x, PyObject *y, PyObject *compare)
+{
+        PyObject *res;
+        PyObject *args;
+        Py_ssize_t i;
+
+        /* The argument tuple steals these references below. */
+        Py_INCREF(x);
+        Py_INCREF(y);
+
+        DANGER_BEGIN;
+        args = PyTuple_New(2);
+        DANGER_END;
+
+        if (args == NULL) {
+                DANGER_BEGIN;
+                Py_DECREF(x);
+                Py_DECREF(y);
+                DANGER_END;
+                return -1;
+        }
+
+        PyTuple_SET_ITEM(args, 0, x);
+        PyTuple_SET_ITEM(args, 1, y);
+        DANGER_BEGIN;
+        res = PyObject_Call(compare, args, NULL);
+        Py_DECREF(args);
+        DANGER_END;
+        if (res == NULL)
+                return -1;
+        /* cmp() must return an int, like list.sort() requires. */
+        if (!PyInt_CheckExact(res)) {
+                PyErr_Format(PyExc_TypeError,
+                             "comparison function must return int, not %.200s",
+                             Py_TYPE(res)->tp_name);
+                Py_DECREF(res);
+                return -1;
+        }
+        i = PyInt_AsLong(res);
+        Py_DECREF(res);
+        return i < 0;
+}
+#endif
+
+#define INSERTION_THRESH 0
+#define BINARY_THRESH 10
+
+#define TESTSWAP(i, j) { \
+ if (fast_lt(sortarray[j], sortarray[i], fast_cmp_type)) { \
+ PyObject *t = sortarray[j]; \
+ sortarray[j] = sortarray[i]; \
+ sortarray[i] = t; \
+ } \
+ }
+
+#if 0
+/* Disabled: fixed sorting networks for arrays of 2..6 elements.
+ * Kept for reference; not compiled. */
+BLIST_LOCAL(int)
+network_sort(PyObject **sortarray, Py_ssize_t n)
+{
+        fast_compare_data_t fast_cmp_type;
+        fast_cmp_type = check_fast_cmp_type(sortarray[0], Py_LT);
+
+        switch(n) {
+        case 0:
+        case 1:
+                assert(0);
+        case 2:
+                TESTSWAP(0, 1);
+                return 0;
+        case 3:
+                TESTSWAP(0, 1);
+                TESTSWAP(0, 2);
+                TESTSWAP(1, 2);
+                return 0;
+        case 4:
+                TESTSWAP(0, 1);
+                TESTSWAP(2, 3);
+                TESTSWAP(0, 2);
+                TESTSWAP(1, 3);
+                TESTSWAP(1, 2);
+                return 0;
+        case 5:
+                TESTSWAP(0, 1);
+                TESTSWAP(3, 4);
+                TESTSWAP(2, 4);
+                TESTSWAP(2, 3);
+                TESTSWAP(1, 4);
+                TESTSWAP(0, 3);
+                TESTSWAP(0, 2);
+                TESTSWAP(1, 3);
+                TESTSWAP(1, 2);
+                return 0;
+        case 6:
+                TESTSWAP(1, 2);
+                TESTSWAP(4, 5);
+
+                TESTSWAP(0, 2);
+                TESTSWAP(3, 5);
+
+                TESTSWAP(0, 1);
+                TESTSWAP(3, 4);
+                TESTSWAP(2, 5);
+
+                TESTSWAP(0, 3);
+                TESTSWAP(1, 4);
+
+                TESTSWAP(2, 4);
+                TESTSWAP(1, 3);
+
+                TESTSWAP(2, 3);
+                return 0;
+        default:
+                /* Should not be possible */
+                assert (0);
+                abort();
+        }
+}
+#endif
+
+#ifdef BLIST_FLOAT_RADIX_SORT
+/* Stable insertion sort of array[0..n-1] by the 64-bit radix key
+ * (bit-encoded doubles).  Only fkey/value move; always returns 0. */
+BLIST_LOCAL_INLINE(int)
+insertion_sort_uint64(sortwrapperobject *array, Py_ssize_t n)
+{
+        int i, j;
+        PY_UINT64_T tmp_key;
+        PyObject *tmp_value;
+        for (i = 1; i < n; i++) {
+                tmp_key = array[i].fkey.k_uint64;
+                tmp_value = array[i].value;
+                /* Shift strictly larger keys right to open a hole. */
+                for (j = i; j >= 1; j--) {
+                        if (tmp_key >= array[j-1].fkey.k_uint64)
+                                break;
+                        array[j].fkey.k_uint64 = array[j-1].fkey.k_uint64;
+                        array[j].value = array[j-1].value;
+                }
+                array[j].fkey.k_uint64 = tmp_key;
+                array[j].value = tmp_value;
+        }
+
+        return 0;
+}
+#endif
+
+/* Stable insertion sort of array[0..n-1] by the unsigned-long radix
+ * key.  Only the fkey/value fields are moved; always returns 0. */
+BLIST_LOCAL_INLINE(int)
+insertion_sort_ulong(sortwrapperobject *restrict array, Py_ssize_t n)
+{
+        int pos, hole;
+
+        for (pos = 1; pos < n; pos++) {
+                unsigned long key = array[pos].fkey.k_ulong;
+                PyObject *value = array[pos].value;
+
+                /* Slide strictly larger entries right until the hole
+                 * is at the correct slot for (key, value). */
+                hole = pos;
+                while (hole >= 1 && array[hole-1].fkey.k_ulong > key) {
+                        array[hole].fkey.k_ulong = array[hole-1].fkey.k_ulong;
+                        array[hole].value = array[hole-1].value;
+                        hole--;
+                }
+
+                array[hole].fkey.k_ulong = key;
+                array[hole].value = value;
+        }
+
+        return 0;
+}
+
+#if 0
+/* Disabled: binary insertion sort on 64-bit keys.  Kept for
+ * reference; not compiled. */
+static int binary_sort_int64(sortwrapperobject *array, int n)
+{
+        int i, j, low, high, mid, c;
+        sortwrapperobject tmp;
+
+        for (i = 1; i < n; i++) {
+                tmp = array[i];
+
+                c = INT64_LT(tmp, array[i-1]);
+                if (c < 0)
+                        return -1;
+                if (c == 0)
+                        continue;
+
+                low = 0;
+                high = i-1;
+
+                while (low < high) {
+                        mid = low + (high - low)/2;
+                        c = INT64_LT(tmp, array[mid]);
+                        if (c < 0)
+                                return -1;
+                        if (c == 0)
+                                low = mid+1;
+                        else
+                                high = mid;
+                }
+
+                for (j = i; j >= low; j--)
+                        array[j] = array[j-1];
+
+                array[low] = tmp;
+        }
+
+        return 0;
+}
+#endif
+
+/* Stable in-place merge of the sorted runs array[0..middle-1] and
+ * array[middle..n-1], using a stack copy of (part of) the left run as
+ * scratch (at most LIMIT pointers).  Returns 0 on success, -1 if a
+ * comparison raised; in either case every element stays in the
+ * array. */
+BLIST_LOCAL(int)
+mini_merge(PyObject **array, int middle, int n, PyObject *compare)
+{
+        int c, ret = 0;
+
+        PyObject *copy[LIMIT];
+        PyObject **left;
+        PyObject **right = &array[middle];
+        PyObject **rend = &array[n];
+        PyObject **lend = &copy[middle];
+        PyObject **src;
+        PyObject **dst;
+        fast_compare_data_t fast_cmp_type;
+
+        assert (middle <= LIMIT);
+
+        fast_cmp_type = check_fast_cmp_type(array[0], Py_LT);
+
+        /* Skip the prefix of the left run that is already in place. */
+        for (left = array; left < right; left++) {
+                c = ISLT(*right, *left, compare, fast_cmp_type);
+                if (c < 0)
+                        return -1;
+                if (c)
+                        goto normal;
+        }
+
+        /* The two runs were already in order; nothing to move. */
+        return 0;
+
+ normal:
+        src = left;
+        dst = left;
+
+        /* Copy the rest of the left run into scratch. */
+        for (left = copy; src < right; left++)
+                *left = *src++;
+
+        lend = left;
+
+        /* The first right element is known smaller (from c above). */
+        *dst++ = *right++;
+
+        /* Standard merge; ties go left to keep the sort stable. */
+        for (left = copy; left < lend && right < rend; dst++) {
+                c = ISLT(*right, *left, compare, fast_cmp_type);
+                if (c < 0) {
+                        ret = -1;
+                        goto done;
+                }
+                if (c == 0)
+                        *dst = *left++;
+                else
+                        *dst = *right++;
+        }
+
+ done:
+        /* Flush the scratch remainder (also restores the array after
+         * an error, keeping the no-loss invariant). */
+        while (left < lend)
+                *dst++ = *left++;
+
+        return ret;
+}
+
+#define RUN_THRESH 5
+
+/* Sort array[0..n-1]: detect natural ascending/descending runs of at
+ * least RUN_THRESH elements (descending runs get reversed), use binary
+ * insertion inside short stretches, then merge the collected runs
+ * pairwise with mini_merge().  Returns 0 on success, -1 if a
+ * comparison raised; the array always keeps all of its elements. */
+BLIST_LOCAL(int)
+gallop_sort(PyObject **array, int n, PyObject *compare)
+{
+        int i;
+        int run_start = 0, run_dir = 2; /* 2 == direction not yet known */
+        PyObject **runs[LIMIT/RUN_THRESH+2];
+        int ns[LIMIT/RUN_THRESH+2];
+        int num_runs = 0;
+        PyObject **run = array;
+        fast_compare_data_t fast_cmp_type;
+
+        if (n < 2) return 0;
+
+        fast_cmp_type = check_fast_cmp_type(array[0], Py_LT);
+
+        for (i = 1; i < n; i++) {
+                int c = ISLT(array[i], array[i-1], compare, fast_cmp_type);
+                assert(c < 0 || c == 0 || c == 1);
+                if (c == run_dir)
+                        continue;       /* current run keeps going */
+                if (c < 0)
+                        return -1;
+                if (run_start == i-1)
+                        run_dir = c;    /* second element fixes direction */
+                else if (i - run_start >= RUN_THRESH) {
+                        /* Run long enough: close it out and start a
+                         * fresh one at i. */
+                        if (run_dir > 0)
+                                reverse_slice(run, &array[i]);
+                        runs[num_runs] = run;
+                        ns[num_runs++] = i - run_start;
+                        run = &array[i];
+                        run_start = i;
+                        run_dir = 2;
+                } else {
+                        /* Short run: binary-insert array[i] into it. */
+                        int j;
+                        int low = run - array;
+                        int high = i-1;
+                        int mid;
+                        PyObject *tmp = array[i];
+
+                        /* XXX: Is this a stable sort? */
+                        /* XXX: In both directions? */
+
+                        while (low < high) {
+                                mid = low + (high - low)/2;
+                                c = ISLT(tmp, array[mid], compare,
+                                         fast_cmp_type);
+                                assert(c < 0 || c == 0 || c == 1);
+                                if (c == run_dir)
+                                        low = mid+1;
+                                else if (c < 0)
+                                        return -1;
+                                else
+                                        high = mid;
+                        }
+
+                        for (j = i; j > low; j--)
+                                array[j] = array[j-1];
+
+                        array[low] = tmp;
+                }
+        }
+
+        /* Close out the final run. */
+        if (run_dir > 0)
+                reverse_slice(run, &array[n]);
+        runs[num_runs] = run;
+        ns[num_runs++] = n - run_start;
+
+        /* Merge runs pairwise until one remains. */
+        while(num_runs > 1) {
+                for (i = 0; i < num_runs/2; i++) {
+                        int total = ns[2*i] + ns[2*i+1];
+                        if (0 > mini_merge(runs[2*i], ns[2*i], total,
+                                           compare)) {
+                                /* List valid due to invariants */
+                                return -1;
+                        }
+
+                        runs[i] = runs[2*i];
+                        ns[i] = total;
+                }
+
+                /* Odd run left over: carry it to the next round. */
+                if (num_runs & 1) {
+                        runs[i] = runs[num_runs - 1];
+                        ns[i] = ns[num_runs - 1];
+                }
+                num_runs = (num_runs+1)/2;
+        }
+
+        assert(ns[0] == n);
+
+        return 0;
+
+}
+
+#if 0
+/* Disabled: bottom-up merge sort built on binary_sort/mini_merge.
+ * Kept for reference; not compiled. */
+BLIST_LOCAL(int)
+mini_merge_sort(PyObject **array, int n, PyObject *compare)
+{
+        int i, run_size = BINARY_THRESH;
+
+        for (i = 0; i < n; i += run_size) {
+                int len = run_size;
+                if (n - i < len)
+                        len = n - i;
+                if (binary_sort(&array[i], len, compare) < 0)
+                        return -1;
+        }
+
+        run_size *= 2;
+        while (run_size < n) {
+                for (i = 0; i < n; i += run_size) {
+                        int len = run_size;
+                        if (n - i < len)
+                                len = n - i;
+                        if (len <= run_size/2)
+                                continue;
+                        if (mini_merge(&array[i], run_size/2, len, compare) < 0)
+                                return -1;
+                }
+                run_size *= 2;
+        }
+
+        return 0;
+}
+#endif
+
+#if PY_MAJOR_VERSION < 3
+/* Return 1 when cmpfunc is either absent (NULL or None) or Python 2's
+ * built-in cmp() function -- i.e. the default comparison; 0 for any
+ * user-supplied comparison. */
+BLIST_LOCAL(int)
+is_default_cmp(PyObject *cmpfunc)
+{
+        PyCFunctionObject *func;
+
+        if (cmpfunc == NULL || cmpfunc == Py_None)
+                return 1;
+        if (!PyCFunction_Check(cmpfunc))
+                return 0;
+
+        func = (PyCFunctionObject *)cmpfunc;
+        if (func->m_self != NULL)
+                return 0;
+        if (!PyString_Check(func->m_module))
+                return 0;
+        /* Must be the unbound builtin named "cmp". */
+        if (strcmp(PyString_AS_STRING(func->m_module), "__builtin__") != 0
+            || strcmp(func->m_ml->ml_name, "cmp") != 0)
+                return 0;
+        return 1;
+}
+#endif
+
+/* Concatenate the two leaf-node arrays into out -- in1's n1 nodes
+ * followed by in2's n2 nodes -- without examining any elements. */
+BLIST_LOCAL(void)
+do_fast_merge(PyBList **restrict out, PyBList **in1, PyBList **in2,
+              int n1, int n2)
+{
+        /* The two destination regions are disjoint, so order does not
+         * matter. */
+        memcpy(&out[n1], in2, n2 * sizeof(PyBList *));
+        memcpy(&out[0], in1, n1 * sizeof(PyBList *));
+}
+
+/* If one input's last element already precedes the other input's first
+ * element, the "merge" is plain concatenation.  Returns 1 (with out
+ * filled) when that shortcut applied, 0 when a real merge is needed.
+ * If a comparison raises, sets *err but still concatenates so no
+ * elements are lost. */
+BLIST_LOCAL(int)
+try_fast_merge(PyBList **restrict out, PyBList **in1, PyBList **in2,
+               Py_ssize_t n1, Py_ssize_t n2,
+               PyObject *compare, int *err)
+{
+        int c;
+        PyBList *end;
+
+        end = in1[n1-1];
+
+        /* Does all of in1 precede all of in2? */
+        c = ISLT(end->children[end->num_children-1],
+                 in2[0]->children[0], compare, no_fast_lt);
+
+        if (c < 0) {
+        error:
+                *err = -1;
+                do_fast_merge(out, in1, in2, n1, n2);
+                return 1;
+        } else if (c) {
+                do_fast_merge(out, in1, in2, n1, n2);
+                return 1;
+        }
+
+        end = in2[n2-1];
+
+        /* Or does all of in2 precede all of in1? */
+        c = ISLT(end->children[end->num_children-1],
+                 in1[0]->children[0], compare, no_fast_lt);
+        if (c < 0)
+                goto error;
+        else if (c) {
+                do_fast_merge(out, in2, in1, n2, n1);
+                return 1;
+        }
+
+        return 0;
+}
+
+/* Merge two sorted arrays of leaf nodes (in1, in2) into out, consuming
+ * the input leafs as they empty.  Emits full output leafs, then
+ * appends any partially consumed input leaf and the untouched tails of
+ * the inputs.  Returns the number of leafs written to out; unused
+ * slots up to n1+n2 are NULLed for the caller.  If a comparison
+ * raises, sets *err and finishes without losing any elements. */
+BLIST_LOCAL(int)
+sub_merge(PyBList **restrict out, PyBList **in1, PyBList **in2,
+          Py_ssize_t n1, Py_ssize_t n2,
+          PyObject *compare, int *err)
+{
+        int c;
+        Py_ssize_t i, j;
+        PyBList *restrict leaf1, *restrict leaf2, *restrict output;
+        int leaf1_i = 0, leaf2_i = 0;
+        Py_ssize_t nout = 0;
+        fast_compare_data_t fast_cmp_type;
+
+        if (try_fast_merge(out, in1, in2, n1, n2, compare, err))
+                return n1 + n2;
+
+        leaf1 = in1[leaf1_i++];
+        leaf2 = in2[leaf2_i++];
+
+        i = 0; /* Index into leaf 1 */
+        j = 0; /* Index into leaf 2 */
+
+        output = blist_new_no_GC();
+
+        fast_cmp_type = check_fast_cmp_type(leaf1->children[0], Py_LT);
+
+        while ((leaf1_i < n1 || i < leaf1->num_children)
+               && (leaf2_i < n2 || j < leaf2->num_children)) {
+
+                /* Check if we need to get a new input leaf node */
+                if (i == leaf1->num_children) {
+                        /* Emptied leaf gives up its children before
+                         * being released. */
+                        leaf1->num_children = 0;
+                        SAFE_DECREF(leaf1);
+                        leaf1 = in1[leaf1_i++];
+                        i = 0;
+                }
+
+                if (j == leaf2->num_children) {
+                        leaf2->num_children = 0;
+                        SAFE_DECREF(leaf2);
+                        leaf2 = in2[leaf2_i++];
+                        j = 0;
+                }
+
+                assert (i < leaf1->num_children);
+                assert (j < leaf2->num_children);
+
+                /* Check if we have filled up an output leaf node */
+                if (output->n == LIMIT) {
+                        out[nout++] = output;
+                        output = blist_new_no_GC();
+                }
+
+                /* Figure out which input leaf has the lower element */
+                c = ISLT(leaf2->children[j], leaf1->children[i], compare,
+                         fast_cmp_type);
+                if (c < 0) {
+                        *err = -1;
+                        goto done;
+                }
+                if (c == 0) {
+                        /* Ties take leaf1's element, keeping the merge
+                         * stable. */
+                        output->children[output->num_children++]
+                                = leaf1->children[i++];
+                } else {
+                        output->children[output->num_children++]
+                                = leaf2->children[j++];
+                }
+
+                output->n++;
+        }
+
+ done:
+        /* Append our partially-complete output leaf node */
+        nout = append_and_squish(out, nout, output);
+
+        /* Append a partially-consumed input leaf node, if one exists */
+        if (i < leaf1->num_children) {
+                shift_left(leaf1, i, i);
+                leaf1->num_children -= i;
+                leaf1->n -= i;
+                nout = append_and_squish(out, nout, leaf1);
+        } else {
+                leaf1->num_children = 0;
+                SAFE_DECREF(leaf1);
+        }
+
+        if (j < leaf2->num_children) {
+                shift_left(leaf2, j, j);
+                leaf2->num_children -= j;
+                leaf2->n -= j;
+                nout = append_and_squish(out, nout, leaf2);
+        } else {
+                leaf2->num_children = 0;
+                SAFE_DECREF(leaf2);
+        }
+
+        nout = balance_last_2(out, nout);
+
+        /* Append the rest of any input that still has nodes. */
+        if (leaf1_i < n1) {
+                memcpy(&out[nout], &in1[leaf1_i],
+                       sizeof(PyBList *) * (n1 - leaf1_i));
+                nout += n1 - leaf1_i;
+        }
+
+        if (leaf2_i < n2) {
+                memcpy(&out[nout], &in2[leaf2_i],
+                       sizeof(PyBList *) * (n2 - leaf2_i));
+                nout += n2 - leaf2_i;
+        }
+
+        /* NULL the unused tail of out. */
+        for (i = nout; i < n1+n2; i++)
+                out[i] = NULL;
+
+        assert(nout <= n1+n2);
+
+        return nout;
+}
+
+/* Recursive merge sort over an array of n leaf nodes.
+ * If swap is true, place the output in scratch.
+ * Otherwise, place the output in "in".
+ * Returns the number of leafs in the output (merging can shrink it).
+ * Once *err is set, remaining work degrades to plain copies so all
+ * leafs are preserved. */
+BLIST_LOCAL(Py_ssize_t)
+sub_sort(PyBList **restrict scratch, PyBList **in, PyObject *compare,
+         Py_ssize_t n, int *err, int swap)
+{
+        Py_ssize_t half, n1, n2;
+
+        if (!n) return n;
+        if (*err) {
+                /* Error already raised: just move the data where the
+                 * caller expects it. */
+                if (swap)
+                        memcpy(scratch, in, n * sizeof(PyBList *));
+                return n;
+        }
+        if (n == 1) {
+                /* Single leaf: sort its children directly. */
+                *err |= gallop_sort(in[0]->children, in[0]->num_children,
+                                    compare);
+                *scratch = *in;
+                return 1;
+        }
+
+        half = n / 2;
+
+        /* Sort each half with the opposite swap sense... */
+        n1 = sub_sort(scratch, in, compare, half, err, !swap);
+        n2 = sub_sort(&scratch[half], &in[half], compare, n-half, err, !swap);
+
+        /* If swap is true, the output is currently in "in".
+         * Otherwise, the output is currently in scratch.
+         *
+         * sub_merge() will reverse it.
+         */
+
+        if (!*err) {
+                if (swap)
+                        n = sub_merge(scratch, in, &in[half], n1, n2, compare, err);
+                else
+                        n = sub_merge(in, scratch, &scratch[half], n1, n2, compare, err);
+        } else {
+                if (swap) {
+                        memcpy(scratch, in, n1 * sizeof(PyBList *));
+                        memcpy(&scratch[n1], &in[half], n2 * sizeof(PyBList *));
+                } else {
+                        memcpy(in, scratch, n1 * sizeof(PyBList *));
+                        memcpy(&in[n1], &scratch[half], n2 * sizeof(PyBList *));
+                }
+                n = n1 + n2;
+        }
+        return n;
+}
+
+#if 0
+/* Disabled: bulk GC-untrack helper (unused; wrap_leaf_array untracks
+ * inline).  Kept for reference; not compiled. */
+BLIST_LOCAL_INLINE(void)
+array_disable_GC(PyBList **leafs, Py_ssize_t num_leafs)
+{
+        Py_ssize_t i;
+        for (i = 0; i < num_leafs; i++)
+                PyObject_GC_UnTrack(leafs[i]);
+}
+#endif
+
+/* Resume garbage-collector tracking for every leaf in the array. */
+BLIST_LOCAL_INLINE(void)
+array_enable_GC(PyBList **leafs, Py_ssize_t num_leafs)
+{
+        PyBList **cursor = leafs;
+        PyBList **end = leafs + num_leafs;
+
+        while (cursor < end)
+                PyObject_GC_Track(*cursor++);
+}
+
+#define BITS_PER_PASS 8
+#define HISTOGRAM_SIZE (((Py_ssize_t) 1) << BITS_PER_PASS)
+#define MASK (HISTOGRAM_SIZE - 1)
+#define NUM_PASSES (((sizeof(unsigned long)*8-1) / BITS_PER_PASS)+1)
+
+/* LSB radix sort of sortarray[0..n-1] by the k_ulong key,
+ * BITS_PER_PASS bits per pass, ping-ponging between sortarray and a
+ * scratch buffer.  Passes in which every key lands in a single bucket
+ * are skipped.  Returns 0, or -1 if the scratch allocation fails (the
+ * array is untouched in that case). */
+BLIST_LOCAL_INLINE(int)
+sort_ulong(sortwrapperobject *restrict sortarray, Py_ssize_t n)
+{
+        sortwrapperobject *restrict scratch, *from, *to, *tmp;
+        Py_ssize_t histograms[HISTOGRAM_SIZE][NUM_PASSES];
+        Py_ssize_t i, j, sums[NUM_PASSES], count[NUM_PASSES], tsum;
+
+        memset(sums, 0, sizeof sums);
+        memset(count, 0, sizeof count);
+
+        scratch = PyMem_New(sortwrapperobject, n);
+        if (scratch == NULL)
+                return -1;
+
+        /* Build the histograms for all passes in one sweep. */
+        memset(histograms, 0, sizeof histograms);
+        for (i = 0; i < n; i++) {
+                unsigned long v = sortarray[i].fkey.k_ulong;
+                for (j = 0; j < NUM_PASSES; j++) {
+                        histograms[(v >> (BITS_PER_PASS * j)) & MASK][j]++;
+                }
+        }
+
+        /* Convert counts to (exclusive prefix sum - 1), ready for the
+         * pre-increment below; count[j] tracks occupied buckets so
+         * trivial passes can be skipped. */
+        for (i = 0; i < HISTOGRAM_SIZE; i++) {
+                for (j = 0; j < NUM_PASSES; j++) {
+                        count[j] += !!histograms[i][j];
+                        tsum = histograms[i][j] + sums[j];
+                        histograms[i][j] = sums[j] - 1;
+                        sums[j] = tsum;
+                }
+        }
+
+        from = sortarray;
+        to = scratch;
+        for (j = 0; j < NUM_PASSES; j++) {
+                sortwrapperobject *restrict f = from;
+                sortwrapperobject *restrict t = to;
+                if (count[j] == 1) continue; /* all keys in one bucket */
+                for (i = 0; i < n; i++) {
+                        unsigned long fi = f[i].fkey.k_ulong;
+                        Py_ssize_t pos = (fi >> (BITS_PER_PASS * j)) & MASK;
+                        pos = ++histograms[pos][j];
+                        t[pos].fkey.k_ulong = fi;
+                        t[pos].value = f[i].value;
+                }
+
+                tmp = from;
+                from = to;
+                to = tmp;
+        }
+
+        /* Odd number of executed passes: the sorted values are in
+         * scratch, so copy them back (keys are no longer needed). */
+        if (from != sortarray)
+                for (i = 0; i < n; i++)
+                        sortarray[i].value = scratch[i].value;
+
+        PyMem_Free(scratch);
+        return 0;
+}
+
+#undef NUM_PASSES
+
+#ifdef BLIST_FLOAT_RADIX_SORT
+#if SIZEOF_LONG == 8
+/* With 64-bit longs, the ulong sorter already handles 64-bit keys. */
+#define sort_uint64 sort_ulong
+#else
+#define NUM_PASSES (((64-1) / BITS_PER_PASS)+1)
+
+/* Same LSB radix sort as sort_ulong, but over the 64-bit k_uint64 key
+ * (encoded doubles) for platforms with 32-bit longs.  Returns 0, or
+ * -1 if the scratch allocation fails. */
+BLIST_LOCAL_INLINE(int)
+sort_uint64(sortwrapperobject *restrict sortarray, Py_ssize_t n)
+{
+        sortwrapperobject *restrict scratch, *from, *to, *tmp;
+        Py_ssize_t histograms[HISTOGRAM_SIZE][NUM_PASSES];
+        Py_ssize_t i, j, sums[NUM_PASSES], count[NUM_PASSES], tsum;
+
+        memset(sums, 0, sizeof sums);
+        memset(count, 0, sizeof count);
+
+        scratch = PyMem_New(sortwrapperobject, n);
+        if (scratch == NULL)
+                return -1;
+
+        /* Histograms for all passes, built in one sweep. */
+        memset(histograms, 0, sizeof histograms);
+        for (i = 0; i < n; i++) {
+                PY_UINT64_T v = sortarray[i].fkey.k_uint64;
+                for (j = 0; j < NUM_PASSES; j++) {
+                        histograms[(v >> (BITS_PER_PASS * j)) & MASK][j]++;
+                }
+        }
+
+        /* Counts -> (exclusive prefix sum - 1); count[j] lets trivial
+         * passes be skipped. */
+        for (i = 0; i < HISTOGRAM_SIZE; i++) {
+                for (j = 0; j < NUM_PASSES; j++) {
+                        count[j] += !!histograms[i][j];
+                        tsum = histograms[i][j] + sums[j];
+                        histograms[i][j] = sums[j] - 1;
+                        sums[j] = tsum;
+                }
+        }
+
+        from = sortarray;
+        to = scratch;
+        for (j = 0; j < NUM_PASSES; j++) {
+                if (count[j] == 1) continue; /* all keys in one bucket */
+                for (i = 0; i < n; i++) {
+                        PY_UINT64_T fi = from[i].fkey.k_uint64;
+                        Py_ssize_t pos = (fi >> (BITS_PER_PASS * j)) & MASK;
+                        pos = ++histograms[pos][j];
+                        to[pos].fkey.k_uint64 = fi;
+                        to[pos].value = from[i].value;
+                }
+
+                tmp = from;
+                from = to;
+                to = tmp;
+        }
+
+        /* Copy values back if the last pass left them in scratch. */
+        if (from != sortarray)
+                for (i = 0; i < n; i++)
+                        sortarray[i].value = scratch[i].value;
+
+        PyMem_Free(scratch);
+        return 0;
+}
+
+#undef NUM_PASSES
+
+#endif
+#endif
+
+/* Sort the whole blist in place with an optional (Python 2) compare
+ * function and/or key function.  Strategy: collect all leaf nodes,
+ * wrap every child with its sort key, then pick a radix sort when all
+ * keys are numeric, an in-leaf gallop sort for a single leaf, or a
+ * leaf-wise merge sort otherwise.  Returns 0 on success, -1 on error;
+ * the list always retains all of its elements. */
+BLIST_LOCAL(Py_ssize_t)
+sort(PyBListRoot *restrict self, PyObject *compare, PyObject *keyfunc)
+{
+        PyBList *leaf;
+        PyBList **leafs;
+        int err=0;
+        Py_ssize_t i, leafs_n = 0;
+        sortwrapperobject sortarraystack[10];
+        sortwrapperobject *sortarray = sortarraystack;
+        int key_flags;
+
+        /* A leaf root is its own (only) leaf node. */
+        if (self->leaf)
+                leafs = &leaf;
+        else {
+                leafs = PyMem_New(PyBList *, self->n / HALF + 1);
+                if (!leafs)
+                        return -1;
+        }
+
+        if (self->leaf) {
+                leaf = (PyBList *) self;
+                leafs_n = 1;
+        } else {
+                /* Enumerate the leafs via the root index, skipping
+                 * consecutive index entries that repeat a leaf. */
+                linearize_rw(self);
+
+                assert(INDEX_LENGTH(self) <= self->index_allocated);
+                for (i = 0; i < INDEX_LENGTH(self)-1; i++) {
+                        leaf = self->index_list[i];
+                        if (leaf == self->index_list[i+1])
+                                continue;
+                        leafs[leafs_n++] = leaf;
+                        Py_INCREF(leaf);
+                }
+                leaf = self->index_list[i];
+                leafs[leafs_n++] = leaf;
+                Py_INCREF(leaf);
+        }
+
+        /* Heap-allocate the wrapper array when the small stack buffer
+         * will not hold all n wrappers. */
+        if (self->n > 10) {
+                sortarray = PyMem_New(sortwrapperobject, self->n);
+                if (sortarray == NULL) {
+                        sortarray = sortarraystack;
+                        goto error;
+                }
+        }
+
+        err = wrap_leaf_array(sortarray, leafs, leafs_n, self->n, keyfunc,
+                              &key_flags);
+        if (err < 0) {
+        error:
+                if (!self->leaf) {
+                        for (i = 0; i < leafs_n; i++)
+                                SAFE_DECREF(leafs[i]);
+                        PyMem_Free(leafs);
+                }
+                if (sortarray != sortarraystack)
+                        PyMem_Free(sortarray);
+                return -1;
+        }
+
+        if (key_flags && compare == NULL) {
+                /* All keys numeric: radix sort on the encoded keys
+                 * (insertion sort for tiny single-leaf lists). */
+#ifdef BLIST_FLOAT_RADIX_SORT
+                if (key_flags & KEY_ALL_DOUBLE) {
+                        if (self->n < 40 && self->leaf)
+                                err = insertion_sort_uint64(sortarray,self->n);
+                        else
+                                err = sort_uint64(sortarray, self->n);
+                } else
+#endif
+                if (key_flags & KEY_ALL_LONG) {
+                        if (self->n < 40 && self->leaf)
+                                err = insertion_sort_ulong(sortarray, self->n);
+                        else
+                                err = sort_ulong(sortarray, self->n);
+                }
+                else
+                        assert(0); /* Should not be possible */
+                unwrap_leaf_array(leafs, leafs_n, self->n, sortarray);
+                if (!self->leaf) {
+                        for (i = 0; i < leafs_n; i++)
+                                SAFE_DECREF(leafs[i]);
+                        PyMem_Free(leafs);
+                }
+        } else if (self->leaf) {
+                err = gallop_sort(self->children, self->num_children, compare);
+                unwrap_leaf_array(leafs, 1, self->n, sortarray);
+        } else {
+                /* General case: merge-sort the array of leaf nodes,
+                 * then rebuild the tree from the sorted leafs. */
+                PyBList **scratch = PyMem_New(PyBList *, self->n / HALF + 1);
+                if (!scratch) {
+                        PyMem_Free(leafs);
+                        PyMem_Free(sortarray);
+                        return -1;
+                }
+                leafs_n = sub_sort(scratch, leafs, compare, leafs_n, &err, 0);
+                array_enable_GC(leafs, leafs_n);
+                PyMem_Free(scratch);
+                unwrap_leaf_array(leafs, leafs_n, self->n, sortarray);
+                i = blist_init_from_child_array(leafs, leafs_n);
+
+                if (i < 0) {
+                        /* XXX leaking memory here when out of memory */
+                        PyMem_Free(sortarray);
+                        return -1;
+                } else {
+                        assert(i == 1);
+                        blist_become_and_consume((PyBList *) self, leafs[0]);
+                }
+                SAFE_DECREF(leafs[0]);
+                PyMem_Free(leafs);
+        }
+
+        if (sortarray != sortarraystack)
+                PyMem_Free(sortarray);
+        return err;
+}
+
+/************************************************************************
+ * Section for functions callable directly by the interpreter.
+ *
+ * Each of these functions is marked with VALID_USER for debug mode.
+ *
+ * If they, or any function they call, makes calls to decref_later,
+ * they must call decref_flush() just before returning.
+ *
+ * These functions must not be called directly by other blist
+ * functions. They should *only* be called by the interpreter, to
+ * ensure that decref_flush() is the last thing called before
+ * returning to the interpreter.
+ */
+
+/* tp_new for the user-visible blist type.  The exact type comes from
+ * blist_root_new() (which can recycle a free-listed object); a
+ * subclass allocates normally and receives a fresh children array. */
+BLIST_PYAPI(PyObject *)
+py_blist_root_tp_new(PyTypeObject *subtype, PyObject *args, PyObject *kwds)
+{
+        PyBList *self;
+
+        if (subtype == &PyRootBList_Type)
+                return (PyObject *) blist_root_new();
+
+        self = (PyBList *) subtype->tp_alloc(subtype, 0);
+        if (self == NULL)
+                return NULL;
+        self->children = PyMem_New(PyObject *, LIMIT);
+        if (self->children == NULL) {
+                subtype->tp_free(self);
+                return NULL;
+        }
+
+        /* Starts as an empty leaf with a fresh root index. */
+        self->leaf = 1;
+        ext_init((PyBListRoot *)self);
+
+        return (PyObject *) self;
+}
+
+/* Should only be used by the unpickler */
+BLIST_PYAPI(PyObject *)
+py_blist_internal_tp_new(PyTypeObject *subtype, PyObject *args, PyObject *kwds)
+{
+        /* args/kwds are ignored; internal nodes take no construction
+         * arguments. */
+        assert (subtype == &PyBList_Type);
+        return (PyObject *) blist_new();
+}
+
+/* Should only be used by the unpickler */
+BLIST_PYAPI(int)
+py_blist_internal_init(PyObject *oself, PyObject *args, PyObject *kw)
+{
+        /* No-op initializer; all arguments are ignored. */
+        return 0;
+}
+
+/* __init__ for blist: accepts an optional "sequence" argument.  Any
+ * existing contents are cleared first, then the list is filled from
+ * the sequence if one was given.  Returns 0 on success, -1 on error. */
+BLIST_PYAPI(int)
+py_blist_init(PyObject *oself, PyObject *args, PyObject *kw)
+{
+        int ret;
+        PyObject *arg = NULL;
+        static char *kwlist[] = {"sequence", 0};
+        int err;
+        PyBList *self;
+
+        invariants(oself, VALID_USER|VALID_DECREF);
+        self = (PyBList *) oself;
+
+        DANGER_BEGIN;
+        err = PyArg_ParseTupleAndKeywords(args, kw, "|O:list", kwlist, &arg);
+        DANGER_END;
+        if (!err)
+                return _int(-1);
+
+        /* Re-initializing: drop existing elements and the index. */
+        if (self->n) {
+                blist_CLEAR(self);
+                ext_dealloc((PyBListRoot *) self);
+        }
+
+        if (arg == NULL)
+                return _int(0);
+
+        ret = blist_init_from_seq(self, arg);
+
+        decref_flush(); /* Needed due to blist_CLEAR() call */
+        return _int(ret);
+}
+
+/* tp_richcompare: handles blist-vs-blist and blist-vs-list; any other
+ * combination returns NotImplemented so Python can try the
+ * reflection. */
+BLIST_PYAPI(PyObject *)
+py_blist_richcompare(PyObject *v, PyObject *w, int op)
+{
+        PyObject *rv;
+
+        if (!PyRootBList_Check(v)) {
+        not_implemented:
+                Py_INCREF(Py_NotImplemented);
+                return Py_NotImplemented;
+        }
+
+        invariants((PyBList *) v, VALID_USER|VALID_DECREF);
+        if (PyRootBList_Check(w)) {
+                rv = blist_richcompare_blist((PyBList *)v, (PyBList *)w, op);
+                decref_flush();
+                return _ob(rv);
+        }
+#ifndef Py_BUILD_CORE
+        if (PyList_Check(w)) {
+                rv = blist_richcompare_list((PyBList*)v, (PyListObject*)w, op);
+                decref_flush();
+                return _ob(rv);
+        }
+#endif
+        _void();
+        goto not_implemented;
+}
+
+/* tp_traverse for the cyclic GC: visit every non-NULL child of this
+ * node. */
+BLIST_PYAPI(int)
+py_blist_traverse(PyObject *oself, visitproc visit, void *arg)
+{
+        PyBList *self;
+        int i;
+
+        assert(PyBList_Check(oself));
+        self = (PyBList *) oself;
+
+        for (i = 0; i < self->num_children; i++) {
+                PyObject *child = self->children[i];
+                if (child != NULL)
+                        Py_VISIT(child);
+        }
+        return 0;
+}
+
+/* tp_clear for the cyclic GC: drop all children and reset to an empty
+ * leaf root. */
+BLIST_PYAPI(int)
+py_blist_clear(PyObject *oself)
+{
+        PyBList *self;
+
+        invariants(oself, VALID_USER|VALID_RW|VALID_DECREF);
+        self = (PyBList *) oself;
+
+        blist_forget_children(self);
+        self->n = 0;
+        self->leaf = 1;
+        ext_dealloc((PyBListRoot *) self);
+
+        decref_flush();
+        return _int(0);
+}
+
+/* blists are mutable and therefore unhashable.  Python >= 2.6 supplies
+ * PyObject_HashNotImplemented; older versions raise TypeError by
+ * hand. */
+#if PY_MAJOR_VERSION == 2 && PY_MINOR_VERSION >= 6 || PY_MAJOR_VERSION >= 3
+#define py_blist_nohash PyObject_HashNotImplemented
+#else
+BLIST_PYAPI(long)
+py_blist_nohash(PyObject *self)
+{
+        PyErr_SetString(PyExc_TypeError, "list objects are unhashable");
+        return -1;
+}
+#endif
+
+/* tp_dealloc: release all children, then recycle the node onto the
+ * matching free list when possible, otherwise free it.  Wrapped in the
+ * trashcan protocol to bound C-stack depth during teardown. */
+BLIST_PYAPI(void)
+py_blist_dealloc(PyObject *oself)
+{
+        int i;
+        PyBList *self;
+
+        assert(PyBList_Check(oself));
+        self = (PyBList *) oself;
+
+        if (_PyObject_GC_IS_TRACKED(self))
+                PyObject_GC_UnTrack(self);
+
+        Py_TRASHCAN_SAFE_BEGIN(self)
+
+        /* Py_XDECREF() is needed here because the Python C API allows list
+         * items to be NULL. */
+        for (i = 0; i < self->num_children; i++)
+                Py_XDECREF(self->children[i]);
+
+        if (PyRootBList_Check(self)) {
+                ext_dealloc((PyBListRoot *) self);
+                /* Only exact (non-subclass) objects may be recycled. */
+                if (PyRootBList_CheckExact(self)
+                    && num_free_ulists < MAXFREELISTS)
+                        free_ulists[num_free_ulists++] = self;
+                else
+                        goto free_blist;
+        } else if (Py_TYPE(self) == &PyBList_Type
+                   && num_free_lists < MAXFREELISTS)
+                free_lists[num_free_lists++] = self;
+        else {
+        free_blist:
+                PyMem_Free(self->children);
+                Py_TYPE(self)->tp_free((PyObject *)self);
+        }
+
+        Py_TRASHCAN_SAFE_END(self);
+}
+
+/* sq_ass_item: set or (when v == NULL) delete the item at index i.
+ * Returns 0 on success, -1 with IndexError for an out-of-range i. */
+BLIST_PYAPI(int)
+py_blist_ass_item(PyObject *oself, Py_ssize_t i, PyObject *v)
+{
+        PyObject *old_value;
+        PyBList *self;
+
+        invariants(oself, VALID_USER|VALID_RW|VALID_DECREF);
+
+        self = (PyBList *) oself;
+
+        if (i >= self->n || i < 0) {
+                set_index_error();
+                return _int(-1);
+        }
+
+        if (v == NULL) {
+                blist_delitem(self, i);
+                ext_mark(self, 0, DIRTY);
+                decref_flush();
+                return _int(0);
+        }
+
+        /* Replacement: swap in v and drop the displaced value. */
+        old_value = blist_ass_item_return(self, i, v);
+        Py_XDECREF(old_value);
+        return _int(0);
+}
+
+/* sq_ass_slice: implement self[ilow:ihigh] = v, or deletion when
+ * v == NULL.  Bounds are clamped to [0, n].  Small all-leaf cases are
+ * edited in place; otherwise the list is split around the slice and
+ * re-joined with the replacement in the middle. */
+BLIST_PYAPI(int)
+py_blist_ass_slice(PyObject *oself, Py_ssize_t ilow, Py_ssize_t ihigh,
+                   PyObject *v)
+{
+        Py_ssize_t net;
+        PyBList *other, *left, *right, *self;
+
+        invariants(oself, VALID_RW|VALID_USER|VALID_DECREF);
+
+        self = (PyBList *) oself;
+
+        /* Clamp the bounds the way list slice assignment does. */
+        if (ilow < 0) ilow = 0;
+        else if (ilow > self->n) ilow = self->n;
+        if (ihigh < ilow) ihigh = ilow;
+        else if (ihigh > self->n) ihigh = self->n;
+
+        if (!v) {
+                blist_delslice(self, ilow, ihigh);
+                ext_mark(self, 0, DIRTY);
+                decref_flush();
+                return _int(0);
+        }
+
+        /* Obtain the replacement as a blist, reusing v when it already
+         * is one (and is not self). */
+        if (PyRootBList_Check(v) && (PyObject *) self != v) {
+                other = (PyBList *) v;
+                Py_INCREF(other);
+                ext_mark_set_dirty_all(other);
+        } else {
+                other = blist_root_new();
+                if (v) {
+                        int err = blist_init_from_seq(other, v);
+                        if (err < 0) {
+                                decref_later((PyObject *) other);
+                                decref_flush();
+                                return _int(-1);
+                        }
+                }
+        }
+
+        /* Net change in list length. */
+        net = other->n - (ihigh - ilow);
+
+        /* Special case small lists */
+        if (self->leaf && other->leaf && (self->n + net <= LIMIT))
+        {
+                Py_ssize_t i;
+
+                for (i = ilow; i < ihigh; i++)
+                        decref_later(self->children[i]);
+
+                if (net >= 0)
+                        shift_right(self, ihigh, net);
+                else
+                        shift_left(self, ihigh, -net);
+                self->num_children += net;
+                copyref(self, ilow, other, 0, other->n);
+                SAFE_DECREF(other);
+                blist_adjust_n(self);
+                decref_flush();
+                return _int(0);
+        }
+
+        /* General case: left = self[:ilow], right = self[ihigh:],
+         * result = left + other + right. */
+        left = self;
+        right = blist_root_copy(self);
+        blist_delslice(left, ilow, left->n);
+        blist_delslice(right, 0, ihigh);
+        blist_extend_blist(left, other); /* XXX check return values */
+        blist_extend_blist(left, right);
+
+        ext_mark(self, 0, DIRTY);
+
+        SAFE_DECREF(other);
+        SAFE_DECREF(right);
+
+        decref_flush();
+
+        return _int(0);
+}
+
+/* mp_ass_subscript: handle self[item] = value and del self[item] for
+ * both integer indices and slices.  Integer indices get a leaf fast
+ * path; simple slices are forwarded to py_blist_ass_slice(); extended
+ * slices are processed element by element.  Returns 0 on success, -1
+ * with an exception set on error. */
+BLIST_PYAPI(int)
+py_blist_ass_subscript(PyObject *oself, PyObject *item, PyObject *value)
+{
+        PyBList *self;
+
+        invariants(oself, VALID_USER|VALID_RW|VALID_DECREF);
+
+        self = (PyBList *) oself;
+
+        if (PyIndex_Check(item)) {
+                Py_ssize_t i;
+                PyObject *old_value;
+
+                /* Fast path for exact ints; anything else goes through
+                 * the generic __index__ protocol. */
+                if (PyLong_CheckExact(item)) {
+                        i = PyInt_AsSsize_t(item);
+                        if (i == -1 && PyErr_Occurred()) {
+                                PyErr_Clear();
+                                goto number;
+                        }
+                } else {
+                number:
+                        i = PyNumber_AsSsize_t(item, PyExc_IndexError);
+                        if (i == -1 && PyErr_Occurred())
+                                return _int(-1);
+                }
+                if (i < 0)
+                        i += self->n;
+
+                if (i >= self->n || i < 0) {
+                        set_index_error();
+                        return _int(-1);
+                }
+
+                if (self->leaf) {
+                        /* Speed up common cases */
+
+                        old_value = self->children[i];
+                        if (value == NULL) {
+                                shift_left(self, i+1, 1);
+                                self->num_children--;
+                                self->n--;
+                        } else {
+                                self->children[i] = value;
+                                Py_INCREF(value);
+                        }
+                        DANGER_BEGIN;
+                        Py_DECREF(old_value);
+                        DANGER_END;
+                        return _int(0);
+                }
+
+                if (value == NULL) {
+                        blist_delitem(self, i);
+                        ext_mark(self, 0, DIRTY);
+                        decref_flush();
+                        return _int(0);
+                }
+
+                Py_INCREF(value);
+                old_value = blist_ass_item_return2((PyBListRoot*)self,i,value);
+                DANGER_BEGIN;
+                Py_DECREF(old_value);
+                DANGER_END;
+                return _int(0);
+        } else if (PySlice_Check(item)) {
+                Py_ssize_t start, stop, step, slicelength;
+
+                ext_mark(self, 0, DIRTY);
+
+                if (PySlice_GetIndicesEx((PySliceObject*)item, self->n,
+                                         &start, &stop,&step,&slicelength)<0)
+                        return _int(-1);
+
+                /* treat L[slice(a,b)] = v _exactly_ like L[a:b] = v */
+                if (step == 1 && ((PySliceObject*)item)->step == Py_None)
+                        return _redir(py_blist_ass_slice(oself,start,stop,value));
+
+                if (value == NULL) {
+                        /* Delete back-to-front */
+                        Py_ssize_t i, cur;
+
+                        if (slicelength <= 0)
+                                return _int(0);
+
+                        /* Flip a forward slice so deletions never
+                         * shift yet-to-be-deleted indices. */
+                        if (step > 0) {
+                                stop = start - 1;
+                                start = start + step*(slicelength-1);
+                                step = -step;
+                        }
+
+                        for (cur = start, i = 0; i < slicelength;
+                             cur += step, i++) {
+                                PyObject *ob = blist_delitem_return(self, cur);
+                                decref_later(ob);
+                        }
+
+                        decref_flush();
+                        ext_mark(self, 0, DIRTY);
+
+                        return _int(0);
+                } else { /* assign slice */
+                        PyObject *ins, *seq;
+                        Py_ssize_t cur, i;
+
+                        DANGER_BEGIN;
+                        seq = PySequence_Fast(value,
+                                              "Must assign iterable to extended slice");
+                        DANGER_END;
+                        if (!seq)
+                                return _int(-1);
+
+                        /* Self-assignment: work from a copy. */
+                        if (seq == (PyObject *) self) {
+                                Py_DECREF(seq);
+                                seq = (PyObject *) blist_root_copy(self);
+                        }
+
+                        if (PySequence_Fast_GET_SIZE(seq) != slicelength) {
+                                PyErr_Format(PyExc_ValueError,
+                                             "attempt to assign sequence of size %zd to extended slice of size %zd",
+                                             PySequence_Fast_GET_SIZE(seq),
+                                             slicelength);
+                                Py_DECREF(seq);
+                                return _int(-1);
+                        }
+
+                        if (!slicelength) {
+                                Py_DECREF(seq);
+                                return _int(0);
+                        }
+
+                        for (cur = start, i = 0; i < slicelength;
+                             cur += step, i++) {
+                                PyObject *ob;
+                                ins = PySequence_Fast_GET_ITEM(seq, i);
+                                ob = blist_ass_item_return(self, cur, ins);
+                                decref_later(ob);
+                        }
+
+                        Py_DECREF(seq);
+
+                        decref_flush();
+
+                        return _int(0);
+                }
+        } else {
+                PyErr_SetString(PyExc_TypeError,
+                                "list indices must be integers");
+                return _int(-1);
+        }
+}
+
+/* sq_length slot: number of user-visible elements in the list. */
+BLIST_PYAPI(Py_ssize_t)
+py_blist_length(PyObject *ob)
+{
+        assert(PyRootBList_Check(ob));
+        return ((PyBList *) ob)->n;
+}
+
+/* sq_repeat slot: return a new blist equal to self repeated n times. */
+BLIST_PYAPI(PyObject *)
+py_blist_repeat(PyObject *oself, Py_ssize_t n)
+{
+        PyObject *ret;
+        PyBList *self;
+
+        invariants(oself, VALID_USER|VALID_DECREF);
+
+        self = (PyBList *) oself;
+
+        ret = blist_repeat(self, n);
+        decref_flush();
+        /* The result may share nodes with self, so self's index cache
+         * must be marked dirty. */
+        ext_mark_set_dirty_all(self);
+
+        return _ob(ret);
+}
+
+/* sq_inplace_repeat slot: self *= n.  Builds the repeated list, then
+ * swaps its contents into self; returns a new reference to self, or
+ * NULL on error. */
+BLIST_PYAPI(PyObject *)
+py_blist_inplace_repeat(PyObject *oself, Py_ssize_t n)
+{
+        PyBList *tmp, *self;
+
+        invariants(oself, VALID_USER|VALID_RW|VALID_DECREF);
+
+        self = (PyBList *) oself;
+
+        tmp = (PyBList *) blist_repeat(self, n);
+        if (tmp == NULL)
+                return (PyObject *) _blist(NULL);
+        blist_become_and_consume(self, tmp);
+        Py_INCREF(self);
+        SAFE_DECREF(tmp);
+
+        decref_flush();
+
+        ext_mark(self, 0, DIRTY);
+
+        return (PyObject *) _blist(self);
+}
+
+/* L.extend(iterable): append every element of "other".  Returns None,
+ * or NULL on error. */
+BLIST_PYAPI(PyObject *)
+py_blist_extend(PyBList *self, PyObject *other)
+{
+        int err;
+
+        invariants(self, VALID_USER|VALID_RW|VALID_DECREF);
+
+        err = blist_extend(self, other);
+        decref_flush();
+        ext_mark(self, 0, DIRTY);
+        /* Extending from another blist may share nodes with it, so its
+         * index cache must also be invalidated. */
+        if (PyBList_Check(other))
+                ext_mark_set_dirty_all((PyBList *) other);
+
+        if (err < 0)
+                return _ob(NULL);
+        Py_RETURN_NONE;
+}
+
+/* nb_inplace_add slot (L += other): same work as py_blist_extend, but
+ * returns a new reference to self per the in-place protocol. */
+BLIST_PYAPI(PyObject *)
+py_blist_inplace_concat(PyObject *oself, PyObject *other)
+{
+        int err;
+        PyBList *self;
+
+        invariants(oself, VALID_RW|VALID_USER|VALID_DECREF);
+
+        self = (PyBList *) oself;
+
+        err = blist_extend(self, other);
+        decref_flush();
+        ext_mark(self, 0, DIRTY);
+        if (PyBList_Check(other))
+                ext_mark_set_dirty_all((PyBList*) other);
+
+        if (err < 0)
+                return _ob(NULL);
+
+        Py_INCREF(self);
+        return _ob((PyObject *)self);
+}
+
+/* sq_contains slot: linear scan comparing each element to "el" with the
+ * pre-selected fast equality comparator.  Returns 1 if found, 0 if not,
+ * -1 if a comparison raised. */
+BLIST_PYAPI(int)
+py_blist_contains(PyObject *oself, PyObject *el)
+{
+        int c, ret = 0;
+        PyObject *item;
+        PyBList *self;
+        fast_compare_data_t fast_cmp_type;
+
+        invariants(oself, VALID_USER | VALID_DECREF);
+
+        self = (PyBList *) oself;
+        fast_cmp_type = check_fast_cmp_type(el, Py_EQ);
+
+        ITER(self, item, {
+                c = fast_eq(el, item, fast_cmp_type);
+                if (c < 0) {
+                        ret = -1;
+                        break;
+                }
+                if (c > 0) {
+                        ret = 1;
+                        break;
+                }
+        });
+
+        decref_flush();
+        return _int(ret);
+}
+
+/* sq_slice slot: return L[ilow:ihigh] as a new blist.  Out-of-range
+ * bounds are clamped, as for list.  Non-leaf slicing is done by cloning
+ * the whole tree in O(1) and trimming both ends. */
+BLIST_PYAPI(PyObject *)
+py_blist_get_slice(PyObject *oself, Py_ssize_t ilow, Py_ssize_t ihigh)
+{
+        PyBList *rv, *self;
+
+        invariants(oself, VALID_USER | VALID_DECREF);
+
+        self = (PyBList *) oself;
+
+        /* Clamp bounds to [0, n] with ilow <= ihigh. */
+        if (ilow < 0) ilow = 0;
+        else if (ilow > self->n) ilow = self->n;
+        if (ihigh < ilow) ihigh = ilow;
+        else if (ihigh > self->n) ihigh = self->n;
+
+        rv = blist_root_new();
+        if (rv == NULL)
+                return (PyObject *) _blist(NULL);
+
+        if (ihigh <= ilow || ilow >= self->n)
+                return (PyObject *) _blist(rv);
+
+        if (self->leaf) {
+                /* Small list: copy the element pointers directly. */
+                Py_ssize_t delta = ihigh - ilow;
+
+                copyref(rv, 0, self, ilow, delta);
+                rv->num_children = delta;
+                rv->n = delta;
+                return (PyObject *) _blist(rv);
+        }
+
+        /* Clone self, then cut away everything outside [ilow, ihigh).
+         * Delete the tail first so ilow stays valid. */
+        blist_become(rv, self);
+        blist_delslice(rv, ihigh, self->n);
+        blist_delslice(rv, 0, ilow);
+
+        ext_mark(rv, 0, DIRTY);
+        ext_mark_set_dirty(self, ilow, ihigh);
+        decref_flush();
+
+        return (PyObject *) _blist(rv);
+}
+
+/* This should only be called by _PyBList_GET_ITEM_FAST2().
+ *
+ * Slow path of the indexed-lookup cache: resolve element i of a
+ * non-leaf root whose index cache is not fully CLEAN.  Either the
+ * cache entry covering i is dirty (rebuild it via ext_make_clean), or
+ * the cached (leaf, offset) pair can be used directly.  Returns a
+ * borrowed reference. */
+PyObject *_PyBList_GetItemFast3(PyBListRoot *root, Py_ssize_t i)
+{
+        PyObject *rv;
+        Py_ssize_t dirty_offset = -1;
+
+        invariants(root, VALID_PARENT);
+        assert(!root->leaf);
+        assert(root->dirty_root != CLEAN);
+        assert(i >= 0);
+        assert(i < root->n);
+
+        if (ext_is_dirty(root, i, &dirty_offset)){
+                rv = ext_make_clean(root, i);
+        } else {
+                Py_ssize_t ioffset = i / INDEX_FACTOR;
+                Py_ssize_t offset = root->offset_list[ioffset];
+                PyBList *p = root->index_list[ioffset];
+                assert(i >= offset);
+                assert(p);
+                assert(p->leaf);
+                if (i < offset + p->n) {
+                        rv = p->children[i - offset];
+                        if (dirty_offset >= 0)
+                                ext_make_clean(root, dirty_offset);
+                } else if (ext_is_dirty(root,i + INDEX_FACTOR,&dirty_offset)){
+                        rv = ext_make_clean(root, i);
+                } else {
+                        /* i falls in the next cache slot's leaf. */
+                        ioffset++;
+                        assert(ioffset < root->index_allocated);
+                        offset = root->offset_list[ioffset];
+                        p = root->index_list[ioffset];
+                        /* Bug fix: validate p BEFORE dereferencing it.
+                         * The original read p->children first, so in
+                         * debug builds a bad cache entry would crash on
+                         * the dereference before the asserts could fire. */
+                        assert(p);
+                        assert(p->leaf);
+                        assert(i < offset + p->n);
+                        rv = p->children[i - offset];
+                        if (dirty_offset >= 0)
+                                ext_make_clean(root, dirty_offset);
+                }
+        }
+
+        assert(rv == blist_get1((PyBList *)root, i));
+
+        return _ob(rv);
+}
+
+/* sq_item slot: return L[i] as a new reference, or NULL with IndexError
+ * if i is out of range (no negative-index handling at this level). */
+BLIST_PYAPI(PyObject *)
+py_blist_get_item(PyObject *oself, Py_ssize_t i)
+{
+        PyBList *self = (PyBList *) oself;
+        PyObject *ret;
+
+        invariants(self, VALID_USER);
+
+        if (i < 0 || i >= self->n) {
+                set_index_error();
+                return _ob(NULL);
+        }
+
+        if (self->leaf)
+                ret = self->children[i];
+        else
+                ret = _PyBList_GET_ITEM_FAST2((PyBListRoot*)self, i);
+        Py_INCREF(ret);
+        return _ob(ret);
+}
+
+/* Note: this may be called as __radd__, which means the arguments may
+ * be reversed. */
+/* nb_add slot: ob1 + ob2 where at least one side is a blist and the
+ * other must be a blist or list; otherwise returns NotImplemented.
+ * Returns a new blist, or NULL on error. */
+BLIST_PYAPI(PyObject *)
+py_blist_concat(PyObject *ob1, PyObject *ob2)
+{
+        PyBList *rv;
+        int err;
+
+        int is_blist1 = PyRootBList_Check(ob1);
+        int is_blist2 = PyRootBList_Check(ob2);
+
+        if ((!is_blist1 && !PyList_Check(ob1))
+            || (!is_blist2 && !PyList_Check(ob2))) {
+                Py_INCREF(Py_NotImplemented);
+                return Py_NotImplemented;
+        }
+
+        if (is_blist1 && is_blist2) {
+                PyBList *blist1 = (PyBList *) ob1;
+                PyBList *blist2 = (PyBList *) ob2;
+                /* Both small: result fits in a single leaf. */
+                if (blist1->n < LIMIT && blist2->n < LIMIT
+                    && blist1->n + blist2->n < LIMIT) {
+                        rv = blist_root_new();
+                        copyref(rv, 0, blist1, 0, blist1->n);
+                        copyref(rv, blist1->n, blist2, 0, blist2->n);
+                        rv->n = rv->num_children = blist1->n + blist2->n;
+                        goto done;
+                }
+
+                /* General case: O(1) copy of blist1, then graft blist2. */
+                rv = blist_root_copy(blist1);
+                blist_extend_blist(rv, blist2);
+                ext_mark(rv, 0, DIRTY);
+                ext_mark_set_dirty_all(blist2);
+                goto done;
+        }
+
+        /* Mixed blist/list operands: build from scratch. */
+        rv = blist_root_new();
+        err = blist_init_from_seq(rv, ob1);
+        if (err < 0) {
+                decref_later((PyObject *) rv);
+                rv = NULL;
+                goto done;
+        }
+        err = blist_extend(rv, ob2);
+        if (err < 0) {
+                decref_later((PyObject *) rv);
+                rv = NULL;
+                goto done;
+        }
+        ext_mark(rv, 0, DIRTY);
+        if (PyBList_Check(ob1))
+                ext_mark_set_dirty_all((PyBList *) ob1);
+        if (PyBList_Check(ob2))
+                ext_mark_set_dirty_all((PyBList *) ob2);
+
+done:
+        _decref_flush();
+        return (PyObject *) rv;
+}
+
+/* User-visible repr() */
+BLIST_PYAPI(PyObject *)
+py_blist_repr(PyObject *oself)
+{
+        /* Basic approach: Clone self in O(1) time, then walk through
+         * the clone, changing each element to repr() of the element,
+         * in O(n) time.  Finally, enclose it in square brackets and
+         * call join.
+         */
+
+        Py_ssize_t i;
+        PyBList *pieces = NULL, *self;
+        PyObject *result = NULL;
+        PyObject *s, *tmp, *tmp2;
+
+        invariants(oself, VALID_USER);
+        self = (PyBList *) oself;
+
+        /* Guard against self-referential lists, as list does. */
+        DANGER_BEGIN;
+        i = Py_ReprEnter((PyObject *) self);
+        DANGER_END;
+        if (i) {
+                return i > 0 ? _ob(PyUnicode_FromString("[...]")) : _ob(NULL);
+        }
+
+        if (self->n == 0) {
+#ifdef Py_BUILD_CORE
+                result = PyUnicode_FromString("[]");
+#else
+                result = PyUnicode_FromString("blist([])");
+#endif
+                goto Done;
+        }
+
+        pieces = blist_root_copy(self);
+        if (pieces == NULL)
+                goto Done;
+
+        /* Replace every element of the clone by its repr() string. */
+        if (blist_repr_r(pieces) < 0)
+                goto Done;
+
+        /* Prepend the opening bracket onto the first piece... */
+#ifdef Py_BUILD_CORE
+        s = PyUnicode_FromString("[");
+#else
+        s = PyUnicode_FromString("blist([");
+#endif
+        if (s == NULL)
+                goto Done;
+        tmp = blist_get1(pieces, 0);
+        tmp2 = PyUnicode_Concat(s, tmp);
+        Py_DECREF(s);
+        s = tmp2;
+        DANGER_BEGIN;
+        py_blist_ass_item((PyObject *) pieces, 0, s);
+        DANGER_END;
+        Py_DECREF(s);
+
+        /* ...and append the closing bracket onto the last piece. */
+#ifdef Py_BUILD_CORE
+        s = PyUnicode_FromString("]");
+#else
+        s = PyUnicode_FromString("])");
+#endif
+        if (s == NULL)
+                goto Done;
+        tmp = blist_get1(pieces, pieces->n-1);
+        tmp2 = PyUnicode_Concat(tmp, s);
+        Py_DECREF(s);
+        tmp = tmp2;
+        DANGER_BEGIN;
+        py_blist_ass_item((PyObject *) pieces, pieces->n-1, tmp);
+        DANGER_END;
+        Py_DECREF(tmp);
+
+        s = PyUnicode_FromString(", ");
+        if (s == NULL)
+                goto Done;
+        result = PyUnicode_Join(s, (PyObject *) pieces);
+        Py_DECREF(s);
+
+ Done:
+        DANGER_BEGIN;
+        /* Only deallocating strings, so this is safe */
+        Py_XDECREF(pieces);
+        DANGER_END;
+
+        DANGER_BEGIN;
+        Py_ReprLeave((PyObject *) self);
+        DANGER_END;
+        return _ob(result);
+}
+
+#if defined(Py_DEBUG) && !defined(BLIST_IN_PYTHON)
+/* Return a string that shows the internal structure of the BList.
+ * Debug-only recursive helper; "indent" (may be NULL) is the prefix for
+ * this nesting level.  Returns a new reference.
+ *
+ * Fixes over the previous version: removed a dead store to nl_indent
+ * (it was assigned indent2 and immediately overwritten), and released
+ * the nl_indent/indent2 references, which previously leaked on every
+ * call. */
+BLIST_PYAPI(PyObject *)
+blist_debug(PyBList *self, PyObject *indent)
+{
+        PyObject *result, *s, *nl_indent, *comma, *indent2, *tmp, *tmp2;
+
+        invariants(self, VALID_PARENT);
+
+        comma = PyUnicode_FromString(", ");
+
+        if (indent == NULL)
+                indent = PyUnicode_FromString("");
+        else
+                Py_INCREF(indent);
+
+        /* Children are printed one level deeper. */
+        tmp = PyUnicode_FromString(" ");
+        indent2 = PyUnicode_Concat(indent, tmp);
+        Py_DECREF(tmp);
+
+        if (!self->leaf) {
+                int i;
+
+                tmp = PyUnicode_FromString("\n");
+                nl_indent = PyUnicode_Concat(indent2, tmp);
+                Py_DECREF(tmp);
+
+                result = PyUnicode_FromFormat("blist(leaf=%d, n=%d, r=%d, ",
+                                              self->leaf, self->n,
+                                              Py_REFCNT(self));
+
+                /* Recurse into each child, separated by newline+indent. */
+                for (i = 0; i < self->num_children; i++) {
+                        s = blist_debug((PyBList *)self->children[i], indent2);
+                        tmp = PyUnicode_Concat(result, nl_indent);
+                        Py_DECREF(result);
+                        result = tmp;
+                        tmp = PyUnicode_Concat(result, s);
+                        Py_DECREF(result);
+                        result = tmp;
+                        Py_DECREF(s);
+                }
+
+                Py_DECREF(nl_indent);
+
+                tmp = PyUnicode_FromString(")");
+                tmp2 = PyUnicode_Concat(result, tmp);
+                Py_DECREF(result);
+                Py_DECREF(tmp);
+                result = tmp2;
+        } else {
+                int i;
+
+                result = PyUnicode_FromFormat("blist(leaf=%d, n=%d, r=%d, ",
+                                              self->leaf, self->n,
+                                              Py_REFCNT(self));
+                for (i = 0; i < self->num_children; i++) {
+                        s = PyObject_Str(self->children[i]);
+                        tmp = PyUnicode_Concat(result, s);
+                        Py_DECREF(result);
+                        result = tmp;
+                        Py_DECREF(s);
+                        tmp = PyUnicode_Concat(result, comma);
+                        Py_DECREF(result);
+                        result = tmp;
+                }
+        }
+
+        Py_DECREF(indent2);
+
+        /* Prefix this level's indent onto the assembled text. */
+        s = indent;
+        tmp = PyUnicode_Concat(s, result);
+        Py_DECREF(result);
+        result = tmp;
+
+        Py_DECREF(comma);
+        Py_DECREF(indent);
+        check_invariants(self);
+        return _ob(result);
+}
+
+/* L.debug(): user-visible entry point for blist_debug() (debug builds
+ * only). */
+BLIST_PYAPI(PyObject *)
+py_blist_debug(PyBList *self)
+{
+        invariants(self, VALID_USER);
+        return _ob(blist_debug(self, NULL));
+}
+#endif
+
+/* L.sort(cmp=None, key=None, reverse=False) -- stable in-place sort.
+ *
+ * Strategy: the root's payload (everything from BLIST_FIRST_FIELD on)
+ * is copied into the stack temporary "saved", self is reset to an empty
+ * leaf, "saved" is sorted, and the sorted payload is copied back.  This
+ * lets the code detect a list that is modified by a comparison function
+ * while the sort is in progress (both self and saved non-empty).
+ *
+ * Returns None, or NULL on error. */
+BLIST_PYAPI(PyObject *)
+py_blist_sort(PyBListRoot *self, PyObject *args, PyObject *kwds)
+{
+#if PY_MAJOR_VERSION < 3
+        static char *kwlist[] = {"cmp", "key", "reverse", 0};
+#else
+        static char *kwlist[] = {"key", "reverse", 0};
+#endif
+        int reverse = 0;
+        int ret = -1;
+        PyBListRoot saved;
+        PyObject *result = NULL;
+        PyObject *compare = NULL, *keyfunc = NULL;
+        /* One-slot cache of a LIMIT-sized children array, reused across
+         * calls to avoid an allocation per sort.
+         * NOTE(review): a static cache is not thread-safe on its own --
+         * presumably serialized by the GIL; verify before porting. */
+        static PyObject **extra_list = NULL;
+
+        invariants(self, VALID_USER|VALID_RW | VALID_DECREF);
+
+        if (args != NULL) {
+                int err;
+                DANGER_BEGIN;
+#if PY_MAJOR_VERSION < 3
+                err = PyArg_ParseTupleAndKeywords(args, kwds, "|OOi:sort",
+                                                  kwlist, &compare, &keyfunc,
+                                                  &reverse);
+                DANGER_END;
+                if (!err)
+                        return _ob(NULL);
+#else
+                err = PyArg_ParseTupleAndKeywords(args, kwds, "|Oi:sort",
+                                                  kwlist, &keyfunc, &reverse);
+                DANGER_END;
+                if (!err)
+                        return _ob(NULL);
+                /* Python 3: key/reverse are keyword-only. */
+                if (Py_SIZE(args) > 0) {
+                        PyErr_SetString(PyExc_TypeError,
+                                "must use keyword argument for key function");
+                        return _ob(NULL);
+                }
+#endif
+        }
+
+        if (self->n < 2)
+                Py_RETURN_NONE;
+
+#if PY_MAJOR_VERSION < 3
+        if (is_default_cmp(compare))
+                compare = NULL;
+#endif
+        if (keyfunc == Py_None)
+                keyfunc = NULL;
+
+        /* Move self's payload into "saved" and give saved a valid
+         * object header so it can be treated as a real root. */
+        memset(&saved, 0, offsetof(PyBListRoot, BLIST_FIRST_FIELD));
+        memcpy(&saved.BLIST_FIRST_FIELD, &self->BLIST_FIRST_FIELD,
+               sizeof(*self) - offsetof(PyBListRoot, BLIST_FIRST_FIELD));
+        Py_TYPE(&saved) = &PyRootBList_Type;
+        Py_REFCNT(&saved) = 1;
+
+        /* Re-initialize self as an empty leaf while the sort runs. */
+        if (extra_list != NULL) {
+                self->children = extra_list;
+                extra_list = NULL;
+        } else {
+                self->children = PyMem_New(PyObject *, LIMIT);
+                if (self->children == NULL) {
+                        PyErr_NoMemory();
+                        goto err;
+                }
+        }
+        self->n = 0;
+        self->num_children = 0;
+        self->leaf = 1;
+        ext_init(self);
+
+        /* Reverse sort stability achieved by initially reversing the list,
+           applying a stable forward sort, then reversing the final result. */
+        if (reverse)
+                blist_reverse(&saved);
+
+        ret = sort(&saved, compare, keyfunc);
+
+        if (ret >= 0) {
+                result = Py_None;
+                if (reverse) {
+                        ext_mark((PyBList*)&saved, 0, DIRTY);
+                        blist_reverse(&saved);
+                }
+        } else
+                ext_mark((PyBList*)&saved, 0, DIRTY);
+
+        /* If self is non-empty here, a comparison function mutated the
+         * list while it was being sorted. */
+        if (self->n && saved.n) {
+                DANGER_BEGIN;
+                /* An error may also have been raised by a comparison
+                 * function.  Since this may decref that traceback, it can
+                 * execute arbitrary python code. */
+                PyErr_SetString(PyExc_ValueError, "list modified during sort");
+                DANGER_END;
+                result = NULL;
+                blist_CLEAR((PyBList*) self);
+        }
+
+        /* Return the scratch children array to the one-slot cache. */
+        if (extra_list == NULL)
+                extra_list = self->children;
+        else
+                PyMem_Free(self->children);
+
+        ext_dealloc(self);
+        assert(!self->n);
+ err:
+        /* Copy the (sorted) payload back into self. */
+        memcpy(&self->BLIST_FIRST_FIELD, &saved.BLIST_FIRST_FIELD,
+               sizeof(*self) - offsetof(PyBListRoot, BLIST_FIRST_FIELD));
+
+        Py_XINCREF(result);
+
+        decref_flush();
+
+        /* This must come after the decref_flush(); otherwise, we may have
+         * extra temporary references to internal nodes, which throws off the
+         * debug-mode sanity checking. */
+        if (ret >= 0)
+                ext_reindex_set_all(&saved);
+
+        return _ob(result);
+}
+
+/* L.reverse(): reverse in place; returns None. */
+BLIST_PYAPI(PyObject *)
+py_blist_reverse(PyBList *restrict self)
+{
+        invariants(self, VALID_USER|VALID_RW);
+
+        if (self->leaf)
+                /* Single leaf: just reverse the pointer array. */
+                reverse_slice(self->children,
+                              &self->children[self->num_children]);
+        else {
+                blist_reverse((PyBListRoot*) self);
+        }
+
+        Py_RETURN_NONE;
+}
+
+/* L.count(v): number of elements equal to v, or NULL if a comparison
+ * raised. */
+BLIST_PYAPI(PyObject *)
+py_blist_count(PyBList *self, PyObject *v)
+{
+        Py_ssize_t count = 0;
+        PyObject *item;
+        int c;
+        fast_compare_data_t fast_cmp_type;
+
+        invariants(self, VALID_USER | VALID_DECREF);
+
+        fast_cmp_type = check_fast_cmp_type(v, Py_EQ);
+
+        ITER(self, item, {
+                c = fast_eq(item, v, fast_cmp_type);
+                if (c > 0)
+                        count++;
+                else if (c < 0) {
+                        ITER_CLEANUP();
+                        decref_flush();
+                        return _ob(NULL);
+                }
+        })
+
+        decref_flush();
+        return _ob(PyInt_FromSsize_t(count));
+}
+
+/* L.index(v[, start[, stop]]): first index of v in L[start:stop].
+ * Raises ValueError if not found; returns NULL on error. */
+BLIST_PYAPI(PyObject *)
+py_blist_index(PyBList *self, PyObject *args)
+{
+        Py_ssize_t i, start=0, stop=self->n;
+        PyObject *v;
+        int c, err;
+        PyObject *item;
+        fast_compare_data_t fast_cmp_type;
+
+        invariants(self, VALID_USER|VALID_DECREF);
+
+        DANGER_BEGIN;
+        err = PyArg_ParseTuple(args, "O|O&O&:index", &v,
+                               _PyEval_SliceIndex, &start,
+                               _PyEval_SliceIndex, &stop);
+        DANGER_END;
+        if (!err)
+                return _ob(NULL);
+        /* Clamp start/stop to [0, n], with negative values counted from
+         * the end, matching list.index semantics. */
+        if (start < 0) {
+                start += self->n;
+                if (start < 0)
+                        start = 0;
+        } else if (start > self->n)
+                start = self->n;
+        if (stop < 0) {
+                stop += self->n;
+                if (stop < 0)
+                        stop = 0;
+        } else if (stop > self->n)
+                stop = self->n;
+
+        fast_cmp_type = check_fast_cmp_type(v, Py_EQ);
+        i = start;
+        ITER2(self, item, start, stop, {
+                c = fast_eq(item, v, fast_cmp_type);
+                if (c > 0) {
+                        ITER_CLEANUP();
+                        decref_flush();
+                        return _ob(PyInt_FromSsize_t(i));
+                } else if (c < 0) {
+                        ITER_CLEANUP();
+                        decref_flush();
+                        return _ob(NULL);
+                }
+                i++;
+        })
+
+        decref_flush();
+        PyErr_SetString(PyExc_ValueError, "list.index(x): x not in list");
+        return _ob(NULL);
+}
+
+/* L.remove(v): delete the first element equal to v.  Raises ValueError
+ * if not found; returns None, or NULL on error. */
+BLIST_PYAPI(PyObject *)
+py_blist_remove(PyBList *self, PyObject *v)
+{
+        Py_ssize_t i;
+        int c;
+        PyObject *item;
+        fast_compare_data_t fast_cmp_type;
+
+        invariants(self, VALID_USER|VALID_RW|VALID_DECREF);
+
+        fast_cmp_type = check_fast_cmp_type(v, Py_EQ);
+        i = 0;
+        ITER(self, item, {
+                c = fast_eq(item, v, fast_cmp_type);
+                if (c > 0) {
+                        /* Found: stop iterating before mutating. */
+                        ITER_CLEANUP();
+                        blist_delitem(self, i);
+                        decref_flush();
+                        ext_mark(self, 0, DIRTY);
+                        Py_RETURN_NONE;
+                } else if (c < 0) {
+                        ITER_CLEANUP();
+                        decref_flush();
+                        return _ob(NULL);
+                }
+                i++;
+        })
+
+        decref_flush();
+        PyErr_SetString(PyExc_ValueError, "list.remove(x): x not in list");
+        return _ob(NULL);
+}
+
+/* L.pop([index]): remove and return the item at index (default: last).
+ * Returns the popped item (ownership transferred to the caller), or
+ * NULL with IndexError on empty list / out-of-range index. */
+BLIST_PYAPI(PyObject *)
+py_blist_pop(PyBList *self, PyObject *args)
+{
+        Py_ssize_t i = -1;
+        PyObject *v;
+        int err;
+
+        invariants(self, VALID_USER|VALID_RW|VALID_DECREF);
+
+        DANGER_BEGIN;
+        err = PyArg_ParseTuple(args, "|n:pop", &i);
+        DANGER_END;
+        if (!err)
+                return _ob(NULL);
+
+        if (self->n == 0) {
+                /* Special-case most common failure cause */
+                PyErr_SetString(PyExc_IndexError, "pop from empty list");
+                return _ob(NULL);
+        }
+
+        /* Fast path for popping the last element; falls through to the
+         * general path if the fast pop declines. */
+        if (i == -1 || i == self->n-1) {
+                v = blist_pop_last_fast(self);
+                if (v)
+                        return _ob(v);
+        }
+
+        if (i < 0)
+                i += self->n;
+        if (i < 0 || i >= self->n) {
+                PyErr_SetString(PyExc_IndexError, "pop index out of range");
+                return _ob(NULL);
+        }
+
+        v = blist_delitem_return(self, i);
+        ext_mark(self, 0, DIRTY);
+
+        decref_flush(); /* Remove any deleted BList nodes */
+
+        return _ob(v); /* the caller now owns the reference the list had */
+}
+
+/* L.insert(index, object): insert object before index.  Out-of-range
+ * indices are clamped, as for list.insert.  Returns None, or NULL on
+ * error. */
+BLIST_PYAPI(PyObject *)
+py_blist_insert(PyBList *self, PyObject *args)
+{
+        Py_ssize_t i;
+        PyObject *v;
+        PyBList *overflow;
+        int err;
+
+        invariants(self, VALID_USER|VALID_RW);
+
+        DANGER_BEGIN;
+        err = PyArg_ParseTuple(args, "nO:insert", &i, &v);
+        DANGER_END;
+        if (!err)
+                return _ob(NULL);
+
+        if (self->n == PY_SSIZE_T_MAX) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "cannot add more objects to list");
+                return _ob(NULL);
+        }
+
+        /* Clamp the insertion point to [0, n]. */
+        if (i < 0) {
+                i += self->n;
+                if (i < 0)
+                        i = 0;
+        } else if (i > self->n)
+                i = self->n;
+
+        /* Speed up the common case */
+        if (self->leaf && self->num_children < LIMIT) {
+                Py_INCREF(v);
+
+                shift_right(self, i, 1);
+                self->num_children++;
+                self->n++;
+                self->children[i] = v;
+                Py_RETURN_NONE;
+        }
+
+        /* General case: insert down the tree, splitting the root if the
+         * insertion overflowed it. */
+        overflow = ins1(self, i, v);
+        if (overflow)
+                blist_overflow_root(self, overflow);
+        ext_mark(self, 0, DIRTY);
+        Py_RETURN_NONE;
+}
+
+/* L.append(object): append object to the end.  Returns None, or NULL
+ * on error. */
+BLIST_PYAPI(PyObject *)
+py_blist_append(PyBList *self, PyObject *v)
+{
+        int err;
+
+        invariants(self, VALID_USER|VALID_RW);
+
+        err = blist_append(self, v);
+
+        if (err < 0)
+                return _ob(NULL);
+
+        Py_RETURN_NONE;
+}
+
+/* mp_subscript slot: implements L[item] where item is an integer index
+ * or a slice.  Returns a new reference, or NULL on error. */
+BLIST_PYAPI(PyObject *)
+py_blist_subscript(PyObject *oself, PyObject *item)
+{
+        PyBList *self;
+
+        invariants(oself, VALID_USER);
+
+        self = (PyBList *) oself;
+
+        if (PyIndex_Check(item)) {
+                Py_ssize_t i;
+                PyObject *ret;
+
+                /* Fast path for exact ints; fall through to the generic
+                 * __index__ conversion on overflow or other failure. */
+                if (PyLong_CheckExact(item)) {
+                        i = PyInt_AsSsize_t(item);
+                        if (i == -1 && PyErr_Occurred()) {
+                                PyErr_Clear();
+                                goto number;
+                        }
+                } else {
+        number:
+                        i = PyNumber_AsSsize_t(item, PyExc_IndexError);
+                        if (i == -1 && PyErr_Occurred())
+                                return _ob(NULL);
+                }
+
+                if (i < 0)
+                        i += self->n;
+
+                if (i < 0 || i >= self->n) {
+                        set_index_error();
+                        return _ob(NULL);
+                }
+
+                if (self->leaf)
+                        ret = self->children[i];
+                else
+                        ret = _PyBList_GET_ITEM_FAST2((PyBListRoot*)self, i);
+                Py_INCREF(ret);
+
+                return _ob(ret);
+        } else if (PySlice_Check(item)) {
+                Py_ssize_t start, stop, step, slicelength, cur, i;
+                PyBList* result;
+                PyObject* it;
+
+                if (PySlice_GetIndicesEx((PySliceObject*)item, self->n,
+                                         &start, &stop,&step,&slicelength)<0) {
+                        return _ob(NULL);
+                }
+
+                /* Contiguous slices take the optimized slice path. */
+                if (step == 1)
+                        return _redir((PyObject *)
+                                      py_blist_get_slice((PyObject *) self, start, stop));
+
+                result = blist_root_new();
+
+                if (slicelength <= 0)
+                        return _ob((PyObject *) result);
+
+                /* This could be made slightly faster by using forests */
+                /* Also, by special-casing small trees */
+                for (cur = start, i = 0; i < slicelength; cur += step, i++) {
+                        int err;
+
+                        it = blist_get1(self, cur);
+                        err = blist_append(result, it);
+                        if (err < 0) {
+                                Py_DECREF(result);
+                                return _ob(NULL);
+                        }
+                }
+
+                ext_mark(result, 0, DIRTY);
+                return _ob((PyObject *) result);
+        } else {
+                PyErr_SetString(PyExc_TypeError,
+                                "list indices must be integers");
+                return _ob(NULL);
+        }
+}
+
+#if PY_MAJOR_VERSION == 2 && PY_MINOR_VERSION >= 6 || PY_MAJOR_VERSION >= 3
+/* __sizeof__ for user-visible root blists: struct + children array +
+ * index/offset/dirty/setclean caches. */
+static PyObject *
+py_blist_root_sizeof(PyBListRoot *root)
+{
+        Py_ssize_t res;
+        res = sizeof(PyBListRoot)
+                + LIMIT * sizeof(PyObject *)
+                + root->index_allocated * (sizeof (PyBList *) +sizeof(Py_ssize_t))
+                + root->dirty_length * sizeof(Py_ssize_t)
+                + (root->index_allocated ?
+                   SETCLEAN_LEN(root->index_allocated) * sizeof(unsigned): 0);
+        return PyLong_FromSsize_t(res);
+}
+
+/* __sizeof__ for internal nodes: struct + children array only. */
+static PyObject *
+py_blist_sizeof(PyBList *self)
+{
+        Py_ssize_t res;
+        res = sizeof(PyBList)
+                + LIMIT * sizeof(PyObject *);
+        return PyLong_FromSsize_t(res);
+}
+
+PyDoc_STRVAR(sizeof_doc,
+"L.__sizeof__() -- size of L in memory, in bytes");
+#endif
+
+/************************************************************************
+ * Routines for supporting pickling
+ */
+
+#ifndef BLIST_IN_PYTHON
+/* __getstate__ helper: return this node's children as a plain list of
+ * new references. */
+BLIST_PYAPI(PyObject *)
+blist_getstate(PyBList *self)
+{
+        PyObject *lst;
+        int i;
+
+        invariants(self, VALID_PARENT);
+
+        lst = PyList_New(self->num_children);
+        for (i = 0; i < self->num_children; i++) {
+                PyList_SET_ITEM(lst, i, self->children[i]);
+                Py_INCREF(PyList_GET_ITEM(lst, i));
+        }
+
+        if (PyRootBList_CheckExact(self))
+                ext_mark_set_dirty_all(self);
+
+        return _ob(lst);
+}
+
+/* __setstate__: rebuild this node from the child list produced by
+ * blist_getstate().  A child that is itself an internal BList node
+ * makes this a non-leaf.  Returns None, or NULL on invalid state. */
+BLIST_PYAPI(PyObject *)
+py_blist_setstate(PyBList *self, PyObject *state)
+{
+        Py_ssize_t i;
+
+        invariants(self, VALID_PARENT);
+
+        if (!PyList_CheckExact(state) || PyList_GET_SIZE(state) > LIMIT) {
+                PyErr_SetString(PyExc_TypeError, "invalid state");
+                return _ob(NULL);
+        }
+
+        for (self->n = i = 0; i < PyList_GET_SIZE(state); i++) {
+                PyObject *child = PyList_GET_ITEM(state, i);
+                if (Py_TYPE(child) == &PyBList_Type) {
+                        self->leaf = 0;
+                        self->n += ((PyBList*)child)->n;
+                } else
+                        self->n++;
+                self->children[i] = child;
+                Py_INCREF(child);
+        }
+
+        self->num_children = PyList_GET_SIZE(state);
+
+        if (PyRootBList_CheckExact(self))
+                ext_reindex_all((PyBListRoot*)self);
+
+        Py_RETURN_NONE;
+}
+
+/* __reduce__: pickle as (type, (), state) so unpickling calls
+ * __setstate__ with blist_getstate()'s list. */
+BLIST_PYAPI(PyObject *)
+py_blist_reduce(PyBList *self)
+{
+        PyObject *rv, *args, *type;
+
+        invariants(self, VALID_PARENT);
+
+        type = (PyObject *) Py_TYPE(self);
+        args = PyTuple_New(0);
+        rv = PyTuple_New(3);
+        PyTuple_SET_ITEM(rv, 0, type);
+        Py_INCREF(type);
+        PyTuple_SET_ITEM(rv, 1, args);
+        PyTuple_SET_ITEM(rv, 2, blist_getstate(self));
+
+        return _ob(rv);
+}
+#endif
+
+/* Docstrings for the method tables below. */
+PyDoc_STRVAR(getitem_doc,
+             "x.__getitem__(y) <==> x[y]");
+PyDoc_STRVAR(reversed_doc,
+             "L.__reversed__() -- return a reverse iterator over the list");
+PyDoc_STRVAR(append_doc,
+"L.append(object) -- append object to end");
+PyDoc_STRVAR(extend_doc,
+"L.extend(iterable) -- extend list by appending elements from the iterable");
+PyDoc_STRVAR(insert_doc,
+"L.insert(index, object) -- insert object before index");
+PyDoc_STRVAR(pop_doc,
+"L.pop([index]) -> item -- remove and return item at index (default last)");
+PyDoc_STRVAR(remove_doc,
+"L.remove(value) -- remove first occurrence of value");
+PyDoc_STRVAR(index_doc,
+"L.index(value, [start, [stop]]) -> integer -- return first index of value");
+PyDoc_STRVAR(count_doc,
+"L.count(value) -> integer -- return number of occurrences of value");
+PyDoc_STRVAR(reverse_doc,
+"L.reverse() -- reverse *IN PLACE*");
+PyDoc_STRVAR(sort_doc,
+"L.sort(cmp=None, key=None, reverse=False) -- stable sort *IN PLACE*;\n\
+cmp(x, y) -> -1, 0, 1");
+
+/* Method table for the user-visible blist type (PyRootBList_Type). */
+static PyMethodDef blist_methods[] = {
+        {"__getitem__", (PyCFunction)py_blist_subscript, METH_O|METH_COEXIST, getitem_doc},
+        {"__reversed__",(PyCFunction)py_blist_reversed, METH_NOARGS, reversed_doc},
+#ifndef BLIST_IN_PYTHON
+        {"__reduce__",  (PyCFunction)py_blist_reduce, METH_NOARGS, NULL},
+        {"__setstate__",(PyCFunction)py_blist_setstate, METH_O, NULL},
+#endif
+        {"append",      (PyCFunction)py_blist_append,  METH_O, append_doc},
+        {"insert",      (PyCFunction)py_blist_insert,  METH_VARARGS, insert_doc},
+        {"extend",      (PyCFunction)py_blist_extend,  METH_O, extend_doc},
+        {"pop",         (PyCFunction)py_blist_pop,     METH_VARARGS, pop_doc},
+        {"remove",      (PyCFunction)py_blist_remove,  METH_O, remove_doc},
+        {"index",       (PyCFunction)py_blist_index,   METH_VARARGS, index_doc},
+        {"count",       (PyCFunction)py_blist_count,   METH_O, count_doc},
+        {"reverse",     (PyCFunction)py_blist_reverse, METH_NOARGS, reverse_doc},
+        {"sort",        (PyCFunction)py_blist_sort,    METH_VARARGS | METH_KEYWORDS, sort_doc},
+#if defined(Py_DEBUG) && !defined(BLIST_IN_PYTHON)
+        {"debug",       (PyCFunction)py_blist_debug,   METH_NOARGS, NULL},
+#endif
+#if PY_MAJOR_VERSION == 2 && PY_MINOR_VERSION >= 6 || PY_MAJOR_VERSION >= 3
+        {"__sizeof__",  (PyCFunction)py_blist_root_sizeof, METH_NOARGS, sizeof_doc},
+#endif
+        {NULL,          NULL}           /* sentinel */
+};
+
+/* Method table for internal tree nodes (PyBList_Type): only pickling
+ * and __sizeof__ support. */
+static PyMethodDef blist_internal_methods[] = {
+#ifndef BLIST_IN_PYTHON
+        {"__reduce__",  (PyCFunction)py_blist_reduce, METH_NOARGS, NULL},
+        {"__setstate__",(PyCFunction)py_blist_setstate, METH_O, NULL},
+#endif
+#if PY_MAJOR_VERSION == 2 && PY_MINOR_VERSION >= 6 || PY_MAJOR_VERSION >= 3
+        {"__sizeof__",  (PyCFunction)py_blist_sizeof, METH_NOARGS, sizeof_doc},
+#endif
+        {NULL,          NULL}           /* sentinel */
+};
+
+/* Sequence protocol table for the user-visible blist type. */
+static PySequenceMethods blist_as_sequence = {
+        py_blist_length,                   /* sq_length */
+        0,                /* sq_concat */
+        py_blist_repeat,            /* sq_repeat */
+        py_blist_get_item,          /* sq_item */
+        py_blist_get_slice,         /* sq_slice */
+        py_blist_ass_item,          /* sq_ass_item */
+        py_blist_ass_slice,         /* sq_ass_slice */
+        py_blist_contains,          /* sq_contains */
+        0,        /* sq_inplace_concat */
+        py_blist_inplace_repeat,    /* sq_inplace_repeat */
+};
+
+PyDoc_STRVAR(blist_doc,
+"blist() -> new list\n"
+"blist(iterable) -> new list initialized from iterable's items");
+
+/* Mapping protocol table: routes L[x] through the subscript handlers so
+ * slices are supported. */
+static PyMappingMethods blist_as_mapping = {
+        py_blist_length,
+        py_blist_subscript,
+        py_blist_ass_subscript
+};
+
+/* All of this, just to get __radd__ to work */
+/* Number protocol table: only nb_add/nb_inplace_add are populated, so
+ * list + blist dispatches to py_blist_concat. */
+#if PY_MAJOR_VERSION < 3
+static PyNumberMethods blist_as_number = {
+        py_blist_concat,                    /* nb_add */
+        0,                                  /* nb_subtract */
+        0,                                  /* nb_multiply */
+        0,                                  /* nb_divide */
+        0,                                  /* nb_remainder */
+        0,                                  /* nb_divmod */
+        0,                                  /* nb_power */
+        0,                                  /* nb_negative */
+        0,                                  /* tp_positive */
+        0,                                  /* tp_absolute */
+        0,                                  /* tp_nonzero */
+        0,                                  /* nb_invert */
+        0,                                  /* nb_lshift */
+        0,                                  /* nb_rshift */
+        0,                                  /* nb_and */
+        0,                                  /* nb_xor */
+        0,                                  /* nb_or */
+        0,                                  /* nb_coerce */
+        0,                                  /* nb_int */
+        0,                                  /* nb_long */
+        0,                                  /* nb_float */
+        0,                                  /* nb_oct */
+        0,                                  /* nb_hex */
+        py_blist_inplace_concat,            /* nb_inplace_add */
+        0,                                  /* nb_inplace_subtract */
+        0,                                  /* nb_inplace_multiply */
+        0,                                  /* nb_inplace_divide */
+        0,                                  /* nb_inplace_remainder */
+        0,                                  /* nb_inplace_power */
+        0,                                  /* nb_inplace_lshift */
+        0,                                  /* nb_inplace_rshift */
+        0,                                  /* nb_inplace_and */
+        0,                                  /* nb_inplace_xor */
+        0,                                  /* nb_inplace_or */
+        0,                                  /* nb_floor_divide */
+        0,                                  /* nb_true_divide */
+        0,                                  /* nb_inplace_floor_divide */
+        0,                                  /* nb_inplace_true_divide */
+        0,                                  /* nb_index */
+};
+#else
+static PyNumberMethods blist_as_number = {
+        (binaryfunc) py_blist_concat,       /* nb_add */
+        0,                                  /* nb_subtract */
+        0,                                  /* nb_multiply */
+        0,                                  /* nb_remainder */
+        0,                                  /* nb_divmod */
+        0,                                  /* nb_power */
+        0,                                  /* nb_negative */
+        0,                                  /* tp_positive */
+        0,                                  /* tp_absolute */
+        0,                                  /* tp_bool */
+        0,                                  /* nb_invert */
+        0,                                  /* nb_lshift */
+        0,                                  /* nb_rshift */
+        0,                                  /* nb_and */
+        0,                                  /* nb_xor */
+        0,                                  /* nb_or */
+        0,                                  /* nb_int */
+        0,                                  /* nb_reserved */
+        0,                                  /* nb_float */
+        py_blist_inplace_concat,            /* nb_inplace_add */
+        0,                                  /* nb_inplace_subtract */
+        0,                                  /* nb_inplace_multiply */
+        0,                                  /* nb_inplace_remainder */
+        0,                                  /* nb_inplace_power */
+        0,                                  /* nb_inplace_lshift */
+        0,                                  /* nb_inplace_rshift */
+        0,                                  /* nb_inplace_and */
+        0,                                  /* nb_inplace_xor */
+        0,                                  /* nb_inplace_or */
+        0,                                  /* nb_floor_divide */
+        0,                                  /* nb_true_divide */
+        0,                                  /* nb_inplace_floor_divide */
+        0,                                  /* nb_inplace_true_divide */
+        0,                                  /* nb_index */
+};
+#endif
+
+/* Type object for internal (non-root) tree nodes.  Not intended for
+ * direct use from Python; exposes only pickling helpers. */
+PyTypeObject PyBList_Type = {
+        PyVarObject_HEAD_INIT(NULL, 0)
+#ifdef BLIST_IN_PYTHON
+        "__internal_blist",
+#else
+        "_blist.__internal_blist",
+#endif
+        sizeof(PyBList),
+        0,
+        py_blist_dealloc,                       /* tp_dealloc */
+        0,                                      /* tp_print */
+        0,                                      /* tp_getattr */
+        0,                                      /* tp_setattr */
+        0,                                      /* tp_compare */
+        0,                                      /* tp_repr */
+        0,                                      /* tp_as_number */
+        0,                                      /* tp_as_sequence */
+        0,                                      /* tp_as_mapping */
+        py_blist_nohash,                        /* tp_hash */
+        0,                                      /* tp_call */
+        0,                                      /* tp_str */
+        PyObject_GenericGetAttr,                /* tp_getattro */
+        0,                                      /* tp_setattro */
+        0,                                      /* tp_as_buffer */
+        Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
+        Py_TPFLAGS_BASETYPE,                    /* tp_flags */
+        blist_doc,                              /* tp_doc */
+        py_blist_traverse,                      /* tp_traverse */
+        0,                                      /* tp_clear */
+        0,                                      /* tp_richcompare */
+        0,                                      /* tp_weaklistoffset */
+        0,                                      /* tp_iter */
+        0,                                      /* tp_iternext */
+        blist_internal_methods,                 /* tp_methods */
+        0,                                      /* tp_members */
+        0,                                      /* tp_getset */
+        0,                                      /* tp_base */
+        0,                                      /* tp_dict */
+        0,                                      /* tp_descr_get */
+        0,                                      /* tp_descr_set */
+        0,                                      /* tp_dictoffset */
+        py_blist_internal_init,                 /* tp_init */
+        0,                                      /* tp_alloc */
+        py_blist_internal_tp_new,               /* tp_new */
+        PyObject_GC_Del,                        /* tp_free */
+};
+
+/* Type object for the user-visible blist (named "list" when built into
+ * the core). */
+PyTypeObject PyRootBList_Type = {
+        PyVarObject_HEAD_INIT(NULL, 0)
+#ifdef BLIST_IN_PYTHON
+        "list",
+#else
+        "_blist.blist",
+#endif
+        sizeof(PyBListRoot),
+        0,
+        py_blist_dealloc,                       /* tp_dealloc */
+        0,                                      /* tp_print */
+        0,                                      /* tp_getattr */
+        0,                                      /* tp_setattr */
+        0,                                      /* tp_compare */
+        py_blist_repr,                          /* tp_repr */
+        &blist_as_number,                       /* tp_as_number */
+        &blist_as_sequence,                     /* tp_as_sequence */
+        &blist_as_mapping,                      /* tp_as_mapping */
+        py_blist_nohash,                        /* tp_hash */
+        0,                                      /* tp_call */
+        0,                                      /* tp_str */
+        PyObject_GenericGetAttr,                /* tp_getattro */
+        0,                                      /* tp_setattro */
+        0,                                      /* tp_as_buffer */
+        Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
+        Py_TPFLAGS_BASETYPE                     /* tp_flags */
+#if (PY_MAJOR_VERSION == 2 && PY_MINOR_VERSION >= 6 || PY_MAJOR_VERSION >= 3) && defined(BLIST_IN_PYTHON)
+        | Py_TPFLAGS_LIST_SUBCLASS
+#endif
+        ,
+        blist_doc,                              /* tp_doc */
+        py_blist_traverse,                      /* tp_traverse */
+        py_blist_clear,                         /* tp_clear */
+        py_blist_richcompare,                   /* tp_richcompare */
+        0,                                      /* tp_weaklistoffset */
+        py_blist_iter,                          /* tp_iter */
+        0,                                      /* tp_iternext */
+        blist_methods,                          /* tp_methods */
+        0,                                      /* tp_members */
+        0,                                      /* tp_getset */
+        0,                                      /* tp_base */
+        0,                                      /* tp_dict */
+        0,                                      /* tp_descr_get */
+        0,                                      /* tp_descr_set */
+        0,                                      /* tp_dictoffset */
+        py_blist_init,                          /* tp_init */
+        PyType_GenericAlloc,                    /* tp_alloc */
+        py_blist_root_tp_new,                   /* tp_new */
+        PyObject_GC_Del,                        /* tp_free */
+};
+
+/* The _blist module itself exports no functions. */
+static PyMethodDef module_methods[] = { { NULL } };
+
+/* Phase-1 type setup: one-time global initialization and manual
+ * ob_type wiring, done before PyType_Ready. */
+BLIST_LOCAL(int)
+init_blist_types1(void)
+{
+        decref_init();
+        highest_set_bit_init();
+
+        Py_TYPE(&PyBList_Type) = &PyType_Type;
+        Py_TYPE(&PyRootBList_Type) = &PyType_Type;
+        Py_TYPE(&PyBListIter_Type) = &PyType_Type;
+        Py_TYPE(&PyBListReverseIter_Type) = &PyType_Type;
+
+        Py_INCREF(&PyBList_Type);
+        Py_INCREF(&PyRootBList_Type);
+        Py_INCREF(&PyBListIter_Type);
+        Py_INCREF(&PyBListReverseIter_Type);
+
+        return 0;
+}
+
+/* Phase-2 type setup: finalize each type with PyType_Ready.  Returns 0
+ * on success, -1 on the first failure. */
+BLIST_LOCAL(int)
+init_blist_types2(void)
+{
+        if (PyType_Ready(&PyRootBList_Type) < 0) return -1;
+        if (PyType_Ready(&PyBList_Type) < 0) return -1;
+        if (PyType_Ready(&PyBListIter_Type) < 0) return -1;
+        if (PyType_Ready(&PyBListReverseIter_Type) < 0) return -1;
+
+        return 0;
+}
+
+#if PY_MAJOR_VERSION < 3
/* Python 2 module entry point: create the _blist module and expose the
 * blist type, the node-size limit, and the internal node type. */
PyMODINIT_FUNC
init_blist(void)
{
#ifndef BLIST_IN_PYTHON
        PyCFunctionObject *meth;
        PyObject *gc_module;
#endif

        PyObject *m;
        PyObject *limit = PyInt_FromLong(LIMIT);

        /* NOTE(review): return values are unchecked throughout this
         * function; a failure in any step leaves the module
         * half-initialized. */
        init_blist_types1();
        init_blist_types2();

        m = Py_InitModule3("_blist", module_methods, "_blist");

        PyModule_AddObject(m, "blist", (PyObject *) &PyRootBList_Type);
        PyModule_AddObject(m, "_limit", limit);
        PyModule_AddObject(m, "__internal_blist", (PyObject *)
                &PyBList_Type);

#ifndef BLIST_IN_PYTHON
        /* Cache direct C function pointers to gc.enable/disable/
         * isenabled so the collector can be toggled later without
         * attribute lookups. */
        gc_module = PyImport_ImportModule("gc");

        meth = (PyCFunctionObject*)PyObject_GetAttrString(gc_module, "enable");
        pgc_enable = meth->m_ml->ml_meth;

        meth = (PyCFunctionObject*)PyObject_GetAttrString(gc_module,"disable");
        pgc_disable = meth->m_ml->ml_meth;

        meth = (PyCFunctionObject*)PyObject_GetAttrString(gc_module,
                                                          "isenabled");
        pgc_isenabled = meth->m_ml->ml_meth;
#endif
}
+#else
+
/* Module definition used by the Python 3 entry point below. */
static struct PyModuleDef blist_module = {
        PyModuleDef_HEAD_INIT,
        "_blist",        /* m_name */
        NULL,            /* m_doc */
        -1,              /* m_size: no per-module state */
        module_methods,
        NULL,            /* m_reload */
        NULL,            /* m_traverse */
        NULL,            /* m_clear */
        NULL,            /* m_free */
};
+
/* Python 3 module entry point.  Returns the new module object, or NULL
 * with an exception set if type initialization fails. */
PyMODINIT_FUNC
PyInit__blist(void)
{
#ifndef BLIST_IN_PYTHON
        PyModuleDef *gc_module_def;
        PyMethodDef *gc_methods;
        PyObject *gc_module;
#endif
        PyObject *m;
        /* NOTE(review): PyInt_FromLong is presumably #define'd to
         * PyLong_FromLong for Python 3 elsewhere in this file --
         * confirm. */
        PyObject *limit = PyInt_FromLong(LIMIT);

        if (init_blist_types1() < 0)
                return NULL;
        if (init_blist_types2() < 0)
                return NULL;

        m = PyModule_Create(&blist_module);
        /* NOTE(review): m, limit, and the gc lookups below are not
         * checked for failure. */

        PyModule_AddObject(m, "blist", (PyObject *) &PyRootBList_Type);
        PyModule_AddObject(m, "_limit", limit);
        PyModule_AddObject(m, "__internal_blist", (PyObject *)
                &PyBList_Type);

#ifndef BLIST_IN_PYTHON
        /* Scan gc's method table and cache direct C function pointers
         * to enable/disable/isenabled, avoiding attribute lookups
         * later. */
        gc_module = PyImport_ImportModule("gc");
        gc_module_def = PyModule_GetDef(gc_module);
        gc_methods = gc_module_def->m_methods;
        while (gc_methods->ml_name != NULL) {
                if (0 == strcmp(gc_methods->ml_name, "enable"))
                        pgc_enable = gc_methods->ml_meth;
                else if (0 == strcmp(gc_methods->ml_name, "disable"))
                        pgc_disable = gc_methods->ml_meth;
                else if (0 == strcmp(gc_methods->ml_name, "isenabled"))
                        pgc_isenabled = gc_methods->ml_meth;
                gc_methods++;
        }
#endif
        return m;
}
+#endif
+
+/************************************************************************
+ * The List C API, for building BList into the Python core
+ */
+
+#ifdef Py_BUILD_CORE
/* List C API shim (used when blist is built into the core): phase 1. */
int
PyList_Init1(void)
{
        return init_blist_types1();
}
+
/* List C API shim: phase 2 (PyType_Ready on all list types). */
int
PyList_Init2(void)
{
        return init_blist_types2();
}
+
/* Replacement for CPython's PyList_New(): create a list of <size>
 * slots, each initially NULL.  Per the PyList_New contract the caller
 * must fill every slot before exposing the list to other code. */
PyObject *PyList_New(Py_ssize_t size)
{
        PyBList *self = blist_root_new();
        PyObject *tmp;

        if (self == NULL)
                return NULL;

        if (size <= LIMIT) {
                /* Fits in a single leaf node. */
                self->n = size;
                self->num_children = size;
                memset(self->children, 0, sizeof(PyObject *) * size);
                check_invariants(self);
                return (PyObject *) self;
        }

        /* Larger sizes: build a one-element list of NULL and repeat it,
         * constructing the tree via blist_repeat.
         * NOTE(review): the result of blist_repeat is not checked for
         * NULL before use. */
        self->n = 1;
        self->num_children = 1;
        self->children[0] = NULL;

        tmp = blist_repeat(self, size);
        check_invariants((PyBList *) tmp);
        SAFE_DECREF(self);
        _decref_flush();
        check_invariants((PyBList *) tmp);
        assert(((PyBList *)tmp)->n == size);
        ext_dealloc((PyBListRoot *) tmp);
        return tmp;
}
+
/* Return the number of items in the list, or -1 on a bad argument. */
Py_ssize_t PyList_Size(PyObject *ob)
{
        if (ob == NULL || !PyList_Check(ob)) {
                PyErr_BadInternalCall();
                return -1;
        }

        return py_blist_length(ob);
}
+
/* Return the item at index i, or NULL with IndexError set when i is
 * out of range.  NOTE(review): CPython's PyList_GetItem returns a
 * borrowed reference; presumably blist_get1() matches that -- confirm
 * against its definition. */
PyObject *PyList_GetItem(PyObject *ob, Py_ssize_t i)
{
        PyBList *self = (PyBList *) ob;
        PyObject *ret;

        if (ob == NULL || !PyList_Check(ob)) {
                PyErr_BadInternalCall();
                return NULL;
        }

        assert(i >= 0 && i < self->n); /* XXX Remove */

        if (i < 0 || i >= self->n) {
                set_index_error();
                return NULL;
        }

        ret = blist_get1((PyBList *) ob, i);
        assert(ret != NULL);
        return ret;
}
+
/* Store item at index i.  Like CPython's PyList_SetItem this steals a
 * reference to item: py_blist_ass_item takes its own reference, and the
 * Py_XDECREF below consumes the one the caller passed in.  Returns 0 on
 * success, -1 on error. */
int PyList_SetItem(PyObject *ob, Py_ssize_t i, PyObject *item)
{
        int ret;

        if (ob == NULL || !PyList_Check(ob)) {
                PyErr_BadInternalCall();
                Py_XDECREF(item);
                return -1;
        }

        assert(i >= 0 && i < ((PyBList *)ob)->n); /* XXX Remove */
        ret = py_blist_ass_item(ob, i, item);
        assert(Py_REFCNT(item) > 1);
        Py_XDECREF(item);
        return ret;
}
+
+int PyList_Insert(PyObject *ob, Py_ssize_t i, PyObject *v)
+{
+ PyBList *overflow;
+ PyBList *self = (PyBList *) ob;
+
+ if (ob == NULL || !PyList_Check(ob)) {
+ PyErr_BadInternalCall();
+ return -1;
+ }
+
+ invariants(self, VALID_USER|VALID_RW);
+
+ if (self->n == PY_SSIZE_T_MAX) {
+ PyErr_SetString(PyExc_OverflowError,
+ "cannot add more objects to list");
+ return _int(0);
+ }
+
+ if (i < 0) {
+ i += self->n;
+ if (i < 0)
+ i = 0;
+ } else if (i > self->n)
+ i = self->n;
+
+ if (self->leaf && self->num_children < LIMIT) {
+ Py_INCREF(v);
+
+ shift_right(self, i, 1);
+ self->num_children++;
+ self->n++;
+ self->children[i] = v;
+ return _int(0);
+ }
+
+ overflow = ins1(self, i, v);
+ if (overflow)
+ blist_overflow_root(self, overflow);
+ ext_mark(self, 0, DIRTY);
+
+ return _int(0);
+}
+
/* Append item to the list.  Returns 0 on success, -1 on error. */
int PyList_Append(PyObject *ob, PyObject *item)
{
        if (ob == NULL || !PyList_Check(ob)) {
                PyErr_BadInternalCall();
                return -1;
        }

        return blist_append((PyBList *) ob, item);
}
+
/* Return a new list holding items [i, j), or NULL on a bad argument. */
PyObject *PyList_GetSlice(PyObject *ob, Py_ssize_t i, Py_ssize_t j)
{
        if (ob == NULL || !PyList_Check(ob)) {
                PyErr_BadInternalCall();
                return NULL;
        }

        return py_blist_get_slice(ob, i, j);
}
+
/* Replace items [i, j) with the contents of lst (lst == NULL deletes,
 * per the CPython API convention handled by py_blist_ass_slice).
 * Returns 0 on success, -1 on error. */
int PyList_SetSlice(PyObject *ob, Py_ssize_t i, Py_ssize_t j, PyObject *lst)
{
        if (ob == NULL || !PyList_Check(ob)) {
                PyErr_BadInternalCall();
                return -1;
        }

        return py_blist_ass_slice(ob, i, j, lst);
}
+
/* Sort the list in place with default comparison.  Returns 0 on
 * success, -1 on error (the None result from py_blist_sort is
 * discarded). */
int PyList_Sort(PyObject *ob)
{
        PyObject *ret;

        if (ob == NULL || !PyList_Check(ob)) {
                PyErr_BadInternalCall();
                return -1;
        }

        ret = py_blist_sort((PyBListRoot *) ob, NULL, NULL);
        if (ret == NULL)
                return -1;

        Py_DECREF(ret);
        return 0;
}
+
/* Reverse the list in place.  Returns 0 on success, -1 on a bad
 * argument. */
int PyList_Reverse(PyObject *ob)
{
        if (ob == NULL || !PyList_Check(ob)) {
                PyErr_BadInternalCall();
                return -1;
        }

        invariants((PyBList *) ob, VALID_USER|VALID_RW);

        blist_reverse((PyBListRoot *) ob);

        return _int(0);
}
+
/* Return a new tuple containing the items of the list. */
PyObject *PyList_AsTuple(PyObject *ob)
{
        PyBList *self = (PyBList *) ob;
        PyObject *item;
        PyTupleObject *tuple;
        int i;  /* NOTE(review): int may overflow for lists longer than
                 * INT_MAX; Py_ssize_t would be safer. */

        if (ob == NULL || !PyList_Check(ob)) {
                PyErr_BadInternalCall();
                return NULL;
        }

        invariants(self, VALID_USER | VALID_DECREF);

        /* DANGER_BEGIN/END bracket a call that may run arbitrary Python
         * code (allocation can trigger GC). */
        DANGER_BEGIN;
        tuple = (PyTupleObject *) PyTuple_New(self->n);
        DANGER_END;
        if (tuple == NULL)
                return _ob(NULL);

        /* Copy items directly into the tuple's item array, taking a new
         * reference to each. */
        i = 0;
        ITER(self, item, {
                tuple->ob_item[i++] = item;
                Py_INCREF(item);
        })

        assert(i == self->n);

        decref_flush();

        return _ob((PyObject *) tuple);

}
+
/* Extend ob with the contents of iterable b (CPython-internal API). */
PyObject *
_PyList_Extend(PyBListRoot *ob, PyObject *b)
{
        return py_blist_extend((PyBList *) ob, b);
}
+
+#if PY_MAJOR_VERSION == 2 && PY_MINOR_VERSION >= 6 || PY_MAJOR_VERSION >= 3
/* Interpreter-shutdown hook; currently a no-op. */
void
PyList_Fini(void)
{
        /* XXX free statically allocated memory here */
}
+#endif
+
+#endif
diff --git a/_btuple.py b/_btuple.py
new file mode 100644
index 0000000..76e1d03
--- /dev/null
+++ b/_btuple.py
@@ -0,0 +1,92 @@
+from _blist import blist
+from ctypes import c_int
+import collections
class btuple(collections.Sequence):
    """An immutable, hashable tuple-like sequence backed by a blist.

    Behaves like the built-in tuple, with blist's asymptotic
    performance for slicing, concatenation, and repetition.
    """
    def __init__(self, seq=None):
        if isinstance(seq, btuple):
            # btuples are immutable, so sharing the backing blist is safe.
            self._blist = seq._blist
        elif seq is not None:
            self._blist = blist(seq)
        else:
            self._blist = blist()
        self._hash = -1  # cached hash; -1 means "not yet computed"

    def _btuple_or_tuple(self, other, f):
        # Apply the binary blist operation f after coercing `other`
        # (btuple or tuple) to a blist; a blist result is wrapped back
        # into a btuple.
        if isinstance(other, btuple):
            rv = f(self._blist, other._blist)
        elif isinstance(other, tuple):
            rv = f(self._blist, blist(other))
        else:
            return NotImplemented
        if isinstance(rv, blist):
            rv = btuple(rv)
        return rv

    def __hash__(self):
        # Based on tuplehash from tupleobject.c.  ctypes.c_int is used
        # to emulate C's wrap-around int arithmetic.
        if self._hash != -1:
            return self._hash

        n = len(self)
        mult = c_int(1000003)
        x = c_int(0x345678)
        for ob in self:
            n -= 1
            y = c_int(hash(ob))
            # Bug fix: c_int instances do not implement ^, * or +=; the
            # arithmetic must go through .value and be re-wrapped in
            # c_int so that it truncates like a C int.
            x = c_int((x.value ^ y.value) * mult.value)
            mult = c_int(mult.value + 82520 + n + n)
        x = c_int(x.value + 97531)
        if x.value == -1:
            x = c_int(-2)
        self._hash = x.value
        return self._hash

    def __add__(self, other):
        rv = self._btuple_or_tuple(other, blist.__add__)
        if rv is NotImplemented:
            raise TypeError
        return rv
    def __radd__(self, other):
        rv = self._btuple_or_tuple(other, blist.__radd__)
        if rv is NotImplemented:
            raise TypeError
        return rv
    def __contains__(self, item):
        return item in self._blist
    def __eq__(self, other):
        return self._btuple_or_tuple(other, blist.__eq__)
    def __ge__(self, other):
        return self._btuple_or_tuple(other, blist.__ge__)
    def __gt__(self, other):
        return self._btuple_or_tuple(other, blist.__gt__)
    def __le__(self, other):
        return self._btuple_or_tuple(other, blist.__le__)
    def __lt__(self, other):
        return self._btuple_or_tuple(other, blist.__lt__)
    def __ne__(self, other):
        return self._btuple_or_tuple(other, blist.__ne__)
    def __iter__(self):
        return iter(self._blist)
    def __len__(self):
        return len(self._blist)
    def __getitem__(self, key):
        if isinstance(key, slice):
            return btuple(self._blist[key])
        return self._blist[key]
    def __getslice__(self, i, j):
        # Python 2 only; Python 3 routes slicing through __getitem__.
        return btuple(self._blist[i:j])
    def __repr__(self):
        # Strip the "blist([" prefix and "])" suffix from the blist repr.
        return 'btuple((' + repr(self._blist)[7:-2] + '))'
    def __str__(self):
        return repr(self)
    def __mul__(self, i):
        return btuple(self._blist * i)
    def __rmul__(self, i):
        return btuple(i * self._blist)
    def count(self, item):
        return self._blist.count(item)
    def index(self, item):
        return self._blist.index(item)
+
# Bug fix: ``c_int`` must remain bound at module level -- it is called
# at runtime by ``btuple.__hash__``, so deleting it caused a NameError
# on the first hash() of a btuple.  ``collections`` is only needed while
# the class body is evaluated and can be dropped safely.
del collections
diff --git a/_sorteddict.py b/_sorteddict.py
new file mode 100644
index 0000000..225b806
--- /dev/null
+++ b/_sorteddict.py
@@ -0,0 +1,142 @@
+from _sortedlist import sortedset
+import collections, sys
+from _blist import blist
+
class missingdict(dict):
    """dict subclass delegating missing-key lookups to a callable.

    The owner must assign a one-argument callable to the ``_missing``
    attribute; it is invoked with the absent key via the standard
    ``__missing__`` protocol and its result is returned.
    """
    def __missing__(self, key):
        return self._missing(key)
+
class KeysView(collections.KeysView, collections.Sequence):
    """Dynamic, sequence-like view of a sorteddict's keys in sort order."""

    def __getitem__(self, index):
        return self._mapping._sortedkeys[index]
    def __reversed__(self):
        return reversed(self._mapping._sortedkeys)
    def index(self, key):
        """Return the position of *key*; raises ValueError if absent."""
        return self._mapping._sortedkeys.index(key)
    def count(self, key):
        """Return 1 if *key* is in the mapping, else 0 (keys are unique)."""
        # Bug fix: sorteddict has no count() method; membership is the
        # correct test for a mapping's keys.
        return 1 if key in self._mapping else 0
    def _from_iterable(self, it):
        # Bug fix: was declared with ``cls`` while referencing the
        # undefined name ``self`` (NameError), read the nonexistent
        # ``.key`` attribute (sortedset stores ``_key``), and discarded
        # ``it`` entirely.
        return sortedset(it, key=self._mapping._sortedkeys._key)
    def bisect_left(self, key):
        """Return the leftmost insertion point for *key*."""
        return self._mapping._sortedkeys.bisect_left(key)
    def bisect_right(self, key):
        """Return the rightmost insertion point for *key*."""
        return self._mapping._sortedkeys.bisect_right(key)
    bisect = bisect_right
+
class ItemsView(collections.ItemsView, collections.Sequence):
    """Dynamic, sequence-like view of a sorteddict's (key, value) pairs."""

    def __getitem__(self, index):
        if isinstance(index, slice):
            keys = self._mapping._sortedkeys[index]
            return self._from_iterable((key, self._mapping[key])
                                       for key in keys)
        # Bug fix: the attribute is ``_sortedkeys``; the original read
        # the nonexistent ``sortedkeys`` (AttributeError).
        key = self._mapping._sortedkeys[index]
        return (key, self._mapping[key])
    def index(self, item):
        """Return the position of the (key, value) pair *item*."""
        key, value = item
        i = self._mapping._sortedkeys.index(key)
        if self._mapping[key] == value:
            return i
        raise ValueError
    def count(self, item):
        """Return 1 if *item* is a current (key, value) pair, else 0."""
        return 1 if item in self else 0
    def _from_iterable(self, it):
        # Bug fix: was declared with ``cls`` while referencing the
        # undefined name ``self``, read the nonexistent ``.key``
        # attribute, and discarded ``it``.  Items sort by the key
        # function applied to their first element.
        keyf = self._mapping._sortedkeys._key
        if keyf is None:
            return sortedset(it, key=lambda item: item[0])
        return sortedset(it, key=lambda item: keyf(item[0]))
+
class ValuesView(collections.ValuesView, collections.Sequence):
    """Dynamic, sequence-like view of a sorteddict's values in key order."""

    def __getitem__(self, index):
        if isinstance(index, slice):
            keys = self._mapping._sortedkeys[index]
            # Bug fix: sortedset stores the key function as ``_key``;
            # the original read the nonexistent ``.key`` attribute.
            # NOTE(review): ordering the *values* by the keys' sort
            # function looks questionable but is preserved as-is.
            rv = sortedset(key=self._mapping._sortedkeys._key)
            rv.update(self._mapping[key] for key in keys)
            return rv
        # Bug fix: the attribute is ``_sortedkeys`` (was ``sortedkeys``,
        # an AttributeError).
        key = self._mapping._sortedkeys[index]
        return self._mapping[key]
+
class sorteddict(collections.MutableMapping):
    """Mapping that keeps its keys in sorted order.

    The data lives in a plain dict (``_map``); the sorted key order is
    maintained in a parallel sortedset (``_sortedkeys``).  Accepted
    signatures mirror ``dict()``, optionally preceded by a sort-key
    function: ``sorteddict([key_func,] [mapping_or_iterable,] **kw)``.
    """
    def __init__(self, *args, **kw):
        # Subclasses may define __missing__; route dict lookups to it
        # through the missingdict helper.
        if hasattr(self, '__missing__'):
            self._map = missingdict()
            self._map._missing = self.__missing__
        else:
            self._map = dict()
        key = None
        if len(args) > 0:
            # A leading callable positional argument is the sort key.
            if hasattr(args[0], '__call__'):
                key = args[0]
                args = args[1:]
            elif len(args) > 1:
                raise TypeError("'%s' object is not callable" %
                                args[0].__class__.__name__)
        if len(args) > 1:
            raise TypeError('sorteddict expected at most 2 arguments, got %d'
                            % len(args))
        # Copying another sorteddict inherits its key function unless an
        # explicit one was given.
        if len(args) == 1 and isinstance(args[0], sorteddict) and key is None:
            key = args[0]._sortedkeys._key
        self._sortedkeys = sortedset(key=key)
        self.update(*args, **kw)

    # Python 2 gets eager keys/items/values plus view* methods; Python 3
    # gets the view classes directly, matching dict's API.
    if sys.version_info[0] < 3:
        def keys(self):
            return self._sortedkeys.copy()
        def items(self):
            return blist((key, self[key]) for key in self)
        def values(self):
            return blist(self[key] for key in self)
        def viewkeys(self):
            return KeysView(self)
        def viewitems(self):
            return ItemsView(self)
        def viewvalues(self):
            return ValuesView(self)
    else:
        def keys(self):
            return KeysView(self)
        def items(self):
            return ItemsView(self)
        def values(self):
            return ValuesView(self)

    def __setitem__(self, key, value):
        # Keep _map and _sortedkeys consistent: if storing the value
        # fails after the key was added to the sorted index, roll the
        # index back before re-raising.
        try:
            if key not in self._map:
                self._sortedkeys.add(key)
            self._map[key] = value
        except:
            if key not in self._map:
                self._sortedkeys.discard(key)
            raise

    def __delitem__(self, key):
        self._sortedkeys.discard(key)
        del self._map[key]

    def __getitem__(self, key):
        return self._map[key]

    def __iter__(self):
        # Iterate keys in sorted order.
        return iter(self._sortedkeys)

    def __len__(self):
        return len(self._sortedkeys)

    def copy(self):
        return sorteddict(self)

    @classmethod
    def fromkeys(cls, keys, value=None, key=None):
        """Build a sorteddict with the given keys all mapped to *value*.

        *key*, if given, is the sort-key function for the new dict.
        """
        if key is not None:
            rv = cls(key)
        else:
            rv = cls()
        # NOTE: the loop variable deliberately reuses the name ``key``;
        # the parameter is no longer needed at this point.
        for key in keys:
            rv[key] = value
        return rv

    def __repr__(self):
        return 'sorteddict(%s)' % repr(self._map)

    def __eq__(self, other):
        # Equality compares contents only; the key function is ignored.
        if not isinstance(other, sorteddict):
            return False
        return self._map == other._map
diff --git a/_sortedlist.py b/_sortedlist.py
new file mode 100644
index 0000000..762d20b
--- /dev/null
+++ b/_sortedlist.py
@@ -0,0 +1,647 @@
+from _blist import blist
+import collections, bisect, weakref, operator, itertools, sys, threading
+try: # pragma: no cover
+ izip = itertools.izip
+except AttributeError: # pragma: no cover
+ izip = zip
+
+__all__ = ['sortedlist', 'weaksortedlist', 'sortedset', 'weaksortedset']
+
class ReprRecursion(object):
    """Context manager detecting re-entrant repr() calls on one object.

    Entering yields True when the object is already being repr'ed on
    the current thread (a recursive structure), otherwise False.  The
    bookkeeping is thread-local so concurrent repr() calls on different
    threads do not interfere.
    """

    local = threading.local()

    def __init__(self, ob):
        counts = getattr(self.local, 'repr_count', None)
        if counts is None:
            counts = self.local.repr_count = collections.defaultdict(int)
        self.ob_id = id(ob)
        self.value = self.ob_id in counts

    def __enter__(self):
        self.local.repr_count[self.ob_id] += 1
        return self.value

    def __exit__(self, exc_type, exc_val, exc_tb):
        counts = self.local.repr_count
        counts[self.ob_id] -= 1
        if not counts[self.ob_id]:
            # Drop the entry so the table does not grow without bound.
            del counts[self.ob_id]
        return False
+
class _sortedbase(collections.Sequence):
    """Base class for the sorted sequence types in this module.

    Elements live in a ``blist`` kept in sorted order.  When a *key*
    function is supplied, each element is stored internally as a
    ``(key(value), value)`` pair so comparisons only look at the key;
    the ``_u2i``/``_i2u``/``_u2key``/``_i2key`` helpers translate
    between user objects and that internal representation.
    """
    def __init__(self, iterable=(), key=None):
        self._key = key
        if key is not None and not hasattr(key, '__call__'):
            raise TypeError("'%s' object is not callable" % str(type(key)))
        # Copying a compatible sorted container that uses the identical
        # key function can reuse its already-sorted internal blist.
        if ((isinstance(iterable,type(self))
             or isinstance(self,type(iterable)))
            and iterable._key is key):
            self._blist = blist(iterable._blist)
        else:
            self._blist = blist()
            for v in iterable:
                self.add(v)

    def _from_iterable(self, iterable):
        # Build a new instance of the same class with the same key.
        return self.__class__(iterable, self._key)

    def _u2key(self, value):
        "Convert a user-object to the key"
        if self._key is None:
            return value
        else:
            return self._key(value)

    def _u2i(self, value):
        "Convert a user-object to the internal representation"
        if self._key is None:
            return value
        else:
            return (self._key(value), value)

    def _i2u(self, value):
        "Convert an internal object to a user-object"
        if self._key is None:
            return value
        else:
            return value[1]

    def _i2key(self, value):
        "Convert an internal object to the key"
        if self._key is None:
            return value
        else:
            return value[0]

    def _bisect_left(self, v):
        """Locate the point in the list where v would be inserted.

        Returns an (i, value) tuple:
          - i is the position where v would be inserted
          - value is the current user-object at position i

        This is the key function to override in subclasses.  They must
        accept a user-object v and return a user-object value.
        """

        key = self._u2key(v)
        lo = 0
        hi = len(self._blist)
        while lo < hi:
            mid = (lo+hi)//2
            # NOTE: v is reused as "key at mid"; the original argument
            # is no longer needed once key has been computed above.
            v = self._i2key(self._blist[mid])
            if v < key: lo = mid + 1
            else: hi = mid
        if lo < len(self._blist):
            return lo, self._i2u(self._blist[lo])
        return lo, None

    def _bisect_right(self, v):
        """Same as _bisect_left, but go to the right of equal values"""

        key = self._u2key(v)
        lo = 0
        hi = len(self._blist)
        while lo < hi:
            mid = (lo+hi)//2
            v = self._i2key(self._blist[mid])
            if key < v: hi = mid
            else: lo = mid + 1
        if lo < len(self._blist):
            return lo, self._i2u(self._blist[lo])
        return lo, None

    def bisect_left(self, v):
        """L.bisect_left(v) -> index

        The return value i is such that all e in L[:i] have e < v, and
        all e in a[i:] have e >= v.  So if v already appears in the
        list, i points just before the leftmost v already there.
        """
        return self._bisect_left(v)[0]

    def bisect_right(self, v):
        """L.bisect_right(v) -> index

        Return the index where to insert item v in the list.

        The return value i is such that all e in a[:i] have e <= v,
        and all e in a[i:] have e > v.  So if v already appears in the
        list, i points just beyond the rightmost v already there.
        """
        return self._bisect_right(v)[0]

    bisect = bisect_right

    def add(self, value):
        """Add an element."""
        # Will throw a TypeError when trying to add an object that
        # cannot be compared to objects already in the list.
        i, _ = self._bisect_right(value)
        self._blist.insert(i, self._u2i(value))

    def discard(self, value):
        """Remove an element if it is a member.

        If the element is not a member, do nothing.

        """

        try:
            i, v = self._bisect_left(value)
        except TypeError:
            # Value cannot be compared with values already in the list.
            # Ergo, value isn't in the list.
            return
        i = self._advance(i, value)
        if i >= 0:
            del self._blist[i]

    def __contains__(self, value):
        """x.__contains__(y) <==> y in x"""
        try:
            i, v = self._bisect_left(value)
        except TypeError:
            # Value cannot be compared with values already in the list.
            # Ergo, value isn't in the list.
            return False
        i = self._advance(i, value)
        return i >= 0

    def __len__(self):
        """x.__len__() <==> len(x)"""
        return len(self._blist)

    def __iter__(self):
        """ x.__iter__() <==> iter(x)"""
        return (self._i2u(v) for v in self._blist)

    def __getitem__(self, index):
        """x.__getitem__(y) <==> x[y]"""
        if isinstance(index, slice):
            # Slices return a new container of the same class sharing
            # the key function.
            rv = self.__class__()
            rv._blist = self._blist[index]
            rv._key = self._key
            return rv
        return self._i2u(self._blist[index])

    def _advance(self, i, value):
        "Do a linear search through all items with the same key"
        key = self._u2key(value)
        while i < len(self._blist):
            if self._i2u(self._blist[i]) == value:
                return i
            elif key < self._i2key(self._blist[i]):
                # Passed the run of equal keys: value is absent.
                break
            i += 1
        return -1

    def __reversed__(self):
        """L.__reversed__() -- return a reverse iterator over the list"""
        return (self._i2u(v) for v in reversed(self._blist))

    def index(self, value):
        """L.index(value) -> integer -- return first index of value.

        Raises ValueError if the value is not present.

        """

        try:
            i, v = self._bisect_left(value)
        except TypeError:
            raise ValueError
        i = self._advance(i, value)
        if i >= 0:
            return i
        raise ValueError

    def count(self, value):
        """L.count(value) -> integer -- return number of occurrences of value"""
        try:
            i, _ = self._bisect_left(value)
        except TypeError:
            return 0
        key = self._u2key(value)
        count = 0
        while True:
            i = self._advance(i, value)
            if i == -1:
                return count
            count += 1
            i += 1

    def pop(self, index=-1):
        """L.pop([index]) -> item -- remove and return item at index (default last).

        Raises IndexError if list is empty or index is out of range.

        """

        rv = self[index]
        del self[index]
        return rv

    def __delslice__(self, i, j):
        """x.__delslice__(i, j) <==> del x[i:j]

        Use of negative indices is not supported.

        """
        del self._blist[i:j]

    def __delitem__(self, i):
        """x.__delitem__(y) <==> del x[y]"""
        del self._blist[i]
+
class _weaksortedbase(_sortedbase):
    """Variant of _sortedbase holding weak references to its items.

    Each element is stored internally as ``weakref.ref(value)`` (or a
    ``(key, weakref.ref(value))`` pair when a key function is in use).
    Dead references are lazily purged by ``_squeeze`` during searches
    and iteration.
    """
    def _bisect_left(self, value):
        key = self._u2key(value)
        lo = 0
        hi = len(self._blist)
        while lo < hi:
            mid = (lo+hi)//2
            # Purge dead references at mid; hi shrinks by the number
            # removed so it still tracks the live upper bound.
            n, v = self._squeeze(mid)
            hi -= n
            if n and hi == len(self._blist):
                # Everything from mid onward was dead; re-test mid.
                continue
            if self._i2key(self._blist[mid]) < key: lo = mid+1
            else: hi = mid
        n, v = self._squeeze(lo)
        return lo, v

    def _bisect_right(self, value):
        key = self._u2key(value)
        lo = 0
        hi = len(self._blist)
        while lo < hi:
            mid = (lo+hi)//2
            n, v = self._squeeze(mid)
            hi -= n
            if n and hi == len(self._blist):
                continue
            if key < self._i2key(self._blist[mid]): hi = mid
            else: lo = mid+1
        n, v = self._squeeze(lo)
        return lo, v

    _bisect = _bisect_right

    def _u2i(self, value):
        # Internal representation wraps the value in a weak reference.
        if self._key is None:
            return weakref.ref(value)
        else:
            return (self._key(value), weakref.ref(value))

    def _i2u(self, value):
        # Dereference; returns None when the referent has been
        # garbage-collected.
        if self._key is None:
            return value()
        else:
            return value[1]()

    def _i2key(self, value):
        if self._key is None:
            return value()
        else:
            return value[0]

    def __iter__(self):
        """ x.__iter__() <==> iter(x)"""
        i = 0
        while i < len(self._blist):
            n, v = self._squeeze(i)
            if v is None: break
            yield v
            i += 1

    def _squeeze(self, i):
        # Delete dead weakrefs starting at index i.  Returns a tuple
        # (number removed, first live user-object at i, or None when
        # the tail of the list was entirely dead).
        n = 0
        while i < len(self._blist):
            v = self._i2u(self._blist[i])
            if v is None:
                del self._blist[i]
                n += 1
            else:
                return n, v
        return n, None

    def __getitem__(self, index):
        """x.__getitem__(y) <==> x[y]"""
        if isinstance(index, slice):
            return _sortedbase.__getitem__(self, index)
        n, v = self._squeeze(index)
        if v is None:
            raise IndexError('list index out of range')
        return v

    def __reversed__(self):
        """L.__reversed__() -- return a reverse iterator over the list"""
        i = len(self._blist)-1
        while i >= 0:
            n, v = self._squeeze(i)
            # NOTE(review): when _squeeze removed entries (n != 0) the
            # live item now at i is skipped for this pass -- confirm
            # this is the intended compensation for the shifted index.
            if not n:
                yield v
            i -= 1

    def _advance(self, i, value):
        "Do a linear search through all items with the same key"
        key = self._u2key(value)
        while i < len(self._blist):
            n, v = self._squeeze(i)
            if v is None:
                break
            if v == value:
                return i
            elif key < self._i2key(self._blist[i]):
                break
            i += 1
        return -1
+
class _listmixin(object):
    """List-flavored helpers shared by the sorted list types."""

    def remove(self, value):
        """L.remove(value) -- remove first occurrence of value.

        Raises ValueError if the value is not present.
        """

        del self[self.index(value)]

    def update(self, iterable):
        """L.update(iterable) -- add all elements from iterable into the list"""

        for item in iterable:
            self.add(item)

    def __mul__(self, k):
        if not isinstance(k, int):
            # Bug fix: report the type of the operand k; the original
            # formatted str(type(int)) -- i.e. the metaclass ``type`` --
            # which made the message meaningless.
            raise TypeError("can't multiply sequence by non-int of type '%s'"
                            % str(type(k)))
        rv = self.__class__()
        rv._key = self._key
        # Repeat each element k times in place, preserving sort order.
        rv._blist = sum((blist([x])*k for x in self._blist), blist())
        return rv
    __rmul__ = __mul__

    def __imul__(self, k):
        if not isinstance(k, int):
            # Bug fix: same str(type(int)) -> str(type(k)) as __mul__.
            raise TypeError("can't multiply sequence by non-int of type '%s'"
                            % str(type(k)))
        self._blist = sum((blist([x])*k for x in self._blist), blist())
        return self

    def __eq__(self, other):
        """x.__eq__(y) <==> x==y"""
        return self._cmp_op(other, operator.eq)
    def __ne__(self, other):
        """x.__ne__(y) <==> x!=y"""
        return self._cmp_op(other, operator.ne)
    def __lt__(self, other):
        """x.__lt__(y) <==> x<y"""
        return self._cmp_op(other, operator.lt)
    def __gt__(self, other):
        """x.__gt__(y) <==> x>y"""
        return self._cmp_op(other, operator.gt)
    def __le__(self, other):
        """x.__le__(y) <==> x<=y"""
        return self._cmp_op(other, operator.le)
    def __ge__(self, other):
        """x.__ge__(y) <==> x>=y"""
        return self._cmp_op(other, operator.ge)
+
+class _setmixin(object):
+ "Methods that override our base class"
+
+ def add(self, value):
+ """Add an element to the set.
+
+ This has no effect if the element is already present.
+
+ """
+ if value in self: return
+ super(_setmixin, self).add(value)
+
+ def __iter__(self):
+ it = super(_setmixin, self).__iter__()
+ while True:
+ item = next(it)
+ n = len(self)
+ yield item
+ if n != len(self):
+ raise RuntimeError('Set changed size during iteration')
+
def safe_cmp(f):
    """Wrap comparison method *f* so it rejects non-set operands.

    The collections mixins return NotImplemented for foreign operands;
    this wrapper instead raises TypeError, matching the strict behavior
    of built-in set comparisons on Python 2.
    """
    def g(self, other):
        if isinstance(other, collections.Set):
            return f(self, other)
        raise TypeError("can only compare to a set")
    return g
+
class _setmixin2(collections.MutableSet):
    "methods that override or supplement the collections.MutableSet methods"

    # Set operations are symmetric here, so the reflected variants can
    # simply reuse the forward implementations.
    __ror__ = collections.MutableSet.__or__
    __rand__ = collections.MutableSet.__and__
    __rxor__ = collections.MutableSet.__xor__

    # On Python 2, comparisons with non-sets must raise TypeError
    # instead of returning NotImplemented (see safe_cmp).
    if sys.version_info[0] < 3: # pragma: no cover
        __lt__ = safe_cmp(collections.MutableSet.__lt__)
        __gt__ = safe_cmp(collections.MutableSet.__gt__)
        __le__ = safe_cmp(collections.MutableSet.__le__)
        __ge__ = safe_cmp(collections.MutableSet.__ge__)

    def __ior__(self, it):
        # x |= x is a no-op; guard avoids mutating while iterating self.
        if self is it:
            return self
        for value in it:
            self.add(value)
        return self

    def __isub__(self, it):
        # x -= x empties the set.
        if self is it:
            self.clear()
            return self
        for value in it:
            self.discard(value)
        return self

    def __ixor__(self, it):
        # x ^= x empties the set; otherwise toggle membership per item.
        if self is it:
            self.clear()
            return self
        for value in it:
            if value in self:
                self.discard(value)
            else:
                self.add(value)
        return self

    def __rsub__(self, other):
        return self._from_iterable(other) - self

    def _make_set(self, iterable):
        # Coerce an arbitrary iterable to a set-like operand, reusing
        # it unchanged when it already is one.
        if isinstance(iterable, collections.Set):
            return iterable
        return self._from_iterable(iterable)

    def difference(self, *args):
        """Return a new set with elements in the set that are not in the others."""
        rv = self.copy()
        rv.difference_update(*args)
        return rv

    def intersection(self, *args):
        """Return a new set with elements common to the set and all others."""
        rv = self.copy()
        rv.intersection_update(*args)
        return rv

    def issubset(self, other):
        """Test whether every element in the set is in *other*."""
        return self <= self._make_set(other)

    def issuperset(self, other):
        """Test whether every element in *other* is in the set."""
        return self >= self._make_set(other)

    def symmetric_difference(self, other):
        """Return a new set with elements in either the set or *other*
        but not both."""

        return self ^ self._make_set(other)

    def union(self, *args):
        """Return the union of sets as a new set.

        (i.e. all elements that are in either set.)

        """
        rv = self.copy()
        for arg in args:
            rv |= self._make_set(arg)
        return rv

    def update(self, *args):
        """Update the set, adding elements from all others."""
        for arg in args:
            self |= self._make_set(arg)

    def difference_update(self, *args):
        """Update the set, removing elements found in others."""
        for arg in args:
            self -= self._make_set(arg)

    def intersection_update(self, *args):
        """Update the set, keeping only elements found in it and all others."""
        for arg in args:
            self &= self._make_set(arg)

    def symmetric_difference_update(self, other):
        """Update the set, keeping only elements found in either set,
        but not in both."""

        self ^= self._make_set(other)

    def clear(self):
        """Remove all elements"""
        # Relies on the _sortedbase-provided backing blist.
        del self._blist[:]

    def copy(self):
        # Full-slice copy preserves the class and key function (see
        # _sortedbase.__getitem__).
        return self[:]
+
class sortedlist(_sortedbase, _listmixin):
    """sortedlist(iterable=(), key=None) -> new sorted list

    Arguments:
      iterable -- initial contents of the sorted list
      key -- optional function mapping an item to its sort key

    Indexable like a list, but the elements are always kept in sorted
    order.

    """

    def __repr__(self):
        """x.__repr__() <==> repr(x)"""
        if not self:
            return 'sortedlist()'
        with ReprRecursion(self) as recursing:
            body = '...' if recursing else repr(list(self))
        return 'sortedlist(%s)' % body

    def _cmp_op(self, other, op):
        related = isinstance(other, type(self)) or isinstance(self, type(other))
        if not related:
            return NotImplemented
        if len(self) != len(other):
            # Different lengths settle == and != immediately.
            if op is operator.eq:
                return False
            if op is operator.ne:
                return True
        for mine, theirs in izip(self, other):
            if mine != theirs:
                # First mismatched pair decides the ordering.
                return op(mine, theirs)
        # No mismatch found: the compared prefixes are equal.
        return op in (operator.eq, operator.le, operator.ge)
+
class weaksortedlist(_listmixin, _weaksortedbase):
    """weaksortedlist(iterable=(), key=None) -> new sorted weak list

    Arguments:
      iterable -- initial contents of the sorted list
      key -- optional function mapping an item to its sort key

    Indexable like a list with items kept in sorted order, but holds
    only weak references: an item disappears from the list once no
    strong reference to it remains.

    """

    def __repr__(self):
        """x.__repr__() <==> repr(x)"""
        if not self:
            return 'weaksortedlist()'
        with ReprRecursion(self) as recursing:
            body = '...' if recursing else repr(list(self))
        return 'weaksortedlist(%s)' % body

    def _cmp_op(self, other, op):
        related = isinstance(other, type(self)) or isinstance(self, type(other))
        if not related:
            return NotImplemented
        for mine, theirs in izip(self, other):
            if mine != theirs:
                # First mismatched pair decides the ordering.
                return op(mine, theirs)
        return op in (operator.eq, operator.le, operator.ge)
+
class sortedset(_setmixin, _sortedbase, _setmixin2):
    """sortedset(iterable=(), key=None) -> new sorted set

    Arguments:
      iterable -- initial contents of the sorted set
      key -- optional function mapping an item to its sort key

    A set that is also indexable like a list, with its items kept in
    sorted order.

    """

    def __repr__(self):
        """x.__repr__() <==> repr(x)"""
        if not self:
            return 'sortedset()'
        with ReprRecursion(self) as recursing:
            body = '...' if recursing else repr(list(self))
        return 'sortedset(%s)' % body
+
class weaksortedset(_setmixin, _weaksortedbase, _setmixin2):
    """weaksortedset(iterable=(), key=None) -> new sorted weak set

    Arguments:
      iterable -- initial contents of the sorted set
      key -- optional function mapping an item to its sort key

    A set that is also indexable like a list, with items kept in sorted
    order.  Members are held by weak reference only, so an item
    disappears from the set once no strong reference to it remains.

    """

    def __repr__(self):
        """x.__repr__() <==> repr(x)"""
        if not self:
            return 'weaksortedset()'
        with ReprRecursion(self) as recursing:
            body = '...' if recursing else repr(list(self))
        return 'weaksortedset(%s)' % body
diff --git a/blist.egg-info/PKG-INFO b/blist.egg-info/PKG-INFO
new file mode 100644
index 0000000..a43d00d
--- /dev/null
+++ b/blist.egg-info/PKG-INFO
@@ -0,0 +1,202 @@
+Metadata-Version: 1.1
+Name: blist
+Version: 1.3.4
+Summary: a list-like type with better asymptotic performance and similar performance on small lists
+Home-page: http://stutzbachenterprises.com/blist/
+Author: Stutzbach Enterprises, LLC
+Author-email: daniel@stutzbachenterprises.com
+License: BSD
+Description: blist: a list-like type with better performance
+ ===============================================
+
+        The ``blist`` is a drop-in replacement for the Python list that provides
+ better performance when modifying large lists. The blist package also
+ provides ``sortedlist``, ``sortedset``, ``weaksortedlist``,
+ ``weaksortedset``, ``sorteddict``, and ``btuple`` types.
+
+ Full documentation is at the link below:
+
+ http://stutzbachenterprises.com/blist-doc/
+
+ Python's built-in list is a dynamically-sized array; to insert or
+        remove an item from the beginning or middle of the list, it has to
+ move most of the list in memory, i.e., O(n) operations. The blist
+ uses a flexible, hybrid array/tree structure and only needs to move a
+ small portion of items in memory, specifically using O(log n)
+ operations.
+
+ For small lists, the blist and the built-in list have virtually
+ identical performance.
+
+ To use the blist, you simply change code like this:
+
+ >>> items = [5, 6, 2]
+ >>> more_items = function_that_returns_a_list()
+
+ to:
+
+ >>> from blist import blist
+ >>> items = blist([5, 6, 2])
+ >>> more_items = blist(function_that_returns_a_list())
+
+ Here are some of the use cases where the blist asymptotically
+ outperforms the built-in list:
+
+ ========================================== ================ =========
+ Use Case blist list
+ ========================================== ================ =========
+ Insertion into or removal from a list O(log n) O(n)
+ Taking slices of lists O(log n) O(n)
+ Making shallow copies of lists O(1) O(n)
+ Changing slices of lists O(log n + log k) O(n+k)
+ Multiplying a list to make a sparse list O(log k) O(kn)
+        Maintain a sorted list with bisect.insort  O(log**2 n)      O(n)
+ ========================================== ================ =========
+
+ So you can see the performance of the blist in more detail, several
+        performance graphs are available at the following link:
+ http://stutzbachenterprises.com/blist/
+
+ Example usage:
+
+ >>> from blist import *
+ >>> x = blist([0]) # x is a blist with one element
+ >>> x *= 2**29 # x is a blist with > 500 million elements
+ >>> x.append(5) # append to x
+ >>> y = x[4:-234234] # Take a 500 million element slice from x
+ >>> del x[3:1024] # Delete a few thousand elements from x
+
+ Other data structures
+ ---------------------
+
+ The blist package provides other data structures based on the blist:
+
+ - sortedlist
+ - sortedset
+ - weaksortedlist
+        - weaksortedset
+ - sorteddict
+ - btuple
+
+ These additional data structures are only available in Python 2.6 or
+ higher, as they make use of Abstract Base Classes.
+
+ The sortedlist is a list that's always sorted. It's iterable and
+        indexable like a Python list, but to modify a sortedlist use the same
+ methods you would use on a Python set (add, discard, or remove).
+
+ >>> from blist import sortedlist
+ >>> my_list = sortedlist([3,7,2,1])
+ >>> my_list
+ sortedlist([1, 2, 3, 7])
+ >>> my_list.add(5)
+ >>> my_list[3]
+ 5
+ >>>
+
+ The sortedlist constructor takes an optional "key" argument, which may
+ be used to change the sort order just like the sorted() function.
+
+ >>> from blist import sortedlist
+ >>> my_list = sortedlist([3,7,2,1], key=lambda i: -i)
+        sortedlist([7, 3, 2, 1])
+ >>>
+
+ The sortedset is a set that's always sorted. It's iterable and
+ indexable like a Python list, but modified like a set. Essentially,
+ it's just like a sortedlist except that duplicates are ignored.
+
+ >>> from blist import sortedset
+ >>> my_set = sortedset([3,7,2,2])
+        sortedset([2, 3, 7])
+ >>>
+
+ The weaksortedlist and weaksortedset are weakref variations of the
+ sortedlist and sortedset.
+
+ The sorteddict works just like a regular dict, except the keys are
+ always sorted. The sorteddict should not be confused with Python
+ 2.7's OrderedDict type, which remembers the insertion order of the
+ keys.
+
+ >>> from blist import sorteddict
+ >>> my_dict = sorteddict({1: 5, 6: 8, -5: 9})
+ >>> my_dict.keys()
+ [-5, 1, 6]
+ >>>
+
+ The btuple is a drop-in replacement for the built-in tuple. Compared
+ to the built-in tuple, the btuple offers the following advantages:
+
+ - Constructing a btuple from a blist takes O(1) time.
+ - Taking a slice of a btuple takes O(n) time, where n is the size of
+ the original tuple. The size of the slice does not matter.
+
+ >>> from blist import blist, btuple
+ >>> x = blist([0]) # x is a blist with one element
+ >>> x *= 2**29 # x is a blist with > 500 million elements
+ >>> y = btuple(x) # y is a btuple with > 500 million elements
+
+ Installation instructions
+ -------------------------
+
+ Python 2.5 or higher is required. If building from the source
+ distribution, the Python header files are also required. In either
+ case, just run:
+
+ python setup.py install
+
+ If you're running Linux and see a bunch of compilation errors from
+ GCC, you probably do not have the Python header files installed.
+ They're usually located in a package called something like
+ "python2.6-dev".
+
+ The blist package will be installed in the 'site-packages' directory of
+ your Python installation. (Unless directed elsewhere; see the
+ "Installing Python Modules" section of the Python manuals for details
+ on customizing installation locations, etc.).
+
+ If you downloaded the source distribution and wish to run the
+ associated test suite, you can also run:
+
+ python setup.py test
+
+ which will verify the correct installation and functioning of the
+ package. The tests require Python 2.6 or higher.
+
+ Feedback
+ --------
+
+ We're eager to hear about your experiences with the blist. You can
+ email me at daniel@stutzbachenterprises.com. Alternately, bug reports
+ and feature requests may be reported on our bug tracker at:
+ http://github.com/DanielStutzbach/blist/issues
+
+ How we test
+ -----------
+
+        In addition to the tests included in the source distribution, we
+ perform the following to add extra rigor to our testing process:
+
+ 1. We use a "fuzzer": a program that randomly generates list
+ operations, performs them using both the blist and the built-in
+ list, and compares the results.
+
+ 2. We use a modified Python interpreter where we have replaced the
+ array-based built-in list with the blist. Then, we run all of
+ the regular Python unit tests.
+
+Keywords: blist list b+tree btree fast copy-on-write sparse array sortedlist sorted sortedset weak weaksortedlist weaksortedset sorteddict btuple
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: C
+Classifier: Programming Language :: Python :: 2.5
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.1
+Classifier: Programming Language :: Python :: 3.2
+Provides: blist
diff --git a/blist.egg-info/SOURCES.txt b/blist.egg-info/SOURCES.txt
new file mode 100644
index 0000000..0dc5784
--- /dev/null
+++ b/blist.egg-info/SOURCES.txt
@@ -0,0 +1,30 @@
+LICENSE
+MANIFEST.in
+README.rst
+_blist.c
+_btuple.py
+_sorteddict.py
+_sortedlist.py
+blist.h
+blist.py
+distribute_setup.py
+setup.py
+speed_test.py
+test_blist.py
+blist.egg-info/PKG-INFO
+blist.egg-info/SOURCES.txt
+blist.egg-info/dependency_links.txt
+blist.egg-info/not-zip-safe
+blist.egg-info/top_level.txt
+prototype/blist.py
+test/__init__.py
+test/btuple_tests.py
+test/list_tests.py
+test/mapping_tests.py
+test/seq_tests.py
+test/sorteddict_tests.py
+test/sortedlist_tests.py
+test/test_list.py
+test/test_set.py
+test/test_support.py
+test/unittest.py \ No newline at end of file
diff --git a/blist.egg-info/dependency_links.txt b/blist.egg-info/dependency_links.txt
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/blist.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/blist.egg-info/not-zip-safe b/blist.egg-info/not-zip-safe
new file mode 100644
index 0000000..d3f5a12
--- /dev/null
+++ b/blist.egg-info/not-zip-safe
@@ -0,0 +1 @@
+
diff --git a/blist.egg-info/top_level.txt b/blist.egg-info/top_level.txt
new file mode 100644
index 0000000..48b0d1f
--- /dev/null
+++ b/blist.egg-info/top_level.txt
@@ -0,0 +1,5 @@
+_btuple
+blist
+_sorteddict
+_sortedlist
+_blist
diff --git a/blist.h b/blist.h
new file mode 100644
index 0000000..7476fe1
--- /dev/null
+++ b/blist.h
@@ -0,0 +1,248 @@
+
+/* List object interface */
+
+/*
+Another generally useful object type is a list of object pointers.
+This is a mutable type: the list items can be changed, and items can be
+added or removed. Out-of-range indices or non-list objects are ignored.
+
+*** WARNING *** PyList_SetItem does not increment the new item's reference
+count, but does decrement the reference count of the item it replaces,
+if not nil. It does *decrement* the reference count if it is *not*
+inserted in the list. Similarly, PyList_GetItem does not increment the
+returned item's reference count.
+*/
+
+/**********************************************************************
+ * *
+ * PLEASE READ blist.rst BEFORE MODIFYING THIS CODE *
+ * *
+ **********************************************************************/
+
+#ifndef Py_BLISTOBJECT_H
+#define Py_BLISTOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#if 0
+#define BLIST_IN_PYTHON /* Define if building BList into Python */
+#endif
+
+/* pyport.h includes similar defines, but they're broken and never use
+ * "inline" except on Windows :-( */
+#if defined(_MSC_VER)
+/* ignore warnings if the compiler decides not to inline a function */
+#pragma warning(disable: 4710)
+/* fastest possible local call under MSVC */
+#define BLIST_LOCAL(type) static type __fastcall
+#define BLIST_LOCAL_INLINE(type) static __inline type __fastcall
+#elif defined(__GNUC__)
+#if defined(__i386__)
+#define BLIST_LOCAL(type) static type __attribute__((fastcall))
+#define BLIST_LOCAL_INLINE(type) static inline __attribute__((fastcall)) type
+#else
+#define BLIST_LOCAL(type) static type
+#define BLIST_LOCAL_INLINE(type) static inline type
+#endif
+#else
+#define BLIST_LOCAL(type) static type
+#define BLIST_LOCAL_INLINE(type) static type
+#endif
+
+#ifndef LIMIT
+#define LIMIT (128) /* Good performance value */
+#if 0
+#define LIMIT (8) /* Maximum size, currently low (for test purposes) */
+#endif
+#endif
+#define HALF (LIMIT/2) /* Minimum size */
+#define MAX_HEIGHT (16) /* ceil(log(PY_SSIZE_T_MAX)/log(HALF)); */
+#if LIMIT & 1
+#error LIMIT must be divisible by 2
+#endif
+#if LIMIT < 8
+#error LIMIT must be at least 8
+#endif
+#define INDEX_FACTOR (HALF)
+
+typedef struct PyBList {
+ PyObject_HEAD
+ Py_ssize_t n; /* Total # of user-object descendents */
+ int num_children; /* Number of immediate children */
+ int leaf; /* Boolean value */
+ PyObject **children; /* Immediate children */
+} PyBList;
+
+typedef struct PyBListRoot {
+ PyObject_HEAD
+#define BLIST_FIRST_FIELD n
+ Py_ssize_t n; /* Total # of user-object descendents */
+ int num_children; /* Number of immediate children */
+ int leaf; /* Boolean value */
+ PyObject **children; /* Immediate children */
+
+ PyBList **index_list;
+ Py_ssize_t *offset_list;
+ unsigned *setclean_list; /* contains index_allocated _bits_ */
+ Py_ssize_t index_allocated;
+ Py_ssize_t *dirty;
+ Py_ssize_t dirty_length;
+ Py_ssize_t dirty_root;
+ Py_ssize_t free_root;
+
+#ifdef Py_DEBUG
+ Py_ssize_t last_n; /* For debug */
+#endif
+} PyBListRoot;
+
+#define PyBList_GET_ITEM(op, i) (((PyBList *)(op))->leaf ? (((PyBList *)(op))->children[(i)]) : _PyBList_GET_ITEM_FAST2((PyBListRoot*) (op), (i)))
+
+/************************************************************************
+ * Code used when building BList into the interpreter
+ */
+
+#ifdef BLIST_IN_PYTHON
+int PyList_Init1(void);
+int PyList_Init2(void);
+typedef PyBListRoot PyListObject;
+
+//PyAPI_DATA(PyTypeObject) PyList_Type;
+
+PyAPI_DATA(PyTypeObject) PyBList_Type;
+PyAPI_DATA(PyTypeObject) PyRootBList_Type;
+#define PyList_Type PyRootBList_Type
+
+#define PyList_Check(op) \
+ PyType_FastSubclass(Py_TYPE(op), Py_TPFLAGS_LIST_SUBCLASS)
+#define PyList_CheckExact(op) ((op)->ob_type == &PyRootBList_Type)
+
+PyAPI_FUNC(PyObject *) PyList_New(Py_ssize_t size);
+PyAPI_FUNC(Py_ssize_t) PyList_Size(PyObject *);
+PyAPI_FUNC(PyObject *) PyList_GetItem(PyObject *, Py_ssize_t);
+PyAPI_FUNC(int) PyList_SetItem(PyObject *, Py_ssize_t, PyObject *);
+PyAPI_FUNC(int) PyList_Insert(PyObject *, Py_ssize_t, PyObject *);
+PyAPI_FUNC(int) PyList_Append(PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyList_GetSlice(PyObject *, Py_ssize_t, Py_ssize_t);
+PyAPI_FUNC(int) PyList_SetSlice(PyObject *, Py_ssize_t, Py_ssize_t, PyObject *);
+PyAPI_FUNC(int) PyList_Sort(PyObject *);
+PyAPI_FUNC(int) PyList_Reverse(PyObject *);
+PyAPI_FUNC(PyObject *) PyList_AsTuple(PyObject *);
+PyAPI_FUNC(PyObject *) _PyList_Extend(PyBListRoot *, PyObject *);
+
+PyAPI_FUNC(void) _PyList_SetItemFast(PyObject *, Py_ssize_t, PyObject *);
+
+/* Macro, trading safety for speed */
+#define PyList_GET_ITEM(op, i) (PyBList_GET_ITEM((op), (i)))
+//#define PyList_SET_ITEM(op, i, v) (((PyBList *)(op))->leaf ? (void) (((PyBList *)(op))->children[(i)] = (v)) : (void) _PyList_SetItemFast((PyObject *) (op), (i), (v)))
+#define PyList_SET_ITEM(self, i, v) (((PyBList *)self)->leaf ? (void) (((PyBList*)self)->children[(i)] = (v)) : (void) blist_ass_item_return2((PyBListRoot*)(self), (i), (v)))
+
+//#define PyList_GET_ITEM(op, i) PyList_GetItem((PyObject*) (op), (i))
+//#define PyList_SET_ITEM(op, i, v) _PyList_SetItemFast((PyObject *) (op), (i), (v))
+#define PyList_GET_SIZE(op) ({ assert(PyList_Check(op)); (((PyBList *)(op))->n); })
+
+#define PyList_IS_LEAF(op) ({ assert(PyList_Check(op)); (((PyBList *) (op))->leaf); })
+
+PyAPI_FUNC(PyObject *) _PyBList_GetItemFast3(PyBListRoot *, Py_ssize_t);
+
+PyAPI_FUNC(PyObject *) blist_ass_item_return_slow(PyBListRoot *root, Py_ssize_t i, PyObject *v);
+PyAPI_FUNC(PyObject *) ext_make_clean_set(PyBListRoot *root, Py_ssize_t i, PyObject *v);
+#else
+PyObject *_PyBList_GetItemFast3(PyBListRoot *, Py_ssize_t);
+PyObject *blist_ass_item_return_slow(PyBListRoot *root, Py_ssize_t i, PyObject *v);
+PyObject *ext_make_clean_set(PyBListRoot *root, Py_ssize_t i, PyObject *v);
+#endif
+
+#define INDEX_FACTOR (HALF)
+
+/* This should only be called if we know the root is not a leaf */
+/* inlining a common case for speed */
+BLIST_LOCAL_INLINE(PyObject *)
+_PyBList_GET_ITEM_FAST2(PyBListRoot *root, Py_ssize_t i)
+{
+ Py_ssize_t ioffset;
+ Py_ssize_t offset;
+ PyBList *p;
+
+ assert(!root->leaf);
+ assert(i >= 0);
+ assert(i < root->n);
+
+ if (root->dirty_root >= -1 /* DIRTY */)
+ return _PyBList_GetItemFast3(root, i);
+
+ ioffset = i / INDEX_FACTOR;
+ offset = root->offset_list[ioffset];
+ p = root->index_list[ioffset];
+
+ if (i < offset + p->n)
+ return p->children[i - offset];
+ ioffset++;
+ offset = root->offset_list[ioffset];
+ p = root->index_list[ioffset];
+ return p->children[i - offset];
+}
+
+#define SETCLEAN_LEN(index_allocated) ((((index_allocated)-1) >> SETCLEAN_SHIFT)+1)
+#if SIZEOF_INT == 4
+#define SETCLEAN_SHIFT (5u)
+#define SETCLEAN_MASK (0x1fu)
+#elif SIZEOF_INT == 8
+#define SETCLEAN_SHIFT (6u)
+#define SETCLEAN_MASK (0x3fu)
+#else
+#error Unknown sizeof(unsigned)
+#endif
+
+#define SET_BIT(setclean_list, i) (setclean_list[(i) >> SETCLEAN_SHIFT] |= (1u << ((i) & SETCLEAN_MASK)))
+#define CLEAR_BIT(setclean_list, i) (setclean_list[(i) >> SETCLEAN_SHIFT] &= ~(1u << ((i) & SETCLEAN_MASK)))
+#define GET_BIT(setclean_list, i) (setclean_list[(i) >> SETCLEAN_SHIFT] & (1u << ((i) & SETCLEAN_MASK)))
+
+BLIST_LOCAL_INLINE(PyObject *)
+blist_ass_item_return2(PyBListRoot *root, Py_ssize_t i, PyObject *v)
+{
+ PyObject *rv;
+ Py_ssize_t offset;
+ PyBList *p;
+ Py_ssize_t ioffset = i / INDEX_FACTOR;
+
+ assert(i >= 0);
+ assert(i < root->n);
+ assert(!root->leaf);
+
+ if (root->dirty_root >= -1 /* DIRTY */
+ || !GET_BIT(root->setclean_list, ioffset))
+ return blist_ass_item_return_slow(root, i, v);
+
+ offset = root->offset_list[ioffset];
+ p = root->index_list[ioffset];
+ assert(i >= offset);
+ assert(p);
+ assert(p->leaf);
+ if (i < offset + p->n) {
+ good:
+ /* Py_REFCNT(p) == 1, generally, but see comment in
+ * blist_ass_item_return_slow for caveats */
+ rv = p->children[i - offset];
+ p->children[i - offset] = v;
+ } else if (!GET_BIT(root->setclean_list, ioffset+1)) {
+ return ext_make_clean_set(root, i, v);
+ } else {
+ ioffset++;
+ assert(ioffset < root->index_allocated);
+ offset = root->offset_list[ioffset];
+ p = root->index_list[ioffset];
+ assert(p);
+ assert(p->leaf);
+ assert(i < offset + p->n);
+
+ goto good;
+ }
+
+ return rv;
+}
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_BLISTOBJECT_H */
diff --git a/blist.py b/blist.py
new file mode 100644
index 0000000..efe3ff2
--- /dev/null
+++ b/blist.py
@@ -0,0 +1,8 @@
+from _blist import *
+import collections
+if hasattr(collections, 'MutableSet'): # Only supported in Python 2.6+
+ from _sortedlist import sortedlist, sortedset, weaksortedlist, weaksortedset
+ from _sorteddict import sorteddict
+ from _btuple import btuple
+ collections.MutableSequence.register(blist)
+del collections
diff --git a/distribute_setup.py b/distribute_setup.py
new file mode 100644
index 0000000..4f7bd08
--- /dev/null
+++ b/distribute_setup.py
@@ -0,0 +1,481 @@
+#!python
+"""Bootstrap distribute installation
+
+If you want to use setuptools in your package's setup.py, just include this
+file in the same directory with it, and add this to the top of your setup.py::
+
+ from distribute_setup import use_setuptools
+ use_setuptools()
+
+If you want to require a specific version of setuptools, set a download
+mirror, or use an alternate download directory, you can do so by supplying
+the appropriate options to ``use_setuptools()``.
+
+This file can also be run as a script to install or upgrade setuptools.
+"""
+import os
+import sys
+import time
+import fnmatch
+import tempfile
+import tarfile
+from distutils import log
+
+try:
+ from site import USER_SITE
+except ImportError:
+ USER_SITE = None
+
+try:
+ import subprocess
+
+ def _python_cmd(*args):
+ args = (sys.executable,) + args
+ return subprocess.call(args) == 0
+
+except ImportError:
+ # will be used for python 2.3
+ def _python_cmd(*args):
+ args = (sys.executable,) + args
+ # quoting arguments if windows
+ if sys.platform == 'win32':
+ def quote(arg):
+ if ' ' in arg:
+ return '"%s"' % arg
+ return arg
+ args = [quote(arg) for arg in args]
+ return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
+
+DEFAULT_VERSION = "0.6.12"
+DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
+SETUPTOOLS_FAKED_VERSION = "0.6c11"
+
+SETUPTOOLS_PKG_INFO = """\
+Metadata-Version: 1.0
+Name: setuptools
+Version: %s
+Summary: xxxx
+Home-page: xxx
+Author: xxx
+Author-email: xxx
+License: xxx
+Description: xxx
+""" % SETUPTOOLS_FAKED_VERSION
+
+
+def _install(tarball):
+ # extracting the tarball
+ tmpdir = tempfile.mkdtemp()
+ log.warn('Extracting in %s', tmpdir)
+ old_wd = os.getcwd()
+ try:
+ os.chdir(tmpdir)
+ tar = tarfile.open(tarball)
+ _extractall(tar)
+ tar.close()
+
+ # going in the directory
+ subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+ os.chdir(subdir)
+ log.warn('Now working in %s', subdir)
+
+ # installing
+ log.warn('Installing Distribute')
+ if not _python_cmd('setup.py', 'install'):
+ log.warn('Something went wrong during the installation.')
+ log.warn('See the error message above.')
+ finally:
+ os.chdir(old_wd)
+
+
+def _build_egg(egg, tarball, to_dir):
+ # extracting the tarball
+ tmpdir = tempfile.mkdtemp()
+ log.warn('Extracting in %s', tmpdir)
+ old_wd = os.getcwd()
+ try:
+ os.chdir(tmpdir)
+ tar = tarfile.open(tarball)
+ _extractall(tar)
+ tar.close()
+
+ # going in the directory
+ subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+ os.chdir(subdir)
+ log.warn('Now working in %s', subdir)
+
+ # building an egg
+ log.warn('Building a Distribute egg in %s', to_dir)
+ _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
+
+ finally:
+ os.chdir(old_wd)
+ # returning the result
+ log.warn(egg)
+ if not os.path.exists(egg):
+ raise IOError('Could not build the egg.')
+
+
+def _do_download(version, download_base, to_dir, download_delay):
+ egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
+ % (version, sys.version_info[0], sys.version_info[1]))
+ if not os.path.exists(egg):
+ tarball = download_setuptools(version, download_base,
+ to_dir, download_delay)
+ _build_egg(egg, tarball, to_dir)
+ sys.path.insert(0, egg)
+ import setuptools
+ setuptools.bootstrap_install_from = egg
+
+
+def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+ to_dir=os.curdir, download_delay=15, no_fake=True):
+ # making sure we use the absolute path
+ to_dir = os.path.abspath(to_dir)
+ was_imported = 'pkg_resources' in sys.modules or \
+ 'setuptools' in sys.modules
+ try:
+ try:
+ import pkg_resources
+ if not hasattr(pkg_resources, '_distribute'):
+ if not no_fake:
+ _fake_setuptools()
+ raise ImportError
+ except ImportError:
+ return _do_download(version, download_base, to_dir, download_delay)
+ try:
+ pkg_resources.require("distribute>="+version)
+ return
+ except pkg_resources.VersionConflict:
+ e = sys.exc_info()[1]
+ if was_imported:
+ sys.stderr.write(
+ "The required version of distribute (>=%s) is not available,\n"
+ "and can't be installed while this script is running. Please\n"
+ "install a more recent version first, using\n"
+ "'easy_install -U distribute'."
+ "\n\n(Currently using %r)\n" % (version, e.args[0]))
+ sys.exit(2)
+ else:
+ del pkg_resources, sys.modules['pkg_resources'] # reload ok
+ return _do_download(version, download_base, to_dir,
+ download_delay)
+ except pkg_resources.DistributionNotFound:
+ return _do_download(version, download_base, to_dir,
+ download_delay)
+ finally:
+ if not no_fake:
+ _create_fake_setuptools_pkg_info(to_dir)
+
+def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+ to_dir=os.curdir, delay=15):
+ """Download distribute from a specified location and return its filename
+
+ `version` should be a valid distribute version number that is available
+ as an egg for download under the `download_base` URL (which should end
+ with a '/'). `to_dir` is the directory where the egg will be downloaded.
+ `delay` is the number of seconds to pause before an actual download
+ attempt.
+ """
+ # making sure we use the absolute path
+ to_dir = os.path.abspath(to_dir)
+ try:
+ from urllib.request import urlopen
+ except ImportError:
+ from urllib2 import urlopen
+ tgz_name = "distribute-%s.tar.gz" % version
+ url = download_base + tgz_name
+ saveto = os.path.join(to_dir, tgz_name)
+ src = dst = None
+ if not os.path.exists(saveto): # Avoid repeated downloads
+ try:
+ log.warn("Downloading %s", url)
+ src = urlopen(url)
+ # Read/write all in one block, so we don't create a corrupt file
+ # if the download is interrupted.
+ data = src.read()
+ dst = open(saveto, "wb")
+ dst.write(data)
+ finally:
+ if src:
+ src.close()
+ if dst:
+ dst.close()
+ return os.path.realpath(saveto)
+
+def _no_sandbox(function):
+ def __no_sandbox(*args, **kw):
+ try:
+ from setuptools.sandbox import DirectorySandbox
+ if not hasattr(DirectorySandbox, '_old'):
+ def violation(*args):
+ pass
+ DirectorySandbox._old = DirectorySandbox._violation
+ DirectorySandbox._violation = violation
+ patched = True
+ else:
+ patched = False
+ except ImportError:
+ patched = False
+
+ try:
+ return function(*args, **kw)
+ finally:
+ if patched:
+ DirectorySandbox._violation = DirectorySandbox._old
+ del DirectorySandbox._old
+
+ return __no_sandbox
+
+@_no_sandbox
+def _patch_file(path, content):
+ """Will backup the file then patch it"""
+ existing_content = open(path).read()
+ if existing_content == content:
+ # already patched
+ log.warn('Already patched.')
+ return False
+ log.warn('Patching...')
+ _rename_path(path)
+ f = open(path, 'w')
+ try:
+ f.write(content)
+ finally:
+ f.close()
+ return True
+
+
+def _same_content(path, content):
+ return open(path).read() == content
+
+def _rename_path(path):
+ new_name = path + '.OLD.%s' % time.time()
+ log.warn('Renaming %s into %s', path, new_name)
+ os.rename(path, new_name)
+ return new_name
+
+@_no_sandbox
+def _remove_flat_installation(placeholder):
+ if not os.path.isdir(placeholder):
+ log.warn('Unkown installation at %s', placeholder)
+ return False
+ found = False
+ for file in os.listdir(placeholder):
+ if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
+ found = True
+ break
+ if not found:
+ log.warn('Could not locate setuptools*.egg-info')
+ return
+
+ log.warn('Removing elements out of the way...')
+ pkg_info = os.path.join(placeholder, file)
+ if os.path.isdir(pkg_info):
+ patched = _patch_egg_dir(pkg_info)
+ else:
+ patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
+
+ if not patched:
+ log.warn('%s already patched.', pkg_info)
+ return False
+ # now let's move the files out of the way
+ for element in ('setuptools', 'pkg_resources.py', 'site.py'):
+ element = os.path.join(placeholder, element)
+ if os.path.exists(element):
+ _rename_path(element)
+ else:
+ log.warn('Could not find the %s element of the '
+ 'Setuptools distribution', element)
+ return True
+
+
+def _after_install(dist):
+ log.warn('After install bootstrap.')
+ placeholder = dist.get_command_obj('install').install_purelib
+ _create_fake_setuptools_pkg_info(placeholder)
+
+@_no_sandbox
+def _create_fake_setuptools_pkg_info(placeholder):
+ if not placeholder or not os.path.exists(placeholder):
+ log.warn('Could not find the install location')
+ return
+ pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
+ setuptools_file = 'setuptools-%s-py%s.egg-info' % \
+ (SETUPTOOLS_FAKED_VERSION, pyver)
+ pkg_info = os.path.join(placeholder, setuptools_file)
+ if os.path.exists(pkg_info):
+ log.warn('%s already exists', pkg_info)
+ return
+
+ log.warn('Creating %s', pkg_info)
+ f = open(pkg_info, 'w')
+ try:
+ f.write(SETUPTOOLS_PKG_INFO)
+ finally:
+ f.close()
+
+ pth_file = os.path.join(placeholder, 'setuptools.pth')
+ log.warn('Creating %s', pth_file)
+ f = open(pth_file, 'w')
+ try:
+ f.write(os.path.join(os.curdir, setuptools_file))
+ finally:
+ f.close()
+
+@_no_sandbox
+def _patch_egg_dir(path):
+ # let's check if it's already patched
+ pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+ if os.path.exists(pkg_info):
+ if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
+ log.warn('%s already patched.', pkg_info)
+ return False
+ _rename_path(path)
+ os.mkdir(path)
+ os.mkdir(os.path.join(path, 'EGG-INFO'))
+ pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
+ f = open(pkg_info, 'w')
+ try:
+ f.write(SETUPTOOLS_PKG_INFO)
+ finally:
+ f.close()
+ return True
+
+
+def _before_install():
+ log.warn('Before install bootstrap.')
+ _fake_setuptools()
+
+
+def _under_prefix(location):
+ if 'install' not in sys.argv:
+ return True
+ args = sys.argv[sys.argv.index('install')+1:]
+ for index, arg in enumerate(args):
+ for option in ('--root', '--prefix'):
+ if arg.startswith('%s=' % option):
+ top_dir = arg.split('root=')[-1]
+ return location.startswith(top_dir)
+ elif arg == option:
+ if len(args) > index:
+ top_dir = args[index+1]
+ return location.startswith(top_dir)
+ elif option == '--user' and USER_SITE is not None:
+ return location.startswith(USER_SITE)
+ return True
+
+
+def _fake_setuptools():
+ log.warn('Scanning installed packages')
+ try:
+ import pkg_resources
+ except ImportError:
+ # we're cool
+ log.warn('Setuptools or Distribute does not seem to be installed.')
+ return
+ ws = pkg_resources.working_set
+ try:
+ setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
+ replacement=False))
+ except TypeError:
+ # old distribute API
+ setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))
+
+ if setuptools_dist is None:
+ log.warn('No setuptools distribution found')
+ return
+ # detecting if it was already faked
+ setuptools_location = setuptools_dist.location
+ log.warn('Setuptools installation detected at %s', setuptools_location)
+
+    # if --root or --prefix was provided, and if
+ # setuptools is not located in them, we don't patch it
+ if not _under_prefix(setuptools_location):
+ log.warn('Not patching, --root or --prefix is installing Distribute'
+ ' in another location')
+ return
+
+ # let's see if its an egg
+ if not setuptools_location.endswith('.egg'):
+ log.warn('Non-egg installation')
+ res = _remove_flat_installation(setuptools_location)
+ if not res:
+ return
+ else:
+ log.warn('Egg installation')
+ pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
+ if (os.path.exists(pkg_info) and
+ _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
+ log.warn('Already patched.')
+ return
+ log.warn('Patching...')
+ # let's create a fake egg replacing setuptools one
+ res = _patch_egg_dir(setuptools_location)
+ if not res:
+ return
+ log.warn('Patched done.')
+ _relaunch()
+
+
+def _relaunch():
+ log.warn('Relaunching...')
+ # we have to relaunch the process
+ args = [sys.executable] + sys.argv
+ sys.exit(subprocess.call(args))
+
+
+def _extractall(self, path=".", members=None):
+ """Extract all members from the archive to the current working
+ directory and set owner, modification time and permissions on
+ directories afterwards. `path' specifies a different directory
+ to extract to. `members' is optional and must be a subset of the
+ list returned by getmembers().
+ """
+ import copy
+ import operator
+ from tarfile import ExtractError
+ directories = []
+
+ if members is None:
+ members = self
+
+ for tarinfo in members:
+ if tarinfo.isdir():
+ # Extract directories with a safe mode.
+ directories.append(tarinfo)
+ tarinfo = copy.copy(tarinfo)
+ tarinfo.mode = 448 # decimal for oct 0700
+ self.extract(tarinfo, path)
+
+ # Reverse sort directories.
+ if sys.version_info < (2, 4):
+ def sorter(dir1, dir2):
+ return cmp(dir1.name, dir2.name)
+ directories.sort(sorter)
+ directories.reverse()
+ else:
+ directories.sort(key=operator.attrgetter('name'), reverse=True)
+
+ # Set correct owner, mtime and filemode on directories.
+ for tarinfo in directories:
+ dirpath = os.path.join(path, tarinfo.name)
+ try:
+ self.chown(tarinfo, dirpath)
+ self.utime(tarinfo, dirpath)
+ self.chmod(tarinfo, dirpath)
+ except ExtractError:
+ e = sys.exc_info()[1]
+ if self.errorlevel > 1:
+ raise
+ else:
+ self._dbg(1, "tarfile: %s" % e)
+
+
+def main(argv, version=DEFAULT_VERSION):
+ """Install or upgrade setuptools and EasyInstall"""
+ tarball = download_setuptools()
+ _install(tarball)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/prototype/blist.py b/prototype/blist.py
new file mode 100755
index 0000000..1849348
--- /dev/null
+++ b/prototype/blist.py
@@ -0,0 +1,2116 @@
+#!/usr/bin/python
+
+"""
+
+Copyright 2007 Stutzbach Enterprises, LLC (daniel@stutzbachenterprises.com)
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ 3. The name of the author may not be used to endorse or promote
+ products derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+Motivation and Design Goals
+---------------------------
+
+The goal of this module is to provide a list-like type that has
+better asymptotic performance than Python lists, while maintaining
+similar performance for lists with few items.
+
+I was driven to write this type by the work that I do. I frequently
+need to work with large lists, and run into efficiency problems when I
+need to insert or delete elements in the middle of the list. I'd like
+a type that looks, acts, and quacks like a Python list while offering
+good asymptotic performance for all common operations.
+
+I could make a type that has good asymptotic performance, but poor
+relative performance on small lists. That'd be pretty easy to achieve
+with, say, red-black trees. While sometimes I do need good asymptotic
+performance, other times I need the speed of Python's array-based
+lists for operating on a small list in a tight loop. I don't want to
+have to think about which one to use. I want one type with good
+performance in both cases.
+
+In other words, it should "just work".
+
+I don't propose replacing the existing Python list implementation. I
+am neither that ambitious, and I appreciate how tightly optimized and
+refined the existing list implementation is. I would like to see this
+type someday included in Python's collections module, so users with
+similar needs can make use of it.
+
+The data structure I've created to solve the problem is a variation of
+a B+Tree, hence I call it the "BList". It has good asymptotic
+performance for all operations, even for some operations you'd expect
+to still be O(N). For example:
+
+ >>> from blist import BList
+ >>> n = 10000000 # n = 10 million
+ >>> b = BList([0]) # O(1)
+ >>> bigb = b * n # O(log n)
+ >>> bigb2 = bigb[1:-1] # O(log n)
+ >>> del bigb2[5000000] # O(log n)
+
+With BLists, even taking a slice (line 4) takes O(log n) time. This
+wonderful feature is because BLists can implement copy-on-write. More
+on that later.
+
+Thus far, I have only implemented a Python version of BList, as a
+working prototype. Obviously, the Python implementation isn't very
+efficient at all, but it serves to illustrate the important algorithms.
+Later, I plan to implement a C version, which should have comparable
+performance to ordinary Python lists when operating on small lists.
+The Python version of BLists only outperforms Python lists when the
+lists are VERY large.
+
+Basic Idea
+----------
+
+BLists are based on B+Trees.  A B+Tree is a dictionary data structure
+where each element is a (key, value) pair and the keys are kept in sorted order.
+B+Trees internally use a tree representation. All data is stored in
+the leaf nodes of the tree, and all leaf nodes are at the same level.
+Unlike binary trees, each node has a large number of children, stored
+as an array of references within the node. The B+Tree operations ensure
+that each node always has between "limit/2" and "limit" children
+(except the root which may have between 0 and "limit" children). When
+a B+Tree has fewer than "limit/2" elements, they will all be contained
+in a single node (the root).
+
+Wikipedia has a diagram that may be helpful for understanding the
+basic structure of a B+Tree:
+ http://en.wikipedia.org/wiki/B+_tree
+
+Of course, we don't want a dictionary. We want a list.
+
+In BLists, the "key" is implicit: it's the in-order location of the value.
+Instead of keys, each BList node maintains a count of the total number
+of data elements beneath it in the tree. This allows walking the tree
+efficiently by keeping track of how far we've moved when passing by a
+child node. The tree structure gives us O(log n) for most operations,
+asymptotically.
+
+When the BList has fewer than "limit/2" data elements, they are all
+stored in the root node. In other words, for small lists a BList
+essentially reduces to an array. It should have almost identical
+performance to a regular Python list, as only one or two extra if()
+statements will be needed per method call.
+
+Adding elements
+---------------
+
+Elements are inserted recursively. Each node determines which child
+node contains the insertion point, and calls the insertion routine of
+that child.
+
+When we add elements to a BList node, the node may overflow (i.e.,
+have more than "limit" elements). Instead of overflowing, the node
+creates a new BList node and gives half of its elements to the new
+node. When the inserting function returns, the function informs its
+parent about the new sibling. This causes the parent to add the new
+node as a child. If this causes the parent to overflow, it creates a
+sibling of its own, notifies its parent, and so on.
+
+When the root of the tree overflows, it must increase the depth of the
+tree. The root creates two new children and splits all of its former
+references between these two children (i.e., all former children are now
+grandchildren).
+
+Removing an element
+-------------------
+
+Removing an element is also done recursively. Each node determines
+which child node contains the element to be removed, and calls the
+removal routine of that child.
+
+Removing an element may cause an underflow (i.e., fewer than "limit/2"
+elements). It's the parent's job to check if a child has underflowed
+after any operation that might cause an underflow. The parent must
+then repair the child, either by borrowing elements from one of the
+child's siblings or merging the child with one of its siblings.  If the
+parent performs a merge, this may also cause its parent to underflow.
+
+If a node has only one element, the tree collapses. The node replaces
+its one child with its grandchildren. When removing a single element,
+this can only happen at the root.
+
+Removing a range
+----------------
+
+The __delslice__ method to remove a range of elements is the most
+complex operation for a BList to perform. The first step is to locate
+the common parent of all the elements to be removed. The parent
+deletes any children who will be completely deleted (i.e., they are
+entirely within the range to be deleted). The parent also has to deal
+with two children who may be partially deleted: they contain the left
+and right boundaries of the deletion range.
+
+The parent calls the deletion operation recursively on these two
+children. When the call returns, the children must return a valid
+BList, but they may be in an underflow state, and, worse, they may
+have needed to collapse the tree. To make life a little easier, the
+children return an integer indicating how many levels of the tree
+collapsed (if any). The parent now has two adjacent subtrees of
+different heights that need to be put back into the main tree (to keep
+it balanced).
+
+To accomplish this goal, we use a merge-tree operation, defined below.
+The parent merges the two adjacent subtrees into a single subtree,
+then merges the subtree with one of its other children. If it has no
+other children, then the parent collapses to become the subtree and
+indicates to its parent the total level of collapse.
+
+Merging subtrees
+----------------
+
+The __delslice__ method needs a way to merge two adjacent subtrees of
+potentially different heights. Because we only need to merge *adjacent*
+subtrees, we don't have to handle inserting a subtree into the middle of
+another. There are only two cases: the far-left and the far-right. If
+the two subtrees are the same height, this is a pretty simple operation where
+we join their roots together. If the trees are different heights, we
+merge the smaller into the larger as follows. Let H be the difference
+in their heights. Then, recurse through the larger tree by H levels
+and insert the smaller subtree there.
+
+Retrieving a range and copy-on-write
+------------------------------------
+
+One of the most powerful features of BLists is the ability to support
+copy-on-write. Thus far we have described a BLists as a tree
+structure where parents contain references to their children. None of
+the basic tree operations require the children to maintain references
+to their parents or siblings. Therefore, it is possible for a child
+to have *multiple parents*. The parents can happily share the child
+as long as they perform read-only operations on it. If a parent wants
+to modify a child in any way, it first checks the child's reference
+count. If it is 1, the parent has the only reference and can proceed.
+Otherwise, the parent must create a copy of the child, and relinquish
+its reference to the child.
+
+Creating a copy of a child doesn't implicitly copy the child's
+subtree. It just creates a new node with a new reference to the
+child. In other words, the child and the copy are now joint parents
+of their children.
+
+This assumes that no other code will gain references to internal BList
+nodes. The internal nodes are never exposed to the user, so this is a
+safe assumption. In the worst case, if the user manages to gain a
+reference to an internal BList node (such as through the gc module),
+it will just prevent the BList code from modifying that node. It will
+create a copy instead. User-visible nodes (i.e., the root of a tree)
+have no parents and are never shared children.
+
+Why is this copy-on-write operation so useful?
+
+Consider the common idiom of performing an operation on a slice of a
+list. Normally, this requires making a copy of that region of the
+list, which is expensive if the region is large. With copy-on-write,
+__getslice__ takes logarithmic time and logarithmic memory.
+
+As a fun but slightly less practical example, ever wanted to make
+REALLY big lists? Copy-on-write also allows for a logarithmic time
+and logarithmic memory implementation of __mul__.
+
+>>> little_list = BList([0])
+>>> big_list = little_list * 2**512 <-- 220 milliseconds
+>>> print big_list.__len__()
+13407807929942597099574024998205846127479365820592393377723561443721764030073546976801874298166903427690031858186486050853753882811946569946433649006084096
+
+(iterating over big_list is not recommended)
+
+Comparison of cost of operations with list()
+---------------------------------------------
+
+n is the size of "self", k is the size of the argument. For slice
+operations, k is the length of the slice. For __mul__, k is the value
+of the argument.
+
+ Operation list BList
+--------------- ------------ -----------------------
+init from seq O(k) O(k)
+copy O(k) O(1)
+append O(1) O(log n)
+insert O(n) O(log n)
+__mul__ O(n*k) O(log k)
+__delitem__ O(n) O(log n)
+__len__ O(1) O(1)
+iteration O(n) O(n)
+__getslice__ O(k) O(log n)
+__delslice__ O(n) O(log n + k)
+__setslice__ O(n+k) O(log n + log k) [1]
+extend O(k) O(log n + log k) [1]
+__sort__ O(n*log n) O(n*log n) [2]
+index O(k) O(log n + k)
+remove O(n) O(n)
+count O(n) O(n)
+extended slicing O(k) O(k*log n)
+__cmp__ O(min(n,k)) O(min(n,k))
+
+[1]: Plus O(k) if the sequence being added is not also a BList
+[2]: list.__sort__ requires O(n) worst-case extra memory, while BList.__sort__
+     requires only O(log n) extra memory
+
+For BLists smaller than "limit" elements, each operation essentially
+reduces to the equivalent list operation, so there is little-to-no
+overhead for the common case of small lists.
+
+
+Implementation Details
+======================
+
+Structure
+---------
+
+Each node has four member variables:
+
+leaf: true if this node is a leaf node (has user data as children),
+ false if this node is an interior node (has other nodes as children)
+
+children: an array of references to the node's children
+
+n: the total number of user data elements below the node.
+ equal to len(children) for leaf nodes
+
+refcount: None for a root node,
+ otherwise, the number of other nodes with references to this node
+ (i.e., parents)
+
+Global Constants
+----------------
+
+limit: the maximum size of .children, must be even and >= 8
+half: limit//2, the minimum size of .children for a valid node,
+ other than the root
+
+Definitions
+-----------
+
+- The only user-visible node is the root node.
+- All leaf nodes are at the same height in the tree.
+- If the root node has exactly one child, the root node must be a leaf node.
+- Nodes never maintain references to their parents or siblings, only to
+ their children.
+- Users call methods of the user-node, which may call methods of its
+ children, who may call their children recursively.
+- A node's user-visible elements are numbered from 0 to self.n-1. These are
+ called "positions".
+- A node's children are numbered 0 to len(self.children)-1. These are
+ called "indexes" and should not be confused with positions.
+
+- Completely private functions (called via self.) may temporarily
+ violate these invariants.
+- Functions exposed to the user must ensure these invariants are true
+ when they return.
+- Some functions are visible to the parent of a child node. When
+ these functions return, the invariants must be true as if the child
+ were a root node.
+
+Conventions
+-----------
+
+- Function that may be called by either users or the object's parent
+ either do not begin with underscores, or they begin and end with __.
+- A function that may only be called by the object itself begins with
+ __ and do not end with underscores.
+- Functions that may be called by an object's parent, but not by the user,
+ begin with a single underscore.
+
+Other rules
+-----------
+
+- If a function may cause a BList to overflow, the function has the
+ following return types:
+ - None, if the BList did not overflow
+ - Otherwise, a valid BList subtree containing a new right-hand sibling
+ for the BList that was called.
+- BList objects may modify their children if the child's .refcount is
+ 1. If the .refcount is greater than 1, the child is shared by another
+ parent. The object must copy the child, decrement the child's reference
+ counter, and modify the copy instead.
+- If an interior node has only one child, before returning it must
+ collapse the tree so it takes on the properties of its child. This
+ may mean the interior node becomes a leaf.
+- An interior node may return with no children. The parent must then
+ remove the interior node from the parent's children.
+- If a function may cause an interior node to collapse, it must have
+ the following return types:
+ - 0, if the BList did not collapse, or if it is now empty (self.n == 0)
+ - A positive integer indicating how many layers have collapsed (i.e., how
+ much shorter the subtree is compared to before the function call).
+- If a user-visible function does not modify the BList, the BList's
+ internal structure must not change. This is important for
+ supporting iterators.
+
+Observations
+------------
+
+- User-nodes always have a refcount of at least 1
+- User-callable methods may not cause the reference counter to decrement.
+- If a parent calls a child's method that may cause the child to
+ underflow, the parent must detect the underflow and merge the child
+ before returning.
+
+Pieces not implemented here that will be needed in a C version
+--------------------------------------------------------------
+
+- __deepcopy__
+- support for pickling
+- container-type support for the garbage collector
+
+Suspected Bugs:
+ - None currently, but needs more testing
+ - Passes test_list.py :-)
+
+User-visible Differences from list():
+ - If you modify the list in the middle of an iteration and continue
+ to iterate, the behavior is different. BList iteration could be
+ implemented the same way as in list, but then iteration would have
+ O(n * log n) cost instead of O(n). I'm okay with the way it is.
+
+Miscellaneous:
+ - All of the reference counter stuff is redundant with the reference
+    counting done internally on Python objects. In C we can just peek
+ at the reference counter stored in all Python objects.
+
+"""
+
+import copy, types
+from itertools import *
+
+########################################################################
+# Global constants
+
+# With limit=8 each node holds between 4 and 8 children (except the root).
+limit = 8               # Maximum size, currently low (for testing purposes)
+half = limit//2         # Minimum size
+assert limit % 2 == 0   # Must be divisible by 2
+assert limit >= 8       # The code assumes each block is at least this big
+
+PARANOIA = 2            # Checks reference counters
+DEBUG = 1               # Checks correctness
+NO_DEBUG = 0            # No checking
+
+debugging_level = NO_DEBUG
+
+# Set by BList.__del__ when a node dies while still referenced;
+# checked and reset by BList.__check_reference_count.
+leaked_reference = False
+
+########################################################################
+# Simulate utility functions from the Python C API. These functions
+# help us detect the case where we have a self-referential list and a
+# user has asked us to print it...
+Py_Repr = []
+def Py_ReprEnter(obj):
+    "Return 1 if obj is already being repr'd (a cycle); else record it, return 0"
+    if obj in Py_Repr: return 1
+    Py_Repr.append(obj)
+    return 0
+
+def Py_ReprLeave(obj):
+    "Remove the most recent Py_ReprEnter record for obj (searches from the end)"
+    for i in range(len(Py_Repr)-1,-1,-1):
+        if Py_Repr[i] == obj:
+            del Py_Repr[i]
+            break
+
+# Needed for sort
+builtin_cmp = cmp  # save the Python 2 builtin in case `cmp` is shadowed
+
+########################################################################
+# Decorators are for error-checking and code clarity. They verify
+# (most of) the invariants given above. They're replaced with no_op()
+# if debugging_level == NO_DEBUG.
+
+def modifies_self(f):
+    "Decorator for member functions which require write access to self"
+    def g(self, *args, **kw):
+        # Writers must hold the only reference (refcount 1) or be a
+        # user-visible root (refcount None); shared nodes get copied first.
+        assert self.refcount == 1 or self.refcount is None
+        rv = f(self, *args, **kw)
+        assert self.refcount == 1 or not self.refcount, self.refcount
+        return rv
+    return g
+
+def parent_callable(f):
+    "Indicates the member function may be called by the BList's parent"
+    def g(self, *args, **kw):
+        #self._check_invariants()
+        # Invariants must hold when control returns to the parent.
+        rv = f(self, *args, **kw)
+        self._check_invariants()
+        return rv
+    return g
+
+def user_callable(f):
+    "Indicates a user callable function"
+    def g(self, *args, **kw):
+        # User-visible calls may never change the node's reference count.
+        assert self.refcount >= 1 or self.refcount is None
+        refs = self.refcount
+        self._check_invariants()
+        rv = f(self, *args, **kw)
+        assert self.refcount == refs
+        self._check_invariants()
+        return rv
+    return g
+
+def may_overflow(f):
+    "Indicates the member function may cause an overflow"
+    def g(self, *args, **kw):
+        rv = f(self, *args, **kw)
+        # A non-None return is a new right-hand sibling subtree.
+        if rv is not None:
+            assert isinstance(rv, BList)
+            rv._check_invariants()
+        self._check_invariants()
+        return rv
+    return g
+
+def may_collapse(f):
+    "Indicates the member function may collapse the subtree"
+    def g(self, *args, **kw):
+        #height1 = self._get_height() ## Not reliable just before collapse
+        rv = f(self, *args, **kw)
+        #height2 = self._get_height()
+        # Return value is the number of tree levels that collapsed (>= 0).
+        assert isinstance(rv, int) and rv >= 0
+        self._check_invariants()
+        return rv
+    return g
+
+def no_op(f):
+    "Identity decorator; replaces the checkers when debugging is disabled"
+    return f
+
+if debugging_level == 0:
+    # Debugging disabled: strip all the checking decorators to no-ops.
+    modifies_self = no_op
+    parent_callable = no_op
+    user_callable = no_op
+    may_overflow = no_op
+    may_collapse = no_op
+
+########################################################################
+# Utility functions and decorators for fixing up index parameters.
+
+def sanify_index(n, i):
+    "Translate a negative index i relative to length n; slices pass through"
+    if isinstance(i, slice): return i
+    if i < 0:
+        i += n
+    return i
+
+def strong_sanify_index(n, i):
+    "Like sanify_index, but additionally clamp the result into [0, n]"
+    if isinstance(i, slice): return i
+    if i < 0:
+        i += n
+        if i < 0:
+            i = 0
+    elif i > n:
+        i = n
+    return i
+
+def allow_negative1(f):
+    "Decorator for allowing a negative position as the first argument"
+    def g(self, i, *args, **kw):
+        i = sanify_index(self.n, i)
+        return f(self, i, *args, **kw)
+    return g
+
+def allow_negative2(f):
+    "Decorator for allowing a negative position as the 1st and 2nd args"
+    def g(self, i, j, *args, **kw):
+        i = sanify_index(self.n, i)
+        j = sanify_index(self.n, j)
+        return f(self, i, j, *args, **kw)
+    return g
+
+########################################################################
+# An extra constructor and the main class
+
+def _BList(other=[]):
+    "Create a new BList for internal use"
+
+    # Internal (non-root) nodes carry an integer refcount starting at 1;
+    # user-visible roots use refcount None (see BList.__init__).
+    self = BList(other)
+    self.refcount = 1
+    return self
+
+class BList(object):
+ __slots__ = ('leaf', 'children', 'n', 'refcount')
+
+    def _check_invariants(self):
+        """Assert this node's structural invariants (counts, fanout, refcount).
+
+        No-op unless debugging is enabled; PARANOIA level additionally
+        validates the reference counts via the gc module.
+        """
+        if debugging_level == NO_DEBUG: return
+        try:
+            if debugging_level == PARANOIA:
+                self.__check_reference_count()
+            if self.leaf:
+                assert self.n == len(self.children)
+            else:
+                assert self.n == sum(child.n for child in self.children)
+                assert len(self.children) > 1 or len(self.children) == 0, len(self.children)
+                for child in self.children:
+                    assert isinstance(child, BList)
+                    assert half <= len(child.children) <= limit
+            assert self.refcount >= 1 or self.refcount is None \
+                   or (self.refcount == 0 and not self.children)
+        except:
+            # Dump the offending subtree before re-raising
+            print self.debug()
+            raise
+
+    def _check_invariants_r(self):
+        "Recursively check invariants for this node and every descendant"
+        if debugging_level == NO_DEBUG: return
+        self._check_invariants()
+        if self.leaf: return
+        for c in self.children:
+            c._check_invariants_r()
+
+    def __init__(self, seq=[]):
+        """Create a BList, optionally initialized from seq.
+
+        Copying another BList is O(1) (copy-on-write); any other
+        sequence is copied element by element.
+        """
+        # NOTE: the mutable default is safe here; seq is never mutated.
+        self.leaf = True
+
+        # Points to children
+        self.children = []
+
+        # Number of leaf elements that are descendants of this node
+        self.n = 0
+
+        # User visible objects have a refcount of None
+        self.refcount = None
+
+        # We can copy other BLists in O(1) time :-)
+        if isinstance(seq, BList):
+            self.__become(seq)
+            self._check_invariants()
+            return
+
+        self.__init_from_seq(seq)
+
+ ####################################################################
+ # Useful internal utility functions
+
+    @modifies_self
+    def __become(self, other):
+        "Turns self into a clone of other"
+
+        if id(self) == id(other):
+            # Cloning ourselves is a no-op; just refresh the count.
+            self._adjust_n()
+            return
+        if not other.leaf:
+            # We become an additional parent of other's children.
+            for child in other.children:
+                child._incref()
+        if other.refcount is not None:
+            other._incref() # Other may be one of our children
+        self.__forget_children()
+        self.n = other.n
+        self.children[:] = other.children
+        self.leaf = other.leaf
+        if other.refcount is not None:
+            other._decref()
+
+    @parent_callable
+    @modifies_self
+    def _adjust_n(self):
+        "Recompute self.n"
+        # O(len(self.children)); children's own counts are trusted.
+        if self.leaf:
+            self.n = len(self.children)
+        else:
+            self.n = sum(x.n for x in self.children)
+
+    @parent_callable
+    def _locate(self, i):
+        """We are searching for the child that contains leaf element i.
+
+        Returns a 3-tuple: (the child object, our index of the child,
+        the number of leaf elements before the child)
+        """
+        if self.leaf:
+            return self.children[i], i, i
+
+        so_far = 0
+        for k in range(len(self.children)):
+            p = self.children[k]
+            if i < so_far + p.n:
+                return p, k, so_far
+            so_far += p.n
+        # for-else: i is past the end; fall back to the last child.
+        else:
+            return self.children[-1], len(self.children)-1, so_far - p.n
+
+    def __get_reference_count(self):
+        "Figure out how many parents we have"
+        # Debugging aid only: walks the gc referrer graph, so it is slow.
+        import gc
+
+        # Count the number of times we are pointed to by a .children
+        # list of a BList
+
+        gc.collect()
+        objs = gc.get_referrers(self)
+        total = 0
+        for obj in objs:
+            if isinstance(obj, list):
+                # Could be a .children
+                objs2 = gc.get_referrers(obj)
+                for obj2 in objs2:
+                    # Could be a BList
+                    if isinstance(obj2, BList):
+                        total += len([x for x in obj2.children if x is self])
+        return total
+
+    def __check_reference_count(self):
+        "Validate that we're counting references properly"
+        # PARANOIA-level check; compares our bookkeeping against gc truth.
+        total = self.__get_reference_count()
+
+        if self.refcount is not None:
+            # The caller may be about to increment the reference counter, so
+            # total == self.refcount or total+1 == self.refcount are OK
+            assert total == self.refcount or total+1 == self.refcount,\
+                   (total, self.refcount)
+
+        # Reset the flag to avoid repeatedly raising the assertion
+        global leaked_reference
+        x = leaked_reference
+        leaked_reference = False
+        assert not x, x
+
+    def _decref(self):
+        "Drop one parent reference; release children when it reaches zero"
+        assert self.refcount is not None
+        assert self.refcount > 0
+        if self.refcount == 1:
+            # We're going to be garbage collected. Remove all references
+            # to other objects.
+            self.__forget_children()
+        self.refcount -= 1
+
+    def _incref(self):
+        "Record one additional parent reference to this (non-root) node"
+        assert self.refcount is not None
+        self.refcount += 1
+
+    @parent_callable
+    def _get_height(self):
+        """Find the current height of the tree.
+
+        We could keep an extra few bytes in each node rather than
+        figuring this out dynamically, which would reduce the
+        asymptotic complexity of a few operations. However, I
+        suspect it's not worth the extra overhead of updating it all
+        over the place.
+        """
+
+        # Any child would do: all leaves sit at the same depth.
+        if self.leaf:
+            return 1
+        return 1 + self.children[-1]._get_height()
+
+    @modifies_self
+    def __forget_children(self, i=0, j=None):
+        "Remove links to some of our children, decrementing their refcounts"
+        if j is None: j = len(self.children)
+        if not self.leaf:
+            # Only interior nodes hold refcounted BList children;
+            # leaf children are user objects.
+            for k in range(i, j):
+                self.children[k]._decref()
+        del self.children[i:j]
+
+    def __del__(self):
+        """In C, this would be a tp_clear function instead of a __del__.
+
+        Because of the way Python's garbage collector handles __del__
+        methods, we can end up with uncollectable BList objects if the
+        user creates circular references. In C with a tp_clear
+        function, this wouldn't be a problem.
+        """
+        # A nonzero refcount at destruction means some parent leaked us.
+        if self.refcount:
+            global leaked_reference
+            leaked_reference = True
+        try:
+            self.refcount = 1 # Make invariant-checker happy
+            self.__forget_children()
+            self.refcount = 0
+        except:
+            import traceback
+            traceback.print_exc()
+            raise
+
+    @modifies_self
+    def __forget_child(self, i):
+        "Removes links to one child"
+        self.__forget_children(i, i+1)
+
+    @modifies_self
+    def __prepare_write(self, pt):
+        """We are about to modify the child at index pt. Prepare it.
+
+        This function returns the child object. If the caller has
+        other references to the child, they must be discarded as they
+        may no longer be valid.
+
+        If the child's .refcount is 1, we simply return the
+        child object.
+
+        If the child's .refcount is greater than 1, we:
+
+        - copy the child object
+        - decrement the child's .refcount
+        - replace self.children[pt] with the copy
+        - return the copy
+        """
+
+        # This is the copy-on-write step: shared children are cloned
+        # before mutation.  Leaf children are user data, never cloned here.
+        if pt < 0:
+            pt = len(self.children) + pt
+        if not self.leaf and self.children[pt].refcount > 1:
+            new_copy = _BList()
+            new_copy.__become(self.children[pt])
+            self.children[pt]._decref()
+            self.children[pt] = new_copy
+        return self.children[pt]
+
+    @staticmethod
+    def __new_sibling(children, leaf):
+        """Non-default constructor. Create a node with specific children.
+
+        We steal the reference counters from the caller.
+        """
+
+        # The caller's list object is adopted directly, not copied.
+        self = _BList()
+        self.children = children
+        self.leaf = leaf
+        self._adjust_n()
+        return self
+
+ ####################################################################
+ # Functions for manipulating the tree
+
+    @modifies_self
+    def __borrow_right(self, k):
+        "Child k has underflowed. Borrow from k+1"
+        # NOTE(review): child k is assumed already private (refcount 1);
+        # __underflow prepares it before calling us.
+        p = self.children[k]
+        right = self.__prepare_write(k+1)
+        total = len(p.children) + len(right.children)
+        split = total//2
+
+        assert split >= half
+        assert total-split >= half
+
+        # Move the first `migrate` children of k+1 onto the end of k.
+        migrate = split - len(p.children)
+
+        p.children.extend(right.children[:migrate])
+        del right.children[:migrate]
+        right._adjust_n()
+        p._adjust_n()
+
+    @modifies_self
+    def __borrow_left(self, k):
+        "Child k has underflowed. Borrow from k-1"
+        # Mirror image of __borrow_right: take from the left sibling's tail.
+        p = self.children[k]
+        left = self.__prepare_write(k-1)
+        total = len(p.children) + len(left.children)
+        split = total//2
+
+        assert split >= half
+        assert total-split >= half
+
+        migrate = split - len(p.children)
+
+        # Prepend the last `migrate` children of k-1 onto the front of k.
+        p.children[:0] = left.children[-migrate:]
+        del left.children[-migrate:]
+        left._adjust_n()
+        p._adjust_n()
+
+    @modifies_self
+    def __merge_right(self, k):
+        "Child k has underflowed. Merge with k+1"
+        p = self.children[k]
+        # Grandchildren gain p as a parent; leaf children are user
+        # data and are not refcounted.
+        for p2 in self.children[k+1].children:
+            if not self.children[k+1].leaf:
+                p2._incref()
+            p.children.append(p2)
+        self.__forget_child(k+1)
+        p._adjust_n()
+
+    @modifies_self
+    def __merge_left(self, k):
+        "Child k has underflowed. Merge with k-1"
+        p = self.children[k]
+        # Grandchildren gain p as a parent (interior nodes only).
+        if not self.children[k-1].leaf:
+            for p2 in self.children[k-1].children:
+                p2._incref()
+        p.children[:0] = self.children[k-1].children
+        self.__forget_child(k-1)
+        p._adjust_n()
+
+    @staticmethod
+    def __concat(left_subtree, right_subtree, height_diff):
+        """Concatenate two trees of potentially different heights.
+
+        The parameters are the two trees, and the difference in their
+        heights expressed as left_height - right_height.
+
+        Returns a tuple of the new, combined tree, and an integer.
+        The integer expresses the height difference between the new
+        tree and the taller of the left and right subtrees. It will
+        be 0 if there was no change, and 1 if the new tree is taller
+        by 1.
+        """
+
+        # Callers must own both subtrees exclusively (refcount 1).
+        assert left_subtree.refcount == 1
+        assert right_subtree.refcount == 1
+
+        adj = 0
+
+        if height_diff == 0:
+            # Equal heights: join under a fresh root, then repair any
+            # undersized child.
+            root = _BList()
+            root.children = [left_subtree, right_subtree]
+            root.leaf = False
+            collapse = root.__underflow(0)
+            if not collapse:
+                collapse = root.__underflow(1)
+            if not collapse:
+                adj = 1
+            overflow = None
+        elif height_diff > 0: # Left is larger
+            root = left_subtree
+            overflow = root._insert_subtree(-1, right_subtree,
+                                            height_diff - 1)
+        else: # Right is larger
+            root = right_subtree
+            overflow = root._insert_subtree(0, left_subtree,
+                                            -height_diff - 1)
+        adj += -root.__overflow_root(overflow)
+
+        return root, adj
+
+    @staticmethod
+    def __concat_subtrees(left_subtree, left_depth, right_subtree,right_depth):
+        """Concatenate two subtrees of potentially different heights.
+
+        Returns a tuple of the new, combined subtree and its depth.
+
+        Depths are the depth in the parent, not their height.
+        """
+
+        # Depth grows downward, so the height difference is negated.
+        root, adj = BList.__concat(left_subtree, right_subtree,
+                                   -(left_depth - right_depth))
+        return root, max(left_depth, right_depth) - adj
+
+    @staticmethod
+    def __concat_roots(left_root, left_height, right_root, right_height):
+        """Concatenate two roots of potentially different heights.
+
+        Returns a tuple of the new, combined root and its height.
+
+        Heights are the height from the root to its leaf nodes.
+        """
+
+        # Same as __concat_subtrees but measured in heights, not depths.
+        root, adj = BList.__concat(left_root, right_root,
+                                   left_height - right_height)
+        return root, max(left_height, right_height) + adj
+
+    @may_collapse
+    @modifies_self
+    def __collapse(self):
+        "Collapse the tree, if possible"
+        # Only an interior node with exactly one child can collapse.
+        if len(self.children) != 1 or self.leaf:
+            self._adjust_n()
+            return 0
+
+        p = self.children[0]
+        self.__become(p)
+        return 1
+
+    @may_collapse
+    @modifies_self
+    def __underflow(self, k):
+        """Check if children k-1, k, or k+1 have underflowed.
+
+        If so, move things around until self is the root of a valid
+        subtree again, possibly requiring collapsing the tree.
+
+        Always calls self._adjust_n() (often via self.__collapse()).
+        """
+
+        if self.leaf:
+            self._adjust_n()
+            return 0
+
+        if k < len(self.children):
+            p = self.__prepare_write(k)
+            short = half - len(p.children)
+
+            # Repair child k: borrow from a sibling if it can spare
+            # elements without underflowing itself, else merge.
+            while short > 0:
+                if k+1 < len(self.children) \
+                   and len(self.children[k+1].children) - short >= half:
+                    self.__borrow_right(k)
+                elif k > 0 and len(self.children[k-1].children) - short >=half:
+                    self.__borrow_left(k)
+                elif k+1 < len(self.children):
+                    self.__merge_right(k)
+                elif k > 0:
+                    self.__merge_left(k)
+                    # Merging left shifts our child one slot to the left.
+                    k = k - 1
+                else:
+                    # No siblings for p
+                    return self.__collapse()
+
+                p = self.__prepare_write(k)
+                short = half - len(p.children)
+
+            # Borrowing above may have left a neighbor undersized; recurse.
+            if k > 0 and len(self.children[k-1].children) < half:
+                collapse = self.__underflow(k-1)
+                if collapse: return collapse
+            if k+1 < len(self.children) \
+               and len(self.children[k+1].children) <half:
+                collapse = self.__underflow(k+1)
+                if collapse: return collapse
+
+        return self.__collapse()
+
    @modifies_self
    def __overflow_root(self, overflow):
        "Handle the case where a user-visible node overflowed"
        # 'overflow' is the new sibling produced by an insert lower
        # down.  The user-visible root cannot be replaced, so instead
        # the tree grows a level: the old root's contents move into a
        # fresh child and [child, overflow] become the new children.
        # Returns -1 (height grew by one) or 0 (no overflow).
        self._check_invariants()
        if not overflow: return 0
        child = _BList(self)
        self.__forget_children()
        self.children[:] = [child, overflow]
        self.leaf = False
        self._adjust_n()
        self._check_invariants()
        return -1
+
    @may_overflow
    @modifies_self
    def __insert_here(self, k, item):
        """Insert 'item', which may be a subtree, at index k.

        Since the subtree may have fewer than half elements, we may
        need to merge it after insertion.

        This function may cause self to overflow.  If it does, it will
        take the upper half of its children and put them in a new
        subtree and return the subtree.  The caller is responsible for
        inserting this new subtree just to the right of self.

        Otherwise, it returns None.

        """

        if k < 0:
            k += len(self.children)

        if len(self.children) < limit:
            # Room available: plain insert, then fix up a possibly
            # undersized inserted subtree via __underflow.
            self.children.insert(k, item)
            collapse = self.__underflow(k)
            assert not collapse
            self._adjust_n()
            return None

        # Node is full: split off the upper half into a new sibling,
        # then insert into whichever half k falls in.
        sibling = BList.__new_sibling(self.children[half:], self.leaf)
        del self.children[half:]

        if k < half:
            self.children.insert(k, item)
            collapse = self.__underflow(k)
            assert not collapse
        else:
            sibling.children.insert(k - half, item)
            collapse = sibling.__underflow(k-half)
            assert not collapse
            sibling._adjust_n()
        self._adjust_n()
        return sibling
+
    @may_overflow
    @modifies_self
    def _insert_subtree(self, side, subtree, depth):
        """Recurse depth layers, then insert subtree on the left or right

        This function may cause an overflow.

        depth == 0 means insert the subtree as a child of self.
        depth == 1 means insert the subtree as a grandchild, etc.

        """
        # side == 0: leftmost edge; side == -1: rightmost edge.
        assert side == 0 or side == -1

        self._check_invariants()
        subtree._check_invariants()

        # Account for the new elements up front; the recursion below
        # only touches one edge path of the tree.
        self.n += subtree.n

        if depth:
            p = self.__prepare_write(side)
            overflow = p._insert_subtree(side, subtree, depth-1)
            if not overflow: return None
            # The child split; the split-off sibling becomes the thing
            # we must now insert at this level.
            subtree = overflow

        if side < 0:
            side = len(self.children)

        sibling = self.__insert_here(side, subtree)

        return sibling
+
    @modifies_self
    def __reinsert_subtree(self, k, depth):
        'Child at position k is too short by "depth".  Fix it'

        # The short child must be uniquely owned, since it is about to
        # be consumed by a neighbor.
        assert self.children[k].refcount == 1, self.children[k].refcount
        subtree = self.children.pop(k)
        if len(self.children) > k:
            # Merge right: graft the short subtree onto the left edge
            # of the (old) right neighbor, 'depth' levels down.
            p = self.__prepare_write(k)
            overflow = p._insert_subtree(0, subtree, depth-1)
            if overflow:
                self.children.insert(k+1, overflow)
        else:
            # Merge left: graft onto the right edge of the left neighbor.
            p = self.__prepare_write(k-1)
            overflow = p._insert_subtree(-1, subtree, depth-1)
            if overflow:
                self.children.insert(k, overflow)
        # The neighbor we merged into may now be short; repair it.
        return self.__underflow(k)
+
+ ####################################################################
+ # The main insert and deletion operations
+
    @may_overflow
    @modifies_self
    def _insert(self, i, item):
        """Recursive to find position i, and insert item just there.

        This function may cause an overflow.

        """
        if self.leaf:
            return self.__insert_here(i, item)

        # _locate returns (child, child-index, elements-before-child).
        p, k, so_far = self._locate(i)
        del p
        self.n += 1
        p = self.__prepare_write(k)
        overflow = p._insert(i - so_far, item)
        del p
        if not overflow: return
        # Child k split; place the split-off sibling right after it.
        return self.__insert_here(k+1, overflow)
+
    @user_callable
    @modifies_self
    def __iadd__(self, other):
        """In-place concatenation (self += other).

        Implemented by concatenating two private roots and adopting
        the result.
        """
        # Make not-user-visible roots for the subtrees
        right = _BList(other)
        left = _BList(self)

        left_height = left._get_height()
        right_height = right._get_height()

        # __concat_subtrees works in depth units; heights are negated.
        root = BList.__concat_subtrees(left, -left_height,
                                       right, -right_height)[0]
        self.__become(root)
        root._decref()
        return self
+
    @parent_callable
    @may_collapse
    @modifies_self
    def _delslice(self, i, j):
        """Recursive version of __delslice__

        This may cause self to collapse.  It returns None if it did
        not.  If a collapse occurred, it returns a positive integer
        indicating how much shorter this subtree is compared to when
        _delslice() was entered.

        Additionally, this function may cause an underflow.

        """

        if i == 0 and j >= self.n:
            # Delete everything.
            self.__forget_children()
            self.n = 0
            return 0

        if self.leaf:
            del self.children[i:j]
            self.n = len(self.children)
            return 0

        p, k, so_far = self._locate(i)
        p2, k2, so_far2 = self._locate(j-1)
        del p
        del p2

        if k == k2:
            # All of the deleted elements are contained under a single
            # child of this node.  Recurse and check for a short
            # subtree and/or underflow

            assert so_far == so_far2
            p = self.__prepare_write(k)
            depth = p._delslice(i - so_far, j - so_far)
            if not depth:
                return self.__underflow(k)
            return self.__reinsert_subtree(k, depth)

        # Deleted elements are in a range of child elements.  There
        # will be:
        # - a left child (k) where we delete some (or all) of its children
        # - a right child (k2) where we delete some (or all) of its children
        # - children in between who are deleted entirely

        # Call _delslice recursively on the left and right
        p = self.__prepare_write(k)
        collapse_left = p._delslice(i - so_far, j - so_far)
        del p
        p2 = self.__prepare_write(k2)
        collapse_right = p2._delslice(max(0, i - so_far2), j - so_far2)
        del p2

        deleted_k = False
        deleted_k2 = False

        # Delete [k+1:k2]
        self.__forget_children(k+1, k2)
        k2 = k+1

        # Delete k and k2 if they are now empty.  Delete k2 first so
        # index k stays valid.
        if not self.children[k2].n:
            self.children[k2]._decref()
            del self.children[k2]
            deleted_k2 = True
        if not self.children[k].n:
            self.children[k]._decref()
            del self.children[k]
            deleted_k = True

        if deleted_k and deleted_k2: # No messy subtrees.  Good.
            return self.__collapse()

        # The left and right may have collapsed and/or be in an
        # underflow state.  Clean them up.  Work on fixing collapsed
        # trees first, then worry about underflows.

        if not deleted_k and not deleted_k2 \
               and collapse_left and collapse_right:
            # Both exist and collapsed.  Merge them into one subtree.
            left = self.children.pop(k)
            right = self.children.pop(k)
            subtree, depth = BList.__concat_subtrees(left, collapse_left,
                                                     right, collapse_right)
            del left
            del right
            self.children.insert(k, subtree)

        elif deleted_k:
            # Only the right potentially collapsed, point there.
            depth = collapse_right
            # k already points to the old k2, since k was deleted
        elif not deleted_k2 and not collapse_left:
            # Only the right potentially collapsed, point there.
            k = k + 1
            depth = collapse_right
        else:
            depth = collapse_left

        # At this point, we have a potentially short subtree at k,
        # with depth "depth".

        if not depth or len(self.children) == 1:
            # Doesn't need merging, or no siblings to merge with
            return depth + self.__underflow(k)

        # We definitely have a short subtree at k, and we have other children
        return self.__reinsert_subtree(k, depth)
+
    @modifies_self
    def __init_from_seq(self, seq):
        """Initialize self from an arbitrary iterable.

        Fast path: sequences of at most 'limit' elements fit in one
        leaf.  Otherwise leaves are filled in order and combined
        bottom-up through a Forest, which is O(n) overall.

        Uses the Python 2 iterator protocol (.next()).
        """
        # Try the common case of a sequence <= limit in length
        iterator = iter(seq)
        for i in range(limit):
            try:
                x = iterator.next()
            except StopIteration:
                self.n = len(self.children)
                self._check_invariants()
                return
            except AttributeError:
                raise TypeError('instance has no next() method')
            self.children.append(x)
        self.n = limit
        assert limit == len(self.children)
        self._check_invariants()

        # No such luck, build bottom-up instead.
        # The sequence data so far goes in a leaf node.
        cur = _BList()
        self._check_invariants()
        cur._check_invariants()
        cur.__become(self)
        cur._check_invariants()
        self.__forget_children()
        cur._check_invariants()

        forest = Forest()
        forest.append_leaf(cur)
        cur = _BList()

        while 1:
            try:
                x = iterator.next()
            except StopIteration:
                break
            if len(cur.children) == limit:
                cur.n = limit
                cur._check_invariants()
                forest.append_leaf(cur)
                cur = _BList()
            cur.children.append(x)

        if cur.children:
            forest.append_leaf(cur)
            cur.n = len(cur.children)
        else:
            # Empty trailing leaf: discard instead of appending.
            cur._decref()

        final = forest.finish()
        self.__become(final)
        final._decref()
+
+ ########################################################################
+ # Below here are other user-callable functions built using the above
+ # primitives and user functions.
+
+ @parent_callable
+ def _str(self, f):
+ """Recursive version of __str__
+
+ Not technically user-callable, but nice to keep near the other
+ string functions.
+ """
+
+ if self.leaf:
+ return ', '.join(f(x) for x in self.children)
+ else:
+ return ', '.join(x._str(f) for x in self.children)
+
+ @user_callable
+ def __str__(self):
+ "User-visible function"
+ if Py_ReprEnter(self):
+ return '[...]'
+ #rv = 'BList(%s)' % self._str()
+ rv = '[%s]' % self._str(str)
+ Py_ReprLeave(self)
+ return rv
+
+ @user_callable
+ def __repr__(self):
+ "User-visible function"
+ if Py_ReprEnter(self):
+ return '[...]'
+ #rv = 'BList(%s)' % self._str()
+ rv = '[%s]' % self._str(repr)
+ Py_ReprLeave(self)
+ return rv
+
+ def debug(self, indent=''):
+ import gc
+ gc.collect()
+ "Return a string that shows the internal structure of the BList"
+ indent = indent + ' '
+ if not self.leaf:
+ rv = 'blist(leaf=%s, n=%s, r=%s, %s)' % (
+ str(self.leaf), str(self.n), str(self.refcount),
+ '\n%s' % indent +
+ ('\n%s' % indent).join([x.debug(indent+' ')
+ for x in self.children]))
+ else:
+ rv = 'blist(leaf=%s, n=%s, r=%s, %s)' % (
+ str(self.leaf), str(self.n), str(self.refcount),
+ str(self.children))
+ return rv
+
    @user_callable
    @allow_negative1
    def __getitem__(self, i):
        "User-visible function"
        if isinstance(i, slice):
            # Extended-slice path: materialize element by element.
            start, stop, step = i.indices(self.n)
            return BList(self[j] for j in xrange(start, stop, step))

        # Python 2-only type check (types.IntType / types.LongType).
        if type(i) != types.IntType and type(i) != types.LongType:
            raise TypeError('list indices must be integers')

        if i >= self.n or i < 0:
            raise IndexError

        if self.leaf:
            return self.children[i]

        # Descend into the child that contains index i.
        p, k, so_far = self._locate(i)
        assert i >= so_far
        return p.__getitem__(i - so_far)
+
    @user_callable
    @modifies_self
    @allow_negative1
    def __setitem__(self, i, y):
        "User-visible function"

        if isinstance(i, slice):
            start, stop, step = i.indices(self.n)
            if step == 1:
                # More efficient
                self[start:stop] = y
                return
            # Extended slice: lengths must match exactly, like a
            # built-in list.
            y = _BList(y)
            raw_length = (stop - start)
            length = raw_length//step
            if raw_length % step:
                length += 1
            if length != len(y):
                leny = len(y)
                y._decref()
                raise ValueError('attempt to assign sequence of size %d '
                                 'to extended slice of size %d'
                                 % (leny, length))
            k = 0
            for j in xrange(start, stop, step):
                self[j] = y[k]
                k += 1
            y._decref()
            return

        if i >= self.n or i < 0:
            raise IndexError

        if self.leaf:
            self.children[i] = y
            return

        # Copy-on-write: get a privately-owned child before mutating.
        p, k, so_far = self._locate(i)
        p = self.__prepare_write(k)
        p.__setitem__(i-so_far, y)
+
    @user_callable
    def __len__(self):
        "User-visible function"
        # Each node caches its subtree size in .n, so len() is O(1).
        return self.n
+
    @user_callable
    def __iter__(self):
        "User-visible function"
        # Full-range iteration; _iter picks the cheapest iterator type.
        return self._iter(0, None)
+
    def _iter(self, i, j):
        "Make an efficient iterator between elements i and j"
        # Single-leaf lists use the low-overhead indexing iterator;
        # deeper trees use the stack-based tree walker.
        if self.leaf:
            return ShortBListIterator(self, i, j)
        return BListIterator(self, i, j)
+
    @user_callable
    def __cmp__(self, other):
        """Python 2 three-way comparison, element by element.

        Non-list types compare by type identity, mimicking py2's
        arbitrary-but-consistent cross-type ordering.
        """
        if not isinstance(other, BList) and not isinstance(other, list):
            return cmp(id(type(self)), id(type(other)))

        iter1 = iter(self)
        iter2 = iter(other)
        x_failed = False
        y_failed = False
        while 1:
            try:
                x = iter1.next()
            except StopIteration:
                x_failed = True
            try:
                y = iter2.next()
            except StopIteration:
                y_failed = True
            if x_failed or y_failed: break

            c = cmp(x, y)
            if c: return c

        # All shared elements equal: the shorter list compares smaller.
        if x_failed and y_failed: return 0
        if x_failed: return -1
        return 1
+
+ @user_callable
+ def __contains__(self, item):
+ for x in self:
+ if x == item: return True
+ return False
+
    @user_callable
    @modifies_self
    def __setslice__(self, i, j, other):
        # Python automatically adds len(self) to these values if they
        # are negative.  They'll get incremented a second time below
        # when we use them as slice arguments.  Subtract len(self)
        # from them to keep them at the same net value.
        #
        # If they went positive the first time, that's OK.  Python
        # won't change them any further.

        if i < 0:
            i -= self.n
        if j < 0:
            j -= self.n

        # Make a not-user-visible root for the other subtree
        other = _BList(other)

        # Efficiently handle the common case of small lists
        if self.leaf and other.leaf and self.n + other.n <= limit:
            self.children[i:j] = other.children
            other._decref()
            self._adjust_n()
            return

        # General case: split self around [i:j], then splice
        # left + other + right back together with +=.
        left = self
        right = _BList(self)
        del left[i:]
        del right[:j]
        left += other
        left += right

        other._decref()
        right._decref()
+
    @user_callable
    @modifies_self
    def extend(self, other):
        # Same operation as +=.  Note this returns self (unlike
        # list.extend, which returns None) because it forwards
        # __iadd__'s return value.
        return self.__iadd__(other)
+
    @user_callable
    @modifies_self
    def pop(self, i=-1):
        """Remove and return the element at index i (default: last).

        NOTE(review): int(i) silently truncates float indices, where
        CPython's list.pop raises TypeError -- confirm whether that
        divergence is intentional in this prototype.
        """
        try:
            i = int(i)
        except ValueError:
            raise TypeError('an integer is required')
        rv = self[i]
        del self[i]
        return rv
+
+ @user_callable
+ def index(self, item, i=0, j=None):
+ i, j, _ = slice(i, j).indices(self.n)
+ for k, x in enumerate(self._iter(i, j)):
+ if x == item:
+ return k + i
+ raise ValueError('list.index(x): x not in list')
+
+ @user_callable
+ @modifies_self
+ def remove(self, item):
+ for i, x in enumerate(self):
+ if x == item:
+ del self[i]
+ return
+ raise ValueError('list.index(x): x not in list')
+
+ @user_callable
+ def count(self, item):
+ rv = 0
+ for x in self:
+ if x == item:
+ rv += 1
+ return rv
+
    @user_callable
    @modifies_self
    def reverse(self):
        # Reverse the child order at this node, then recurse into each
        # child (after copy-on-write preparation).
        self.children.reverse()
        if self.leaf: return
        for i in range(len(self.children)):
            p = self.__prepare_write(i)
            p.reverse()
+
    @user_callable
    def __mul__(self, n):
        """Repeat the list n times.

        Uses binary (repeated-doubling) concatenation, so building
        self * n costs O(log n) concatenations; copy-on-write sharing
        keeps each concatenation cheap.
        """
        if n <= 0:
            return BList()

        power = BList(self)
        rv = BList()

        if n & 1:
            rv += self
        mask = 2

        while mask <= n:
            # power holds self * mask after this doubling.
            power += power
            if mask & n:
                rv += power
            mask <<= 1
        return rv

    __rmul__ = __mul__
+
    @user_callable
    @modifies_self
    def __imul__(self, n):
        # Build the repeated list out-of-place, then adopt it.
        self.__become(self * n)
        return self
+
    @parent_callable
    @modifies_self
    def _merge(self, other, cmp=None, key=None, reverse=False):
        """Merge two sorted BLists into one sorted BList, part of MergeSort

        This function consumes the two input BLists along the way,
        making the MergeSort nearly in-place.  This function gains ownership
        of the self and other objects and must .decref() them if appropriate.

        It returns one sorted BList.

        It operates by maintaining two forests (lists of BList
        objects), one for each of the two inputs lists.  When it needs
        a new leaf node, it looks at the first element of the forest
        and checks to see if it's a leaf.  If so, it grabs that.  If
        not a leaf, it takes that node, removes the root, and prepends
        the children to the forest.  Then, it checks again for a leaf.
        It repeats this process until it is able to acquire a leaf.
        This process avoids the cost of doing O(log n) work O(n) times
        (for a total O(n log n) cost per merge).  It takes O(log n)
        extra memory and O(n) steps.

        We also maintain a forest for the output.  Whenever we fill an
        output leaf node, we append it to the output forest.  We keep
        track of the total number of leaf nodes added to the forest,
        and use that to analytically determine if we have "limit" nodes at the
        end of the forest all of the same height.  When we do, we remove them
        from the forest, place them under a new node, and put the new node on
        the end of the forest.  This guarantees that the output forest
        takes only O(log n) extra memory.  When we're done with the input, we
        merge the forest into one final BList.

        Whenever we finish with an input leaf node, we add it to a
        recyclable list, which we use as a source for nodes for the
        output.  Since the output will use only O(1) more nodes than the
        combined input, this part is effectively in-place.

        Overall, this function uses O(log n) extra memory and takes O(n) time.
        """

        other._check_invariants();
        if not cmp:
            cmp = builtin_cmp

        recyclable = []

        def do_cmp(a, b):
            "Utility function for performing a comparison"

            if key:
                # NOTE(review): 'key' is applied by subscripting
                # (a[key]), not by calling key(a) as list.sort()'s key
                # argument would imply -- confirm against sort()'s
                # **kw pass-through.
                a = a[key]
                b = b[key]
            x = cmp(a, b)
            if reverse:
                x = -x
            return x

        def recycle(node):
            "We've consumed a node, set it aside for re-use"
            del node.children[:]
            node.n = 0
            node.leaf = True
            recyclable.append(node)
            assert node.refcount == 1
            assert node.__get_reference_count() == 0

        def get_node(leaf):
            "Get a node, either from the recycled list or through allocation"
            if recyclable:
                node = recyclable.pop(-1)
            else:
                node = _BList()
            node.leaf = leaf
            return node

        def get_leaf(forest):
            "Get a new leaf node to process from one of the input forests"
            node = forest.pop(-1)
            assert not node.__get_reference_count()
            while not node.leaf:
                # Expand interior nodes until a leaf surfaces.
                forest.extend(reversed(node.children))
                recycle(node)
                node = forest.pop(-1)
            assert node.__get_reference_count() == 0
            return node

        try:
            if do_cmp(self[-1], other[0]) <= 0: # Speed up a common case
                self += other
                other._decref()
                return self

            # Input forests
            forest1 = [self]
            forest2 = [other]

            # Output forests
            forest_out = Forest()

            # Input leaf nodes we are currently processing
            leaf1 = get_leaf(forest1)
            leaf2 = get_leaf(forest2)

            # Index into leaf1 and leaf2, respectively
            i = 0
            j = 0

            # Current output leaf node we are building
            output = get_node(leaf=True)

            while ((forest1 or i < len(leaf1.children))
                   and (forest2 or j < len(leaf2.children))):

                # Check if we need to get a new input leaf node
                if i == len(leaf1.children):
                    recycle(leaf1)
                    leaf1 = get_leaf(forest1)
                    i = 0
                if j == len(leaf2.children):
                    recycle(leaf2)
                    leaf2 = get_leaf(forest2)
                    j = 0

                # Check if we have filled up an output leaf node
                if output.n == limit:
                    forest_out.append_leaf(output)
                    output = get_node(leaf=True)

                # Figure out which input leaf has the lower element
                if do_cmp(leaf1.children[i], leaf2.children[j]) <= 0:
                    output.children.append(leaf1.children[i])
                    i += 1
                else:
                    output.children.append(leaf2.children[j])
                    j += 1

                output.n += 1

            # At this point, we have completely consumed at least one
            # of the lists

            # Append our partially-complete output leaf node to the forest
            forest_out.append_leaf(output)

            # Append a partially-consumed input leaf node, if one exists
            if i < len(leaf1.children):
                del leaf1.children[:i]
                forest_out.append_leaf(leaf1)
            else:
                recycle(leaf1)
            if j < len(leaf2.children):
                del leaf2.children[:j]
                forest_out.append_leaf(leaf2)
            else:
                recycle(leaf2)

            # Append the rest of whichever input forest still has
            # nodes.  This could be sped up by merging trees instead
            # of doing it leaf-by-leaf.
            while forest1:
                forest_out.append_leaf(get_leaf(forest1))
            while forest2:
                forest_out.append_leaf(get_leaf(forest2))

            out_tree = forest_out.finish()

        finally:
            # Fix reference counters, in case the user-compare function
            # threw an exception.
            for c in recyclable:
                c._decref()

        return out_tree
+
    @parent_callable
    @modifies_self
    def _sort(self, *args, **kw):
        """Recursive merge sort: sort each child, then pairwise-merge.

        Leaves delegate to list.sort; interior nodes repeatedly merge
        adjacent sorted children until one child remains, then adopt it.
        """
        if self.leaf:
            self.children.sort(*args, **kw)
            return
        for i in range(len(self.children)):
            # Ensure each child is privately owned before sorting it
            # in place.
            self.__prepare_write(i)
            self.children[i]._sort(*args, **kw)
        while len(self.children) != 1:
            children = []
            for i in range(0, len(self.children)-1, 2):
                a = self.children[i]
                b = self.children[i+1]
                # _merge consumes both inputs; clear the slots first so
                # the reference-count checker doesn't see extra owners.
                self.children[i] = None # Keep reference-checker happy
                self.children[i+1] = None
                self.children[i] = a._merge(b, *args, **kw)
                children.append(self.children[i])
            # NOTE(review): with an odd child count, the last child is
            # dropped from 'children' here -- presumably handled by the
            # merge pairing, but confirm against the C implementation.
            self.children[:] = children
        self.__become(self.children[0])
        self._check_invariants_r()
+
    @user_callable
    @modifies_self
    def sort(self, *args, **kw):
        """User-visible sort.

        Detaches the data into real_self while sorting so that a
        comparison function that mutates the (now empty) user-visible
        list can be detected, mirroring CPython's 'list modified
        during sort' guard.
        """
        if self.leaf: # Special case to speed up common case
            self.children.sort(*args, **kw)
            return
        no_list = BList()
        real_self = BList(self)
        self.__become(no_list)
        try:
            real_self._sort(*args, **kw)
            self._check_invariants_r()
            if self.n:
                raise ValueError('list modified during sort')
        finally:
            # Reattach the (sorted or partially-processed) data even if
            # the comparison function raised.
            self._check_invariants_r()
            real_self._check_invariants_r()
            self.__become(real_self)
            self._check_invariants_r()
+
+ @user_callable
+ def __add__(self, other):
+ if not isinstance(other, BList) and not isinstance(other, list):
+ raise TypeError('can only concatenate list (not "%s") to list'
+ % str(type(other)))
+ rv = BList(self)
+ rv += other
+ return rv
+
+ @user_callable
+ def __radd__(self, other):
+ if not isinstance(other, BList) and not isinstance(other, list):
+ raise TypeError('can only concatenate list (not "%s") to list'
+ % str(type(other)))
+ rv = BList(other)
+ rv += self
+ return rv
+
    @user_callable
    @modifies_self
    def append(self, item):
        "User-visible function"
        # insert() clamps the index to self.n, so this always appends.
        self.insert(len(self), item)
+
    @user_callable
    @modifies_self
    @allow_negative1
    def insert(self, i, item):
        "User-visible function"
        # Clamp past-the-end indices, like list.insert.
        if i > self.n:
            i = self.n
        overflow = self._insert(i, item)
        # If the root itself split, grow the tree by one level.
        self.__overflow_root(overflow)
+
    @user_callable
    @modifies_self
    def __delslice__(self, i, j):
        "User-visible function"
        # Empty range: nothing to do.
        if i >= j:
            return
        self._delslice(i, j)
+
    @user_callable
    @modifies_self
    @allow_negative1
    def __delitem__(self, i):
        "User-visible function"

        if isinstance(i, slice):
            start, stop, step = i.indices(self.n)
            if step == 1:
                # More efficient
                self.__delslice__(start, stop)
                return
            j = start
            if step > 0:
                step -= 1 # We delete an item at each step
                # Each deletion shifts later elements left, so the
                # effective stride shrinks by one.
                while j < len(self) and j < stop:
                    del self[j]
                    j += step
            else:
                # Negative step deletes from high to low indices, so
                # earlier deletions don't disturb later targets.
                for j in range(start, stop, step):
                    del self[j]
            return

        if i >= self.n or i < 0:
            raise IndexError

        # Single-element delete is a degenerate slice delete.
        self.__delslice__(i, i+1)
+
    @user_callable
    def __getslice__(self, i, j):
        "User-visible function"

        # If the indices were negative, Python has already added len(self) to
        # them.  If they're still negative, treat them as 0.
        if i < 0: i = 0
        if j < 0: j = 0

        if j <= i:
            return BList()

        if i >= self.n:
            return BList()

        if self.leaf:
            return BList(self.children[i:j])

        # Copy the whole tree O(1) via copy-on-write, then trim both
        # ends in place.
        rv = BList(self)
        del rv[j:]
        del rv[:i]

        return rv
+
    def __copy__(self):
        # BList(self) shares nodes copy-on-write, so copying is cheap.
        return BList(self)
+
+########################################################################
+# Forest class; an internal utility class for building BLists bottom-up
+
class Forest:
    """Internal utility for building a BList bottom-up from leaves.

    Leaves are appended in order; every time limit**i leaves have been
    seen, the trailing limit same-height nodes are folded under a new
    parent, keeping the forest O(log n) in size.
    """

    def __init__(self):
        # Count of leaves appended so far; drives the folding schedule.
        self.num_leafs = 0
        # Stack of roots, tallest first.
        self.forest = []

    def append_leaf(self, leaf):
        "Append a leaf to the output forest, possibly combining nodes"

        if not leaf.children: # Don't add empty leaf nodes
            leaf._decref()
            return
        self.forest.append(leaf)
        leaf._adjust_n()

        # Every "limit" leaf nodes, combine the last "limit" nodes
        # This takes "limit" leaf nodes and replaces them with one node
        # that has the leaf nodes as children.

        # Every "limit**2" leaf nodes, take the last "limit" nodes
        # (which have height 2) and replace them with one node
        # (with height 3).

        # Every "limit**i" leaf nodes, take the last "limit" nodes
        # (which have height i) and replace them with one node
        # (with height i+1).

        i = 1
        self.num_leafs += 1
        while self.num_leafs % limit**i == 0:
            parent = _BList()
            parent.leaf = False
            assert len(self.forest) >= limit, \
                   (len(self.forest), limit, i, self.num_leafs)
            parent.children[:] = self.forest[-limit:]
            del self.forest[-limit:]

            # If the right-hand node has too few children,
            # borrow from a neighbor
            x = parent._BList__underflow(len(parent.children)-1)
            assert not x

            self.forest.append(parent)
            i += 1
            parent._check_invariants_r()

    def finish(self):
        "Combine the forest into a final BList"

        out_tree = None   # The final BList we are building
        out_height = 0    # Its height
        group_height = 1  # The height of the next group from the forest
        while self.forest:
            n = self.num_leafs % limit # Number of same-height nodes
            # NOTE: relies on Python 2 integer division ('/' truncates
            # for ints); would need '//' under Python 3.
            self.num_leafs /= limit
            group_height += 1

            if not n:
                # No nodes at this height
                continue

            # Merge nodes of the same height into 1 node, and
            # merge it into our output BList.
            group = _BList()
            group.leaf = False
            group.children[:] = self.forest[-n:]
            del self.forest[-n:]
            adj = group._BList__underflow(len(group.children)-1)
            if not out_tree:
                out_tree = group
                out_height = group_height - adj
            else:
                out_tree, out_height = BList._BList__concat_roots(group,
                                                group_height - adj,
                                                out_tree,
                                                out_height)
        out_tree._check_invariants_r()
        return out_tree
+
+
+########################################################################
+# Iterator classes. BList._iter() choses which one to use.
+
class ShortBListIterator:
    "A low-overhead iterator for short lists"

    def __init__(self, lst, start=0, stop=None):
        self.cur = start
        self.stop = len(lst) if stop is None else stop
        self.lst = lst

    def next(self):
        "Return the next element; raise StopIteration permanently at the end."
        cur = self.cur
        if cur >= self.stop or cur >= self.lst.n:
            # Zero the bound so a later mutation of the list cannot
            # restart an exhausted iterator.
            self.stop = 0
            raise StopIteration

        value = BList.__getitem__(self.lst, cur)
        self.cur = cur + 1
        return value

    def __iter__(self):
        return self
+
class BListIterator:
    """A high-overhead iterator that is more asymptotically efficient.

    Maintain a stack to traverse the tree.  The first step is to copy
    the list so we don't have to worry about user's modifying the list
    and wreaking havoc with our references.  Copying the list is O(1),
    but not worthwhile for lists that only contain a single leaf node.

    Each stack entry is a [node, next_child_index] pair.
    """

    def __init__(self, lst, start=0, stop=None):
        self.stack = []
        lst = BList(lst) # Protect against users modifying the list
        if stop is None:
            stop = len(lst)
        if stop < 0: stop = 0
        if start < 0: start = 0
        self.remaining = stop - start
        # Descend to the leaf containing 'start', recording at each
        # level the index of the next (unvisited) child.
        while not lst.leaf:
            p, k, so_far = lst._locate(start)
            self.stack.append([lst, k+1])
            # Bug fix: descend into the child that _locate found
            # (previously children[0], which disagreed with the k+1
            # pushed above and broke iteration from a nonzero start).
            lst = p
            start -= so_far
        self.stack.append([lst, start])

    def next(self):
        if not self.remaining:
            raise StopIteration
        self.remaining -= 1

        p, i = self.stack[-1]
        if i < len(p.children):
            self.stack[-1][1] += 1
            return p.children[i]

        # Current leaf exhausted: walk up to the nearest ancestor with
        # an unvisited child.
        while 1:
            if not self.stack: raise StopIteration
            p, i = self.stack.pop()
            if i < len(p.children):
                break

        self.stack.append([p, i+1])

        # Walk down the leftmost path of the next subtree.
        while not p.leaf:
            p = p.children[i]
            i = 0
            self.stack.append([p, i+1])

        return p.children[i]

    def __iter__(self):
        return self

    def __copy__(self):
        # Bug fix: __new__ requires the class as its argument.
        rv = BListIterator.__new__(BListIterator)
        # Bug fix: copy the inner [node, index] cells too; a shallow
        # copy of the outer list shared them, so advancing one
        # iterator corrupted its copies.
        rv.stack = [list(entry) for entry in self.stack]
        rv.remaining = self.remaining
        return rv
+
+########################################################################
+# Test code
+
def main():
    """Smoke test exercising sort, append, slicing, insertion,
    deletion, multiplication, and extend (Python 2 print statements).
    """
    n = 512

    data = range(n)
    import random
    random.shuffle(data)
    x = BList(data)
    x.sort()

    assert list(x) == sorted(data), x

    # Append one element at a time, verifying the whole list each step.
    lst = BList()
    t = tuple(range(n))
    for i in range(n):
        lst.append(i)
        if tuple(lst) != t[:i+1]:
            print i, tuple(lst), t[:i+1]
            print lst.debug()
            break

    # Slices are independent copies (copy-on-write must not leak).
    x = lst[4:258]
    assert tuple(x) == tuple(t[4:258])
    x.append(-1)
    assert tuple(x) == tuple(t[4:258] + (-1,))
    assert tuple(lst) == t

    lst[200] = 6
    assert tuple(x) == tuple(t[4:258] + (-1,))
    assert tuple(lst) == tuple(t[0:200] + (6,) + t[201:])

    del lst[200]
    assert tuple(lst) == tuple(t[0:200] + t[201:])

    # Exercise leaf overflow/underflow around the node-size limit.
    lst2 = BList(range(limit+1))
    assert tuple(lst2) == tuple(range(limit+1))
    del lst2[1]
    del lst2[-1]
    assert tuple(lst2) == (0,) + tuple(range(2,limit))
    assert lst2.leaf
    assert len(lst2.children) == limit-1

    lst = BList(range(n))
    lst.insert(200, 0)
    assert tuple(lst) == (t[0:200] + (0,) + t[200:])
    del lst[200:]
    assert tuple(lst) == tuple(range(200))

    lst = BList(range(3))
    lst*3
    assert lst*3 == range(3)*3

    a = BList('spam')
    a.extend('eggs')
    assert a == list('spameggs')

    # Multiplication over a wide range of sizes, including huge
    # copy-on-write lists that must stay O(log n) to build.
    x = BList([0])
    for i in range(290) + [1000, 10000, 100000, 1000000, 10000000]:
        if len(x*i) != i:
            print 'mul failure', i
            print (x*i).debug()
            break

    little_list = BList([0])
    big_list = little_list * 2**512
+
# Public alias: the module exports the class under the name 'blist'.
blist = BList

if __name__ == '__main__':
    main()
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..861a9f5
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,5 @@
+[egg_info]
+tag_build =
+tag_date = 0
+tag_svn_revision = 0
+
diff --git a/setup.py b/setup.py
new file mode 100755
index 0000000..22751ff
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,46 @@
#!/usr/bin/env python

import sys
import distribute_setup
distribute_setup.use_setuptools()
from setuptools import setup, Extension

define_macros = []

# Probe the platform's double layout: cast a known double to its raw
# 64-bit pattern and enable the float radix sort only if it matches
# the expected bit pattern (presumably little-endian IEEE-754 --
# confirm against _blist.c's use of BLIST_FLOAT_RADIX_SORT).
import ctypes
if ctypes.sizeof(ctypes.c_double) == 8:
    dv = ctypes.c_double(9006104071832581.0)
    iv = ctypes.cast(ctypes.pointer(dv), ctypes.POINTER(ctypes.c_uint64))
    if iv.contents.value == 0x433fff0102030405:
        define_macros.append(('BLIST_FLOAT_RADIX_SORT', 1))

setup(name='blist',
      version='1.3.4',
      description='a list-like type with better asymptotic performance and similar performance on small lists',
      author='Stutzbach Enterprises, LLC',
      author_email='daniel@stutzbachenterprises.com',
      url='http://stutzbachenterprises.com/blist/',
      license = "BSD",
      keywords = "blist list b+tree btree fast copy-on-write sparse array sortedlist sorted sortedset weak weaksortedlist weaksortedset sorteddict btuple",
      ext_modules=[Extension('_blist', ['_blist.c'],
                             define_macros=define_macros,
                             )],
      provides = ['blist'],
      py_modules=['blist', '_sortedlist', '_sorteddict', '_btuple'],
      test_suite = "test_blist.test_suite",
      zip_safe = False, # zips are broken on cygwin for C extension modules
      classifiers = [
          'Development Status :: 5 - Production/Stable',
          'Intended Audience :: Developers',
          'Intended Audience :: Science/Research',
          'License :: OSI Approved :: BSD License',
          'Programming Language :: C',
          'Programming Language :: Python :: 2.5',
          'Programming Language :: Python :: 2.6',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.1',
          'Programming Language :: Python :: 3.2',
      ],
      long_description=open('README.rst').read()
)
diff --git a/speed_test.py b/speed_test.py
new file mode 100755
index 0000000..b3b9cfe
--- /dev/null
+++ b/speed_test.py
@@ -0,0 +1,340 @@
+#!/usr/bin/python
+from __future__ import print_function
+
+import os, sys, subprocess
+from math import *
+
+# The tests to run are near the bottom
+
# Tunables for the measurement loop below.
MIN_REPS = 3      # smallest loop count (-n) ever passed to the timeit CLI
NUM_POINTS = 9    # samples (-r) per measurement; quartiles come from these
MIN_TIME = 0.1    # seconds; a measurement must run at least this long to count
MAX_TIME = 1.0    # seconds; cap on the cost of re-timing a single data point
MAX_X = 100000    # largest list size (n) that gets benchmarked
+
def makedir(x):
    """Create directory *x*, silently ignoring OSError (e.g. already exists)."""
    if not os.path.isdir(x):
        try:
            os.mkdir(x)
        except OSError:
            pass
+
def rm(x):
    """Remove file *x*, silently ignoring OSError (e.g. no such file)."""
    try:
        os.remove(x)  # os.remove is an alias of os.unlink
    except OSError:
        pass
+
# Create the output/scratch directories up front (errors ignored if present).
makedir('fig')
makedir('fig/relative')
makedir('fig/absolute')
makedir('.cache')
makedir('dat')
makedir('gnuplot')

limits = (128,)       # blist configurations to benchmark against plain list
current_limit = None  # tag for the active configuration; set by make()
def make(limit):
    """Select the configuration to benchmark by recording *limit*.

    The value tags cache keys in timeit() so timings from different
    configurations stay separate.
    """
    # NOTE(review): this only records the limit -- presumably a rebuild of
    # the extension happened here in an earlier version; confirm.
    global current_limit
    current_limit = limit
+
setup = 'from blist import blist'

# Benchmark sizes: ~10 logarithmically spaced points per decade from 1 to
# 10**5, deduplicated, sorted, and capped at MAX_X.
ns = sorted(set(int(floor(10 ** (0.1 * i))) for i in range(51)))
ns = [size for size in ns if size <= MAX_X]
+
def smart_timeit(stmt, setup, hint):
    """Time *stmt*, doubling the repetition count starting from *hint* until
    one measurement takes more than MIN_TIME seconds in total.

    Returns (timing, reps): timing is the tuple produced by timeit() (its
    first element is the median per-iteration time) and reps is the count
    that finally satisfied the threshold.
    """
    reps = hint
    while True:
        timing = timeit(stmt, setup, reps)
        if timing[0] * reps > MIN_TIME:
            return timing, reps
        reps *= 2
+
import timeit
# Capture the stdlib module's path now: the timeit() function defined below
# shadows the module name in this namespace, and the path is needed to run
# "python timeit.py ..." as a subprocess.
timeit_path = timeit.__file__
+
timeit_cache = {}  # (stmt, setup, rep, current_limit) -> (median, lq, uq)
def timeit(stmt, setup, rep):
    """Time *stmt* with the stdlib timeit CLI in a subprocess.

    *rep* is the loop count passed to -n; NUM_POINTS samples are collected
    via -r.  Returns per-iteration times in seconds as a
    (median, lower-quartile, upper-quartile) tuple, cached by
    (stmt, setup, rep, current_limit).
    """
    assert rep >= MIN_REPS
    key = (stmt, setup, rep, current_limit)
    if key in timeit_cache:
        return timeit_cache[key]
    try:
        n = NUM_POINTS
        args =[sys.executable, timeit_path,
               '-r', str(n), '-v', '-n', str(rep), '-s', setup, '--', stmt]
        p = subprocess.Popen(args,
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        so, se = p.communicate()
        try:
            # With -v, the first stdout line is "raw times: t1 t2 ...";
            # the second line starts with the loop count actually used.
            lines = so.split(b'\n')
            raw = lines[0]
            number = int(lines[1].split()[0])
            times = [float(x) / number for x in raw.split()[2:]]
            times.sort()
            # median, lower quartile, upper quartile
            v = (times[n//2], times[n//4], times[3*n//4])
            timeit_cache[key] = v
            return v
        except:
            # Dump the subprocess output to aid debugging, then re-raise.
            print(so)
            print(se)
            raise
    except:
        # Show what was being timed before propagating the failure.
        print(stmt)
        print(setup)
        raise
+
values = {}  # (limit, label, setup_n, n, template, typename) -> median seconds

def get_timing1(limit, label, setup_n, template, typename, use_rep_map):
    """Time *template* for every size in ``ns`` and write dat/<limit>-<label>.dat.

    Each output row holds: n, the median/lower-quartile/upper-quartile times
    in milliseconds, then the same three values normalized by the plain-list
    median for that n (``list_values`` must already hold those medians when
    *typename* is not 'list').

    When *use_rep_map* is false this pass discovers a suitable repetition
    count per size via smart_timeit() and records it in the global
    ``rep_map``; when true it reuses rep_map, capping repetitions so a single
    point costs at most MAX_TIME seconds.
    """
    if setup_n is None:
        # Default setup: build the container under test from range(n).
        # (Previously this had a redundant "else: setup_n = setup_n" branch.)
        setup_n = "x = TypeToTest(range(n))"

    # Publish the setup/timed snippets for the HTML report.
    with open('fig/%s.txt' % label, 'w') as ftimeit:
        print('<div class="blist_inner">Setup: <code>%s</code><br/>' % setup_n.replace('\n', '<br/>'),
              file=ftimeit)
        print('Timed: <code>%s</code></div>' % template.replace('\n', '<br/>'),
              file=ftimeit)

    with open('dat/%s-%s.dat' % (str(limit), label), 'w') as f:
        print('#', label, file=f)
        print('#', template.replace('\n', '\\n'), file=f)

        # Iterate from the largest n down so repetition counts propagate
        # from the slow (large) sizes to the faster (small) ones.
        for i in reversed(list(range(len(ns)))):
            n = ns[i]
            key = (limit, label, setup_n, n, template, typename)
            print(n, end=' ')
            sys.stdout.flush()
            setup2 = '\nTypeToTest = %s\nn = %d\n' % (typename, n)
            setup3 = setup + '\n' + setup2 + setup_n
            stmt = template
            if not use_rep_map:
                # A smaller n never needs fewer reps than the next larger one.
                if i < len(ns)-1:
                    rep_map[n] = max(rep_map[n], rep_map[ns[i+1]])
                v, rep = smart_timeit(stmt, setup3, rep_map[n])
                if rep_map[n] < rep:
                    rep_map[n] = rep
            else:
                k = rep_map[n]
                # Cap repetitions so this data point takes at most MAX_TIME.
                if k * values[key] > MAX_TIME:
                    k = max(MIN_REPS, int(ceil(MAX_TIME / values[key])))
                v = timeit(stmt, setup3, k)
            values[key] = v[0]
            v = [x*1000 for x in v]  # seconds -> milliseconds
            if typename == 'list':
                list_values[n] = v[0]
            print(n, file=f, end=' ')
            for x in v:
                print(x, file=f, end=' ')
            for x in v:
                print(x/list_values[n], file=f, end=' ')
            print(file=f)
    print()
+
def get_timing(label, setup_n, template):
    """Benchmark *label* for list and for every blist limit, then generate
    its plots and HTML page.

    Two passes per type: the first (use_rep_map=False) calibrates the
    repetition counts, the second (use_rep_map=True) re-times with them.
    """
    global rep_map, list_values
    rep_map = {}      # n -> repetitions needed for a trustworthy timing
    list_values = {}  # n -> median time (ms) of the plain-list baseline
    for n in ns:
        rep_map[n] = MIN_REPS
    # Pass 1: calibrate repetition counts.
    make('list')
    get_timing1('list', label, setup_n, template, 'list', False)
    for limit in limits:
        print('Timing', label, limit, ':', end=' ')
        sys.stdout.flush()
        make(limit)
        get_timing1(limit, label, setup_n, template, 'blist', False)

    # Pass 2: final measurements using the calibrated counts.
    make('list')
    get_timing1('list', label, setup_n, template, 'list', True)
    for limit in limits:
        print('Timing', label, limit, ':', end=' ')
        sys.stdout.flush()
        make(limit)
        get_timing1(limit, label, setup_n, template, 'blist', True)

    plot(label, True)   # normalized (relative) plot
    plot(label, False)  # raw (absolute) plot
    html(label)
+
def html(label):
    """Write fig/<label>.html embedding the absolute and relative SVG plots
    plus the setup and timed code snippets for benchmark *label*."""
    fname = 'fig/%s.html' % label
    f = open(fname, 'w')
    if timing_d[label][0] is None:
        # Mirror the default used by get_timing1 for display purposes.
        setup = 'x = TypeToTest(range(n))'
    else:
        setup = timing_d[label][0]
    print('''
<html>
<head>
<title>BList vs Python list timing results: %s</title>
<script src="svg.js"></script>
</head>
<body>
<div style="width: 100%%; background-color: #ccc;">
<a href="/">Home</a>
| <a href="/blist/">BList</a>
| <a href="http://pokersleuth.com/">Poker Sleuth</a>
| <a href="http://pokersleuth.com/poker-crunch.shtml">Poker Calculator</a>
| <a href="http://pokersleuth.com/hand-converter.shtml">Hand Converter</a>

</div>

<object data="absolute/%s.svg" width="480" height="360"
 type="image/svg+xml"></object>
<object data="relative/%s.svg" width="480" height="360"
 type="image/svg+xml"></object>
<p>
Setup:
<pre>
%s
</pre>
Timed:
<pre>
%s
</pre>
</body>
</html>
 ''' % (label, label, label, setup, timing_d[label][1]), file=f)
    f.close()
+
def plot(label, relative):
    """Emit a gnuplot script for benchmark *label* and run gnuplot on it.

    relative=True plots times normalized to the list baseline (columns 5-7
    of the dat file, scaled to percent); relative=False plots raw times
    (columns 2-4, in ms, with ns/us/ms y-axis labels).

    Raises RuntimeError if gnuplot exits with a non-zero status.
    """
    safe_label = label.replace('_', '\\\\_')  # escape underscores for gnuplot enhanced text
    fname = 'gnuplot/%s.gnuplot' % label
    f = open(fname, 'w')
    if relative:
        d = 'fig/relative/'
    else:
        d = 'fig/absolute/'
    print("""
set output "%s/%s.svg"
set xlabel "List Size (n)"
set title "%s"
set terminal svg size 480,360 dynamic enhanced
set size noratio 1,1
set key top left
set bars 0.2
set pointsize 0.5
set xtics ("1" 1, "10" 10, "100" 100, "1k" 1000, "10k" 10000, "100k" 100000, "1M" 1000000)
""" % (d, label, safe_label), file=f)

    if relative:
        print('set title "Normalized Execution Times, log-linear scale"', file=f)
        print('set logscale x', file=f)
        # NOTE(review): the next yrange is immediately overridden by the
        # fixed [0:200] range below -- looks like a leftover; confirm.
        print('set yrange [0:*]', file=f)
        print('set yrange [0:200]', file=f)
        print('set ylabel "Execution Time (%)"', file=f)
        k = 3     # column offset: normalized values start at column k+2 == 5
        m = 100.0 # scale ratios to percent
    else:
        print('set title "Raw Execution Times, log-log scale"', file=f)
        print('set logscale xy', file=f)
        print('set yrange [0.00001:10]', file=f)
        print('set ylabel "Execution Time"', file=f)
        print('set ytics ("1 ns" 0.000001, "10 ns" 0.00001, "100 ns" 0.0001, "1 us" 0.001, "10 us" 0.01, "100 us" 0.1, "1 ms" 1.0, "10 ms" 10.0, "100 ms" 100.0)', file=f)
        k = 0     # raw millisecond values start at column k+2 == 2
        m = 1.0

    print (('plot "dat/list-%s.dat" using 1:(%f*$%d):(%f*$%d):(%f*$%d) title "list()" with yerr pt 1, \\' % (label, m, k+2, m, k+3, m, k+4)), file=f)
    for limit in limits:
        print ((' "dat/%d-%s.dat" using 1:(%f*$%d):(%f*$%d):(%f*$%d) title "blist()" with yerr pt 1 '% (limit, label, m, k+2, m, k+3, m, k+4)), file=f)
    print(file=f)
    f.flush()
    f.close()
    if os.system('gnuplot "%s"' % fname):
        raise RuntimeError('Gnuplot failure')
+
timing_d = {}  # benchmark name -> (setup_code_or_None, timed_statement)

def add_timing(name, auto, stmt):
    """Register benchmark *name*: *auto* is its setup code (None selects the
    default ``x = TypeToTest(range(n))``) and *stmt* is the code to time."""
    timing_d[name] = (auto, stmt)
+
def run_timing(name):
    """Run the single registered benchmark *name*."""
    setup_code, stmt = timing_d[name]
    get_timing(name, setup_code, stmt)
+
def run_all():
    """Run every registered benchmark, in sorted name order."""
    for benchmark in sorted(timing_d):
        run_timing(benchmark)
+
+########################################################################
+# Tests to run are below here.
+# The arguments to add_timing are as follows:
+# 1) name of the test
+# 2) setup code to run once. "None" means x = TypeToTest(range(n))
+# 3) code to execute repeatedly in a loop
+#
+# The following symbols will automatically be defined:
+# - blist
+# - TypeToTest
+# - n
+
# --- Comparison benchmarks ---
add_timing('eq list', 'x = TypeToTest(range(n))\ny=range(n)', 'x==y')
#add_timing('eq recursive', 'x = TypeToTest()\nx.append(x)\ny = TypeToTest()\ny.append(y)', 'try:\n x==y\nexcept RuntimeError:\n pass')

# --- Queue-like operations ---
add_timing('FIFO', None, """\
x.insert(0, 0)
x.pop(0)
""")

add_timing('LIFO', None, """\
x.append(0)
x.pop(-1)
""")

# --- Core sequence operations ---
add_timing('add', None, "x + x")
add_timing('contains', None, "-1 in x")
#add_timing('getitem1', None, "x[0]")
#add_timing('getitem2', None, "x.__getitem__(0)")
add_timing('getitem3', 'x = TypeToTest(range(n))\nm = n//2', "x[m]")
add_timing('getslice', None, "x[1:-1]")
add_timing('forloop', None, "for i in x:\n pass")
add_timing('len', None, "len(x)")
add_timing('eq', None, "x == x")
add_timing('mul10', None, "x * 10")
#add_timing('setitem1', None, 'x[0] = 1')
add_timing('setitem3', 'x = TypeToTest(range(n))\nm = n//2', 'x[m] = 1')
add_timing('count', None, 'x.count(5)')
add_timing('reverse', None, 'x.reverse()')
add_timing('delslice', None, 'del x[len(x)//4:3*len(x)//4]\nx *= 2')
add_timing('setslice', None, 'x[:] = x')

# --- Sorting benchmarks (random, presorted, and reversed inputs) ---
add_timing('sort random', 'import random\nx = [random.randrange(n*4) for i in range(n)]', 'y = TypeToTest(x)\ny.sort()')
add_timing('sort random key', 'import random\nx = [random.randrange(n*4) for i in range(n)]', 'y = TypeToTest(x)\ny.sort(key=float)')
add_timing('sort sorted', None, 'x.sort()')
add_timing('sort sorted key', None, 'x.sort(key=int)')
add_timing('sort reversed', 'x = list(range(n))\nx.reverse()', 'y = TypeToTest(x)\ny.sort()')
add_timing('sort reversed key', 'x = list(range(n))\nx.reverse()', 'y = TypeToTest(x)\ny.sort(key=int)')

add_timing('sort random tuples', 'import random\nx = [(random.random(), random.random()) for i in range(n)]', 'y = TypeToTest(x)\ny.sort()')

# Setup code defining a minimal user class that is only orderable via __lt__,
# to benchmark sorting of rich-comparison objects.
ob_def = '''
import random
class ob:
    def __init__(self, v):
        self.v = v
    def __lt__(self, other):
        return self.v < other.v
x = [ob(random.randrange(n*4)) for i in range(n)]
'''

add_timing('sort random objects', ob_def, 'y = TypeToTest(x)\ny.sort()')
add_timing('sort sorted objects', ob_def + 'x.sort()', 'x.sort()')

# --- Construction benchmarks ---
add_timing('init from list', 'x = list(range(n))', 'y = TypeToTest(x)')
add_timing('init from tuple', 'x = tuple(range(n))', 'y = TypeToTest(x)')
add_timing('init from iterable', 'x = range(n)', 'y = TypeToTest(x)')
add_timing('init from same type', None, 'y = TypeToTest(x)')

add_timing('shuffle', 'from random import shuffle\nx = TypeToTest(range(n))', 'shuffle(x)')
+
if __name__ == '__main__':
    # Select the default configuration before timing anything.
    make(128)
    requested = sys.argv[1:]
    if requested:
        # Run only the benchmarks named on the command line.
        for name in requested:
            run_timing(name)
    else:
        run_all()
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/__init__.py
diff --git a/test/btuple_tests.py b/test/btuple_tests.py
new file mode 100644
index 0000000..d027ea6
--- /dev/null
+++ b/test/btuple_tests.py
@@ -0,0 +1,161 @@
+# Based on Python's tuple_tests.py, licensed under the Python License
+# Agreement
+
+import unittest
+import sys
+from . import seq_tests
+import blist
+import random
+import gc
+from _btuple import btuple
+import gc
+
class bTupleTest(seq_tests.CommonTest):
    """Exercise btuple with the generic sequence tests plus tuple-specific
    checks, adapted from CPython's tuple_tests.py."""
    type2test = btuple

    def test_constructors(self):
        # NOTE(review): this invokes the parent's test_len rather than
        # test_constructors -- looks like a copy/paste slip; confirm upstream.
        super(bTupleTest, self).test_len()
        # calling built-in types without argument must return empty
        self.assertEqual(tuple(), ())
        t0_3 = (0, 1, 2, 3)
        t0_3_bis = tuple(t0_3)
        self.assert_(t0_3 is t0_3_bis)  # tuple(t) returns t itself, no copy
        self.assertEqual(tuple([]), ())
        self.assertEqual(tuple([0, 1, 2, 3]), (0, 1, 2, 3))
        self.assertEqual(tuple(''), ())
        self.assertEqual(tuple('spam'), ('s', 'p', 'a', 'm'))

    def test_truth(self):
        super(bTupleTest, self).test_truth()
        self.assert_(not ())   # empty tuple is falsy
        self.assert_((42, ))   # non-empty tuple is truthy

    def test_len(self):
        super(bTupleTest, self).test_len()
        self.assertEqual(len(()), 0)
        self.assertEqual(len((0,)), 1)
        self.assertEqual(len((0, 1, 2)), 3)

    def test_iadd(self):
        super(bTupleTest, self).test_iadd()
        # += on a tuple must rebind to a new object.
        u = (0, 1)
        u2 = u
        u += (2, 3)
        self.assert_(u is not u2)

    def test_imul(self):
        super(bTupleTest, self).test_imul()
        # *= on a tuple must rebind to a new object.
        u = (0, 1)
        u2 = u
        u *= 3
        self.assert_(u is not u2)

    def test_tupleresizebug(self):
        # Check that a specific bug in _PyTuple_Resize() is squashed.
        def f():
            for i in range(1000):
                yield i
        self.assertEqual(list(tuple(f())), list(range(1000)))

    def test_hash(self):
        # See SF bug 942952:  Weakness in tuple hash
        # The hash should:
        #      be non-commutative
        #      should spread-out closely spaced values
        #      should not exhibit cancellation in tuples like (x,(x,y))
        #      should be distinct from element hashes:  hash(x)!=hash((x,))
        # This test exercises those cases.
        # For a pure random hash and N=50, the expected number of occupied
        #      buckets when tossing 252,600 balls into 2**32 buckets
        #      is 252,592.6, or about 7.4 expected collisions.  The
        #      standard deviation is 2.73.  On a box with 64-bit hash
        #      codes, no collisions are expected.  Here we accept no
        #      more than 15 collisions.  Any worse and the hash function
        #      is sorely suspect.

        N=50
        base = list(range(N))
        xp = [(i, j) for i in base for j in base]
        inps = base + [(i, j) for i in base for j in xp] + \
                     [(i, j) for i in xp for j in base] + xp + list(zip(base))
        collisions = len(inps) - len(set(map(hash, inps)))
        self.assert_(collisions <= 15)

    def test_repr(self):
        l0 = btuple()
        l2 = btuple((0, 1, 2))
        a0 = self.type2test(l0)
        a2 = self.type2test(l2)

        self.assertEqual(str(a0), repr(l0))
        self.assertEqual(str(a2), repr(l2))
        self.assertEqual(repr(a0), "btuple(())")
        self.assertEqual(repr(a2), "btuple((0, 1, 2))")

    def _not_tracked(self, t):
        # Assert that *t* is not tracked by the cyclic GC (Python 3 only).
        if sys.version_info[0] < 3:
            return
        else: # pragma: no cover
            # Nested tuples can take several collections to untrack
            gc.collect()
            gc.collect()
            self.assertFalse(gc.is_tracked(t), t)

    def _tracked(self, t):
        # Assert that *t* is (and stays) tracked by the cyclic GC (Py3 only).
        if sys.version_info[0] < 3:
            return
        else: # pragma: no cover
            self.assertTrue(gc.is_tracked(t), t)
            gc.collect()
            gc.collect()
            self.assertTrue(gc.is_tracked(t), t)

    def test_track_literals(self):
        # Test GC-optimization of tuple literals
        x, y, z = 1.5, "a", []

        self._not_tracked(())
        self._not_tracked((1,))
        self._not_tracked((1, 2))
        self._not_tracked((1, 2, "a"))
        self._not_tracked((1, 2, (None, True, False, ()), int))
        self._not_tracked((object(),))
        self._not_tracked(((1, x), y, (2, 3)))

        # Tuples with mutable elements are always tracked, even if those
        # elements are not tracked right now.
        self._tracked(([],))
        self._tracked(([1],))
        self._tracked(({},))
        self._tracked((set(),))
        self._tracked((x, y, z))

    def check_track_dynamic(self, tp, always_track):
        # Build instances of *tp* from various immutable/mutable inputs and
        # check whether the GC tracks them as expected.
        x, y, z = 1.5, "a", []

        check = self._tracked if always_track else self._not_tracked
        check(tp())
        check(tp([]))
        check(tp(set()))
        check(tp([1, x, y]))
        check(tp(obj for obj in [1, x, y]))
        check(tp(set([1, x, y])))
        check(tp(tuple([obj]) for obj in [1, x, y]))
        check(tuple(tp([obj]) for obj in [1, x, y]))

        self._tracked(tp([z]))
        self._tracked(tp([[x, y]]))
        self._tracked(tp([{x: y}]))
        self._tracked(tp(obj for obj in [x, y, z]))
        self._tracked(tp(tuple([obj]) for obj in [x, y, z]))
        self._tracked(tuple(tp([obj]) for obj in [x, y, z]))

    def test_track_dynamic(self):
        # Test GC-optimization of dynamically constructed tuples.
        self.check_track_dynamic(tuple, False)

    def test_track_subtypes(self):
        # Tuple subtypes must always be tracked
        class MyTuple(tuple):
            pass
        self.check_track_dynamic(MyTuple, True)
diff --git a/test/list_tests.py b/test/list_tests.py
new file mode 100644
index 0000000..53976a9
--- /dev/null
+++ b/test/list_tests.py
@@ -0,0 +1,558 @@
+# This file taken from Python, licensed under the Python License Agreement
+
+from __future__ import print_function
+"""
+Tests common to list and UserList.UserList
+"""
+
+import sys
+import os
+
+from . import unittest
+from test import test_support, seq_tests
+
+from decimal import Decimal
+
def CmpToKey(mycmp):
    """Wrap an old-style cmp function so it can be passed as a sort key.

    Note: this deliberately tests ``== -1`` (not ``< 0``), matching the
    original, so it is not interchangeable with functools.cmp_to_key.
    """
    class _Key(object):
        def __init__(self, obj):
            self.obj = obj

        def __lt__(self, other):
            return mycmp(self.obj, other.obj) == -1

    return _Key
+
class CommonTest(seq_tests.CommonTest):
    """Mutable-sequence tests shared by blist-like types, adapted from
    CPython's list_tests.py.  The concrete type under test is supplied by
    subclasses via ``type2test``."""

    def test_init(self):
        # Iterable arg is optional
        self.assertEqual(self.type2test([]), self.type2test())

        # Init clears previous values
        a = self.type2test([1, 2, 3])
        a.__init__()
        self.assertEqual(a, self.type2test([]))

        # Init overwrites previous values
        a = self.type2test([1, 2, 3])
        a.__init__([4, 5, 6])
        self.assertEqual(a, self.type2test([4, 5, 6]))

        # Mutables always return a new object
        b = self.type2test(a)
        self.assertNotEqual(id(a), id(b))
        self.assertEqual(a, b)

    def test_repr(self):
        l0 = []
        l2 = [0, 1, 2]
        a0 = self.type2test(l0)
        a2 = self.type2test(l2)

        self.assertEqual(str(a0), 'blist(%s)' % str(l0))
        self.assertEqual(repr(a0), 'blist(%s)' % repr(l0))
        self.assertEqual(repr(a2), 'blist(%s)' % repr(l2))
        self.assertEqual(str(a2), "blist([0, 1, 2])")
        self.assertEqual(repr(a2), "blist([0, 1, 2])")

        # A self-referential list must render its cycle as [...].
        a2.append(a2)
        a2.append(3)
        self.assertEqual(str(a2), "blist([0, 1, 2, [...], 3])")
        self.assertEqual(repr(a2), "blist([0, 1, 2, [...], 3])")

    def test_print(self):
        # str() of a large recursive container must round-trip through a
        # file and equal repr().
        d = self.type2test(range(200))
        d.append(d)
        d.extend(range(200,400))
        d.append(d)
        d.append(400)
        try:
            fo = open(test_support.TESTFN, "w")
            fo.write(str(d))
            fo.close()
            fo = open(test_support.TESTFN, "r")
            self.assertEqual(fo.read(), repr(d))
        finally:
            fo.close()
            os.remove(test_support.TESTFN)

    def test_set_subscript(self):
        a = self.type2test(list(range(20)))
        self.assertRaises(ValueError, a.__setitem__, slice(0, 10, 0), [1,2,3])
        self.assertRaises(TypeError, a.__setitem__, slice(0, 10), 1)
        self.assertRaises(ValueError, a.__setitem__, slice(0, 10, 2), [1,2])
        self.assertRaises(TypeError, a.__getitem__, 'x', 1)
        a[slice(2,10,3)] = [1,2,3]
        self.assertEqual(a, self.type2test([0, 1, 1, 3, 4, 2, 6, 7, 3,
                                            9, 10, 11, 12, 13, 14, 15,
                                            16, 17, 18, 19]))

    def test_reversed(self):
        a = self.type2test(list(range(20)))
        r = reversed(a)
        self.assertEqual(list(r), self.type2test(list(range(19, -1, -1))))
        if hasattr(r, '__next__'): # pragma: no cover
            self.assertRaises(StopIteration, r.__next__)
        else: # pragma: no cover
            self.assertRaises(StopIteration, r.next)
        self.assertEqual(list(reversed(self.type2test())),
                         self.type2test())

    def test_setitem(self):
        a = self.type2test([0, 1])
        a[0] = 0
        a[1] = 100
        self.assertEqual(a, self.type2test([0, 100]))
        a[-1] = 200
        self.assertEqual(a, self.type2test([0, 200]))
        a[-2] = 100
        self.assertEqual(a, self.type2test([100, 200]))
        self.assertRaises(IndexError, a.__setitem__, -3, 200)
        self.assertRaises(IndexError, a.__setitem__, 2, 200)

        a = self.type2test([])
        self.assertRaises(IndexError, a.__setitem__, 0, 200)
        self.assertRaises(IndexError, a.__setitem__, -1, 200)
        self.assertRaises(TypeError, a.__setitem__)

        a = self.type2test([0,1,2,3,4])
        a[0] = 1
        a[1] = 2
        a[2] = 3
        self.assertEqual(a, self.type2test([1,2,3,3,4]))
        a[0] = 5
        a[1] = 6
        a[2] = 7
        self.assertEqual(a, self.type2test([5,6,7,3,4]))
        a[-2] = 88
        a[-1] = 99
        self.assertEqual(a, self.type2test([5,6,7,88,99]))
        a[-2] = 8
        a[-1] = 9
        self.assertEqual(a, self.type2test([5,6,7,8,9]))

    def test_delitem(self):
        a = self.type2test([0, 1])
        del a[1]
        self.assertEqual(a, [0])
        del a[0]
        self.assertEqual(a, [])

        a = self.type2test([0, 1])
        del a[-2]
        self.assertEqual(a, [1])
        del a[-1]
        self.assertEqual(a, [])

        a = self.type2test([0, 1])
        self.assertRaises(IndexError, a.__delitem__, -3)
        self.assertRaises(IndexError, a.__delitem__, 2)

        a = self.type2test([])
        self.assertRaises(IndexError, a.__delitem__, 0)

        self.assertRaises(TypeError, a.__delitem__)

    def test_setslice(self):
        l = [0, 1]
        a = self.type2test(l)

        # Exhaustively compare slice assignment against the plain-list
        # reference for every small start/stop combination.
        for i in range(-3, 4):
            a[:i] = l[:i]
            self.assertEqual(a, l)
            a2 = a[:]
            a2[:i] = a[:i]
            self.assertEqual(a2, a)
            a[i:] = l[i:]
            self.assertEqual(a, l)
            a2 = a[:]
            a2[i:] = a[i:]
            self.assertEqual(a2, a)
            for j in range(-3, 4):
                a[i:j] = l[i:j]
                self.assertEqual(a, l)
                a2 = a[:]
                a2[i:j] = a[i:j]
                self.assertEqual(a2, a)

        aa2 = a2[:]
        aa2[:0] = [-2, -1]
        self.assertEqual(aa2, [-2, -1, 0, 1])
        aa2[0:] = []
        self.assertEqual(aa2, [])

        # Assigning a sequence to a slice of itself.
        a = self.type2test([1, 2, 3, 4, 5])
        a[:-1] = a
        self.assertEqual(a, self.type2test([1, 2, 3, 4, 5, 5]))
        a = self.type2test([1, 2, 3, 4, 5])
        a[1:] = a
        self.assertEqual(a, self.type2test([1, 1, 2, 3, 4, 5]))
        a = self.type2test([1, 2, 3, 4, 5])
        a[1:-1] = a
        self.assertEqual(a, self.type2test([1, 1, 2, 3, 4, 5, 5]))

        a = self.type2test([])
        a[:] = tuple(range(10))
        self.assertEqual(a, self.type2test(list(range(10))))

        if sys.version_info[0] < 3:
            self.assertRaises(TypeError, a.__setslice__, 0, 1, 5)
            self.assertRaises(TypeError, a.__setslice__)

    def test_delslice(self):
        a = self.type2test([0, 1])
        del a[1:2]
        del a[0:1]
        self.assertEqual(a, self.type2test([]))

        a = self.type2test([0, 1])
        del a[1:2]
        del a[0:1]
        self.assertEqual(a, self.type2test([]))

        a = self.type2test([0, 1])
        del a[-2:-1]
        self.assertEqual(a, self.type2test([1]))

        a = self.type2test([0, 1])
        del a[-2:-1]
        self.assertEqual(a, self.type2test([1]))

        a = self.type2test([0, 1])
        del a[1:]
        del a[:1]
        self.assertEqual(a, self.type2test([]))

        a = self.type2test([0, 1])
        del a[1:]
        del a[:1]
        self.assertEqual(a, self.type2test([]))

        a = self.type2test([0, 1])
        del a[-1:]
        self.assertEqual(a, self.type2test([0]))

        a = self.type2test([0, 1])
        del a[-1:]
        self.assertEqual(a, self.type2test([0]))

        a = self.type2test([0, 1])
        del a[:]
        self.assertEqual(a, self.type2test([]))

    def test_append(self):
        a = self.type2test([])
        a.append(0)
        a.append(1)
        a.append(2)
        self.assertEqual(a, self.type2test([0, 1, 2]))

        self.assertRaises(TypeError, a.append)

    def test_extend(self):
        a1 = self.type2test([0])
        a2 = self.type2test((0, 1))
        a = a1[:]
        a.extend(a2)
        self.assertEqual(a, a1 + a2)

        a.extend(self.type2test([]))
        self.assertEqual(a, a1 + a2)

        # Extending a list with itself must not loop forever.
        a.extend(a)
        self.assertEqual(a, self.type2test([0, 0, 1, 0, 0, 1]))

        a = self.type2test("spam")
        a.extend("eggs")
        self.assertEqual(a, list("spameggs"))

        self.assertRaises(TypeError, a.extend, None)

        self.assertRaises(TypeError, a.extend)

    def test_insert(self):
        a = self.type2test([0, 1, 2])
        a.insert(0, -2)
        a.insert(1, -1)
        a.insert(2, 0)
        self.assertEqual(a, [-2, -1, 0, 0, 1, 2])

        # Out-of-range insert positions clamp to the ends, like list.insert.
        b = a[:]
        b.insert(-2, "foo")
        b.insert(-200, "left")
        b.insert(200, "right")
        self.assertEqual(b, self.type2test(["left",-2,-1,0,0,"foo",1,2,"right"]))

        self.assertRaises(TypeError, a.insert)

    def test_pop(self):
        a = self.type2test([-1, 0, 1])
        a.pop()
        self.assertEqual(a, [-1, 0])
        a.pop(0)
        self.assertEqual(a, [0])
        self.assertRaises(IndexError, a.pop, 5)
        a.pop(0)
        self.assertEqual(a, [])
        self.assertRaises(IndexError, a.pop)
        self.assertRaises(TypeError, a.pop, 42, 42)
        # NOTE(review): unused leftover fixture -- nothing is asserted after
        # this assignment.
        a = self.type2test([0, 10, 20, 30, 40])

    def test_remove(self):
        a = self.type2test([0, 0, 1])
        a.remove(1)
        self.assertEqual(a, [0, 0])
        a.remove(0)
        self.assertEqual(a, [0])
        a.remove(0)
        self.assertEqual(a, [])

        self.assertRaises(ValueError, a.remove, 0)

        self.assertRaises(TypeError, a.remove)

        class BadExc(Exception):
            pass

        # Element whose __eq__ raises partway through the scan.
        class BadCmp:
            def __eq__(self, other):
                if other == 2:
                    raise BadExc()
                return False

        a = self.type2test([0, 1, 2, 3])
        self.assertRaises(BadExc, a.remove, BadCmp())

        class BadCmp2:
            def __eq__(self, other):
                raise BadExc()

        d = self.type2test('abcdefghcij')
        d.remove('c')
        self.assertEqual(d, self.type2test('abdefghcij'))
        d.remove('c')
        self.assertEqual(d, self.type2test('abdefghij'))
        self.assertRaises(ValueError, d.remove, 'c')
        self.assertEqual(d, self.type2test('abdefghij'))

        # Handle comparison errors
        d = self.type2test(['a', 'b', BadCmp2(), 'c'])
        e = self.type2test(d)
        self.assertRaises(BadExc, d.remove, 'c')
        for x, y in zip(d, e):
            # verify that original order and values are retained.
            self.assert_(x is y)

    def test_count(self):
        a = self.type2test([0, 1, 2])*3
        self.assertEqual(a.count(0), 3)
        self.assertEqual(a.count(1), 3)
        self.assertEqual(a.count(3), 0)

        self.assertRaises(TypeError, a.count)

        class BadExc(Exception):
            pass

        class BadCmp:
            def __eq__(self, other):
                if other == 2:
                    raise BadExc()
                return False

        # count() must propagate exceptions raised during comparison.
        self.assertRaises(BadExc, a.count, BadCmp())

    def test_index(self):
        u = self.type2test([0, 1])
        self.assertEqual(u.index(0), 0)
        self.assertEqual(u.index(1), 1)
        self.assertRaises(ValueError, u.index, 2)

        u = self.type2test([-2, -1, 0, 0, 1, 2])
        self.assertEqual(u.count(0), 2)
        self.assertEqual(u.index(0), 2)
        self.assertEqual(u.index(0, 2), 2)
        self.assertEqual(u.index(-2, -10), 0)
        self.assertEqual(u.index(0, 3), 3)
        self.assertEqual(u.index(0, 3, 4), 3)
        self.assertRaises(ValueError, u.index, 2, 0, -10)

        self.assertRaises(TypeError, u.index)

        class BadExc(Exception):
            pass

        class BadCmp:
            def __eq__(self, other):
                if other == 2:
                    raise BadExc()
                return False

        a = self.type2test([0, 1, 2, 3])
        self.assertRaises(BadExc, a.index, BadCmp())

        # start/stop arguments, including values beyond the valid range.
        a = self.type2test([-2, -1, 0, 0, 1, 2])
        self.assertEqual(a.index(0), 2)
        self.assertEqual(a.index(0, 2), 2)
        self.assertEqual(a.index(0, -4), 2)
        self.assertEqual(a.index(-2, -10), 0)
        self.assertEqual(a.index(0, 3), 3)
        self.assertEqual(a.index(0, -3), 3)
        self.assertEqual(a.index(0, 3, 4), 3)
        self.assertEqual(a.index(0, -3, -2), 3)
        self.assertEqual(a.index(0, -4*sys.maxsize, 4*sys.maxsize), 2)
        self.assertRaises(ValueError, a.index, 0, 4*sys.maxsize,-4*sys.maxsize)
        self.assertRaises(ValueError, a.index, 2, 0, -10)
        a.remove(0)
        self.assertRaises(ValueError, a.index, 2, 0, 4)
        self.assertEqual(a, self.type2test([-2, -1, 0, 1, 2]))

        # Test modifying the list during index's iteration
        class EvilCmp:
            def __init__(self, victim):
                self.victim = victim
            def __eq__(self, other):
                del self.victim[:]
                return False
        a = self.type2test()
        a[:] = [EvilCmp(a) for _ in range(100)]
        # This used to seg fault before patch #1005778
        self.assertRaises(ValueError, a.index, None)

    def test_reverse(self):
        u = self.type2test([-2, -1, 0, 1, 2])
        u2 = u[:]
        u.reverse()
        self.assertEqual(u, [2, 1, 0, -1, -2])
        u.reverse()
        self.assertEqual(u, u2)

        self.assertRaises(TypeError, u.reverse, 42)

    def test_sort(self):
        u = self.type2test([1, 0])
        u.sort()
        self.assertEqual(u, [0, 1])

        u = self.type2test([2,1,0,-1,-2])
        u.sort()
        self.assertEqual(u, self.type2test([-2,-1,0,1,2]))

        self.assertRaises(TypeError, u.sort, 42, 42)

        a = self.type2test(reversed(list(range(512))))
        a.sort()
        self.assertEqual(a, self.type2test(list(range(512))))

        def revcmp(a, b): # pragma: no cover
            if a == b:
                return 0
            elif a < b:
                return 1
            else: # a > b
                return -1
        u.sort(key=CmpToKey(revcmp))
        self.assertEqual(u, self.type2test([2,1,0,-1,-2]))

        # The following dumps core in unpatched Python 1.5:
        def myComparison(x,y):
            xmod, ymod = x%3, y%7
            if xmod == ymod:
                return 0
            elif xmod < ymod:
                return -1
            else: # xmod > ymod
                return 1
        z = self.type2test(list(range(12)))
        z.sort(key=CmpToKey(myComparison))

        self.assertRaises(TypeError, z.sort, 2)

        # NOTE(review): 'cmp' is Python-2-only; under Python 3 this key
        # raises NameError -- presumably the ValueError from mutation
        # detection fires first, but confirm.
        def selfmodifyingComparison(x,y):
            z.append(1)
            return cmp(x, y)
        self.assertRaises(ValueError, z.sort, key=CmpToKey(selfmodifyingComparison))

        if sys.version_info[0] < 3:
            self.assertRaises(TypeError, z.sort, lambda x, y: 's')

        self.assertRaises(TypeError, z.sort, 42, 42, 42, 42)

    def test_slice(self):
        u = self.type2test("spam")
        u[:2] = "h"
        self.assertEqual(u, list("ham"))

    def test_iadd(self):
        super(CommonTest, self).test_iadd()
        # Unlike tuples, += on a mutable sequence keeps the same object.
        u = self.type2test([0, 1])
        u2 = u
        u += [2, 3]
        self.assert_(u is u2)

        u = self.type2test("spam")
        u += "eggs"
        self.assertEqual(u, self.type2test("spameggs"))

        self.assertRaises(TypeError, u.__iadd__, None)

    def test_imul(self):
        u = self.type2test([0, 1])
        u *= 3
        self.assertEqual(u, self.type2test([0, 1, 0, 1, 0, 1]))
        u *= 0
        self.assertEqual(u, self.type2test([]))
        s = self.type2test([])
        oldid = id(s)
        s *= 10
        self.assertEqual(id(s), oldid)

    def test_extendedslicing(self):
        # subscript
        a = self.type2test([0,1,2,3,4])

        # deletion
        del a[::2]
        self.assertEqual(a, self.type2test([1,3]))
        a = self.type2test(list(range(5)))
        del a[1::2]
        self.assertEqual(a, self.type2test([0,2,4]))
        a = self.type2test(list(range(5)))
        del a[1::-2]
        self.assertEqual(a, self.type2test([0,2,3,4]))
        a = self.type2test(list(range(10)))
        del a[::1000]
        self.assertEqual(a, self.type2test([1, 2, 3, 4, 5, 6, 7, 8, 9]))
        # assignment
        a = self.type2test(list(range(10)))
        a[::2] = [-1]*5
        self.assertEqual(a, self.type2test([-1, 1, -1, 3, -1, 5, -1, 7, -1, 9]))
        a = self.type2test(list(range(10)))
        a[::-4] = [10]*3
        self.assertEqual(a, self.type2test([0, 10, 2, 3, 4, 10, 6, 7, 8 ,10]))
        a = self.type2test(list(range(4)))
        a[::-1] = a
        self.assertEqual(a, self.type2test([3, 2, 1, 0]))
        a = self.type2test(list(range(10)))
        b = a[:]
        c = a[:]
        a[2:3] = self.type2test(["two", "elements"])
        b[slice(2,3)] = self.type2test(["two", "elements"])
        c[2:3:] = self.type2test(["two", "elements"])
        self.assertEqual(a, b)
        self.assertEqual(a, c)
        a = self.type2test(list(range(10)))
        a[::2] = tuple(range(5))
        self.assertEqual(a, self.type2test([0, 1, 1, 3, 2, 5, 3, 7, 4, 9]))

    def test_constructor_exception_handling(self):
        # Bug #1242657
        class Iter(object):
            def next(self):
                raise KeyboardInterrupt
            __next__ = next

        class F(object):
            def __iter__(self):
                return Iter()
        # The constructor must propagate exceptions raised by the iterator.
        self.assertRaises(KeyboardInterrupt, self.type2test, F())
diff --git a/test/mapping_tests.py b/test/mapping_tests.py
new file mode 100644
index 0000000..e51756f
--- /dev/null
+++ b/test/mapping_tests.py
@@ -0,0 +1,678 @@
+# This file taken from Python, licensed under the Python License Agreement
+
+# tests common to dict and UserDict
+import unittest, sys
+import collections
+try:
+ from collections import UserDict # Python 3
+except ImportError:
+ from UserDict import UserDict # Python 2
+
+class BasicTestMappingProtocol(unittest.TestCase):
+ # This base class can be used to check that an object conforms to the
+ # mapping protocol
+
+ # Functions that can be useful to override to adapt to dictionary
+ # semantics
+ type2test = None # which class is being tested (overwrite in subclasses)
+
+ def _reference(self): # pragma: no cover
+ """Return a dictionary of values which are invariant by storage
+ in the object under test."""
+ return {1:2, "key1":"value1", "key2":(1,2,3)}
+ def _empty_mapping(self):
+ """Return an empty mapping object"""
+ return self.type2test()
+ def _full_mapping(self, data):
+ """Return a mapping object with the value contained in data
+ dictionary"""
+ x = self._empty_mapping()
+ for key, value in data.items():
+ x[key] = value
+ return x
+
+ def __init__(self, *args, **kw):
+ unittest.TestCase.__init__(self, *args, **kw)
+ self.reference = self._reference().copy()
+
+ # A (key, value) pair not in the mapping
+ key, value = self.reference.popitem()
+ self.other = {key:value}
+
+ # A (key, value) pair in the mapping
+ key, value = self.reference.popitem()
+ self.inmapping = {key:value}
+ self.reference[key] = value
+
+ def test_read(self):
+ # Test for read only operations on mapping
+ p = self._empty_mapping()
+ p1 = dict(p) #workaround for singleton objects
+ d = self._full_mapping(self.reference)
+ if d is p: # pragma: no cover
+ p = p1
+ #Indexing
+ for key, value in self.reference.items():
+ self.assertEqual(d[key], value)
+ knownkey = list(self.other.keys())[0]
+ self.failUnlessRaises(KeyError, lambda:d[knownkey])
+ #len
+ self.assertEqual(len(p), 0)
+ self.assertEqual(len(d), len(self.reference))
+ #__contains__
+ for k in self.reference:
+ self.assert_(k in d)
+ for k in self.other:
+ self.failIf(k in d)
+ #cmp
+ self.assertEqual(p, p)
+ self.assertEqual(d, d)
+ self.assertNotEqual(p, d)
+ self.assertNotEqual(d, p)
+ #__non__zero__
+ if p: self.fail("Empty mapping must compare to False")
+ if not d: self.fail("Full mapping must compare to True")
+ # keys(), items(), iterkeys() ...
+ def check_iterandlist(iter, lst, ref):
+ if sys.version_info[0] < 3: # pragma: no cover
+ self.assert_(hasattr(iter, 'next'))
+ else: # pragma: no cover
+ self.assert_(hasattr(iter, '__next__'))
+ self.assert_(hasattr(iter, '__iter__'))
+ x = list(iter)
+ self.assert_(set(x)==set(lst)==set(ref))
+ check_iterandlist(iter(d.keys()), list(d.keys()),
+ self.reference.keys())
+ check_iterandlist(iter(d), list(d.keys()), self.reference.keys())
+ check_iterandlist(iter(d.values()), list(d.values()),
+ self.reference.values())
+ check_iterandlist(iter(d.items()), list(d.items()),
+ self.reference.items())
+ #get
+ key, value = next(iter(d.items()))
+ knownkey, knownvalue = next(iter(self.other.items()))
+ self.assertEqual(d.get(key, knownvalue), value)
+ self.assertEqual(d.get(knownkey, knownvalue), knownvalue)
+ self.failIf(knownkey in d)
+
def test_write(self):
    """Mutation protocol: item assignment/deletion, update(), setdefault(),
    pop() and popitem()."""
    # Test for write operations on mapping
    p = self._empty_mapping()
    #Indexing
    for key, value in self.reference.items():
        p[key] = value
        self.assertEqual(p[key], value)
    for key in self.reference.keys():
        del p[key]
        # Deleted keys must no longer be readable.
        self.failUnlessRaises(KeyError, lambda:p[key])
    p = self._empty_mapping()
    #update
    p.update(self.reference)
    self.assertEqual(dict(p), self.reference)
    items = list(p.items())
    p = self._empty_mapping()
    # update() must also accept an iterable of (key, value) pairs.
    p.update(items)
    self.assertEqual(dict(p), self.reference)
    d = self._full_mapping(self.reference)
    #setdefault
    key, value = next(iter(d.items()))
    knownkey, knownvalue = next(iter(self.other.items()))
    # setdefault on an existing key returns the stored value unchanged...
    self.assertEqual(d.setdefault(key, knownvalue), value)
    self.assertEqual(d[key], value)
    # ...and on a missing key inserts and returns the default.
    self.assertEqual(d.setdefault(knownkey, knownvalue), knownvalue)
    self.assertEqual(d[knownkey], knownvalue)
    #pop
    self.assertEqual(d.pop(knownkey), knownvalue)
    self.failIf(knownkey in d)
    self.assertRaises(KeyError, d.pop, knownkey)
    default = 909
    d[knownkey] = knownvalue
    self.assertEqual(d.pop(knownkey, default), knownvalue)
    self.failIf(knownkey in d)
    self.assertEqual(d.pop(knownkey, default), default)
    #popitem
    key, value = d.popitem()
    self.failIf(key in d)
    self.assertEqual(value, self.reference[key])
    p=self._empty_mapping()
    self.assertRaises(KeyError, p.popitem)
+
+ def test_constructor(self):
+ self.assertEqual(self._empty_mapping(), self._empty_mapping())
+
+ def test_bool(self):
+ self.assert_(not self._empty_mapping())
+ self.assert_(self.reference)
+ self.assert_(bool(self._empty_mapping()) is False)
+ self.assert_(bool(self.reference) is True)
+
+ def test_keys(self):
+ d = self._empty_mapping()
+ self.assertEqual(list(d.keys()), [])
+ d = self.reference
+ self.assert_(list(self.inmapping.keys())[0] in d.keys())
+ self.assert_(list(self.other.keys())[0] not in d.keys())
+ self.assertRaises(TypeError, d.keys, None)
+
+ def test_values(self):
+ d = self._empty_mapping()
+ self.assertEqual(list(d.values()), [])
+
+ self.assertRaises(TypeError, d.values, None)
+
+ def test_items(self):
+ d = self._empty_mapping()
+ self.assertEqual(list(d.items()), [])
+
+ self.assertRaises(TypeError, d.items, None)
+
+ def test_len(self):
+ d = self._empty_mapping()
+ self.assertEqual(len(d), 0)
+
+ def test_getitem(self):
+ d = self.reference
+ self.assertEqual(d[list(self.inmapping.keys())[0]],
+ list(self.inmapping.values())[0])
+
+ self.assertRaises(TypeError, d.__getitem__)
+
def test_update(self):
    """update() must accept mappings, item sequences, iterators and
    dict-like objects, and must propagate exceptions from bad inputs."""
    # mapping argument
    d = self._empty_mapping()
    d.update(self.other)
    self.assertEqual(list(d.items()), list(self.other.items()))

    # No argument
    d = self._empty_mapping()
    d.update()
    self.assertEqual(d, self._empty_mapping())

    # item sequence
    d = self._empty_mapping()
    d.update(self.other.items())
    self.assertEqual(list(d.items()), list(self.other.items()))

    # Iterator
    d = self._empty_mapping()
    d.update(self.other.items())
    self.assertEqual(list(d.items()), list(self.other.items()))

    # FIXME: Doesn't work with UserDict
    # self.assertRaises((TypeError, AttributeError), d.update, None)
    self.assertRaises((TypeError, AttributeError), d.update, 42)

    outerself = self
    class SimpleUserDict:
        # Minimal dict-like object: keys() plus __getitem__ is all
        # update() needs to consume it.
        def __init__(self):
            self.d = outerself.reference
        def keys(self):
            return self.d.keys()
        def __getitem__(self, i):
            return self.d[i]
    d.clear()
    d.update(SimpleUserDict())
    i1 = sorted(d.items())
    i2 = sorted(self.reference.items())
    self.assertEqual(i1, i2)

    class Exc(Exception): pass

    d = self._empty_mapping()
    class FailingUserDict:
        # keys() itself raises: update() must propagate the exception.
        def keys(self):
            raise Exc
    self.assertRaises(Exc, d.update, FailingUserDict())

    d.clear()

    class FailingUserDict:
        # The key iterator raises after yielding its first key.
        def keys(self):
            class BogonIter:
                def __init__(self):
                    self.i = 1
                def __iter__(self):
                    return self
                def __next__(self):
                    if self.i:
                        self.i = 0
                        return 'a'
                    raise Exc
                next = __next__
            return BogonIter()
        def __getitem__(self, key):
            return key
    self.assertRaises(Exc, d.update, FailingUserDict())

    class FailingUserDict:
        # Keys iterate fine, but every value lookup raises.
        def keys(self):
            class BogonIter:
                def __init__(self):
                    self.i = ord('a')
                def __iter__(self):
                    return self
                def __next__(self):
                    if self.i <= ord('z'):
                        rtn = chr(self.i)
                        self.i += 1
                        return rtn
                    else: # pragma: no cover
                        raise StopIteration
                next = __next__
            return BogonIter()
        def __getitem__(self, key):
            raise Exc
    self.assertRaises(Exc, d.update, FailingUserDict())

    d = self._empty_mapping()
    class badseq(object):
        def __iter__(self):
            return self
        def __next__(self):
            raise Exc()
        next = __next__

    self.assertRaises(Exc, d.update, badseq())

    # Each item must be exactly a (key, value) pair.
    self.assertRaises(ValueError, d.update, [(1, 2, 3)])
+
+ # no test_fromkeys or test_copy as both os.environ and selves don't support it
+
+ def test_get(self):
+ d = self._empty_mapping()
+ self.assert_(d.get(list(self.other.keys())[0]) is None)
+ self.assertEqual(d.get(list(self.other.keys())[0], 3), 3)
+ d = self.reference
+ self.assert_(d.get(list(self.other.keys())[0]) is None)
+ self.assertEqual(d.get(list(self.other.keys())[0], 3), 3)
+ self.assertEqual(d.get(list(self.inmapping.keys())[0]),
+ list(self.inmapping.values())[0])
+ self.assertEqual(d.get(list(self.inmapping.keys())[0], 3),
+ list(self.inmapping.values())[0])
+ self.assertRaises(TypeError, d.get)
+ self.assertRaises(TypeError, d.get, None, None, None)
+
+ def test_setdefault(self):
+ d = self._empty_mapping()
+ self.assertRaises(TypeError, d.setdefault)
+
+ def test_popitem(self):
+ d = self._empty_mapping()
+ self.assertRaises(KeyError, d.popitem)
+ self.assertRaises(TypeError, d.popitem, 42)
+
+ def test_pop(self):
+ d = self._empty_mapping()
+ k, v = list(self.inmapping.items())[0]
+ d[k] = v
+ self.assertRaises(KeyError, d.pop, list(self.other.keys())[0])
+
+ self.assertEqual(d.pop(k), v)
+ self.assertEqual(len(d), 0)
+
+ self.assertRaises(KeyError, d.pop, k)
+
+
+class TestMappingProtocol(BasicTestMappingProtocol):
+ def test_constructor(self):
+ BasicTestMappingProtocol.test_constructor(self)
+ self.assert_(self._empty_mapping() is not self._empty_mapping())
+ self.assertEqual(self.type2test(x=1, y=2), self._full_mapping({"x": 1, "y": 2}))
+
+ def test_bool(self):
+ BasicTestMappingProtocol.test_bool(self)
+ self.assert_(not self._empty_mapping())
+ self.assert_(self._full_mapping({"x": "y"}))
+ self.assert_(bool(self._empty_mapping()) is False)
+ self.assert_(bool(self._full_mapping({"x": "y"})) is True)
+
+ def test_keys(self):
+ BasicTestMappingProtocol.test_keys(self)
+ d = self._empty_mapping()
+ self.assertEqual(list(d.keys()), [])
+ d = self._full_mapping({'a': 1, 'b': 2})
+ k = d.keys()
+ self.assert_('a' in k)
+ self.assert_('b' in k)
+ self.assert_('c' not in k)
+
+ def test_values(self):
+ BasicTestMappingProtocol.test_values(self)
+ d = self._full_mapping({1:2})
+ self.assertEqual(list(d.values()), [2])
+
+ def test_items(self):
+ BasicTestMappingProtocol.test_items(self)
+
+ d = self._full_mapping({1:2})
+ self.assertEqual(list(d.items()), [(1, 2)])
+
+ def test_contains(self):
+ d = self._empty_mapping()
+ self.assert_(not ('a' in d))
+ self.assert_('a' not in d)
+ d = self._full_mapping({'a': 1, 'b': 2})
+ self.assert_('a' in d)
+ self.assert_('b' in d)
+ self.assert_('c' not in d)
+
+ self.assertRaises(TypeError, d.__contains__)
+
+ def test_len(self):
+ BasicTestMappingProtocol.test_len(self)
+ d = self._full_mapping({'a': 1, 'b': 2})
+ self.assertEqual(len(d), 2)
+
+ def test_getitem(self):
+ BasicTestMappingProtocol.test_getitem(self)
+ d = self._full_mapping({'a': 1, 'b': 2})
+ self.assertEqual(d['a'], 1)
+ self.assertEqual(d['b'], 2)
+ d['c'] = 3
+ d['a'] = 4
+ self.assertEqual(d['c'], 3)
+ self.assertEqual(d['a'], 4)
+ del d['b']
+ self.assertEqual(d, self._full_mapping({'a': 4, 'c': 3}))
+
+ self.assertRaises(TypeError, d.__getitem__)
+
+ def test_clear(self):
+ d = self._full_mapping({1:1, 2:2, 3:3})
+ d.clear()
+ self.assertEqual(d, self._full_mapping({}))
+
+ self.assertRaises(TypeError, d.clear, None)
+
+ def test_update(self):
+ BasicTestMappingProtocol.test_update(self)
+ # mapping argument
+ d = self._empty_mapping()
+ d.update({1:100})
+ d.update({2:20})
+ d.update({1:1, 2:2, 3:3})
+ self.assertEqual(d, self._full_mapping({1:1, 2:2, 3:3}))
+
+ # no argument
+ d.update()
+ self.assertEqual(d, self._full_mapping({1:1, 2:2, 3:3}))
+
+ # keyword arguments
+ d = self._empty_mapping()
+ d.update(x=100)
+ d.update(y=20)
+ d.update(x=1, y=2, z=3)
+ self.assertEqual(d, self._full_mapping({"x":1, "y":2, "z":3}))
+
+ # item sequence
+ d = self._empty_mapping()
+ d.update([("x", 100), ("y", 20)])
+ self.assertEqual(d, self._full_mapping({"x":100, "y":20}))
+
+ # Both item sequence and keyword arguments
+ d = self._empty_mapping()
+ d.update([("x", 100), ("y", 20)], x=1, y=2)
+ self.assertEqual(d, self._full_mapping({"x":1, "y":2}))
+
+ # iterator
+ d = self._full_mapping({1:3, 2:4})
+ d.update(self._full_mapping({1:2, 3:4, 5:6}).items())
+ self.assertEqual(d, self._full_mapping({1:2, 2:4, 3:4, 5:6}))
+
+ class SimpleUserDict:
+ def __init__(self):
+ self.d = {1:1, 2:2, 3:3}
+ def keys(self):
+ return self.d.keys()
+ def __getitem__(self, i):
+ return self.d[i]
+ d.clear()
+ d.update(SimpleUserDict())
+ self.assertEqual(d, self._full_mapping({1:1, 2:2, 3:3}))
+
+ def test_fromkeys(self):
+ self.assertEqual(self.type2test.fromkeys('abc'), self._full_mapping({'a':None, 'b':None, 'c':None}))
+ d = self._empty_mapping()
+ self.assert_(not(d.fromkeys('abc') is d))
+ self.assertEqual(d.fromkeys('abc'), self._full_mapping({'a':None, 'b':None, 'c':None}))
+ self.assertEqual(d.fromkeys((4,5),0), self._full_mapping({4:0, 5:0}))
+ self.assertEqual(d.fromkeys([]), self._full_mapping({}))
+ def g():
+ yield 1
+ self.assertEqual(d.fromkeys(g()), self._full_mapping({1:None}))
+ self.assertRaises(TypeError, {}.fromkeys, 3)
+ class dictlike(self.type2test): pass
+ self.assertEqual(dictlike.fromkeys('a'), self._full_mapping({'a':None}))
+ self.assertEqual(dictlike().fromkeys('a'), self._full_mapping({'a':None}))
+ self.assert_(dictlike.fromkeys('a').__class__ is dictlike)
+ self.assert_(dictlike().fromkeys('a').__class__ is dictlike)
+ # FIXME: the following won't work with UserDict, because it's an old style class
+ # self.assert_(type(dictlike.fromkeys('a')) is dictlike)
+ class mydict(self.type2test):
+ def __new__(cls):
+ return UserDict()
+ ud = mydict.fromkeys('ab')
+ self.assertEqual(ud, {'a':None, 'b':None})
+ # FIXME: the following won't work with UserDict, because it's an old style class
+ # self.assert_(isinstance(ud, collections.UserDict))
+ self.assertRaises(TypeError, dict.fromkeys)
+
+ class Exc(Exception): pass
+
+ class baddict1(self.type2test):
+ def __init__(self):
+ raise Exc()
+
+ self.assertRaises(Exc, baddict1.fromkeys, [1])
+
+ class BadSeq(object):
+ def __iter__(self):
+ return self
+ def __next__(self):
+ raise Exc()
+ next = __next__
+
+ self.assertRaises(Exc, self.type2test.fromkeys, BadSeq())
+
+ class baddict2(self.type2test):
+ def __setitem__(self, key, value):
+ raise Exc()
+
+ self.assertRaises(Exc, baddict2.fromkeys, [1])
+
+ def test_copy(self):
+ d = self._full_mapping({1:1, 2:2, 3:3})
+ self.assertEqual(d.copy(), self._full_mapping({1:1, 2:2, 3:3}))
+ d = self._empty_mapping()
+ self.assertEqual(d.copy(), d)
+ self.assert_(isinstance(d.copy(), d.__class__))
+ self.assertRaises(TypeError, d.copy, None)
+
+ def test_get(self):
+ BasicTestMappingProtocol.test_get(self)
+ d = self._empty_mapping()
+ self.assert_(d.get('c') is None)
+ self.assertEqual(d.get('c', 3), 3)
+ d = self._full_mapping({'a' : 1, 'b' : 2})
+ self.assert_(d.get('c') is None)
+ self.assertEqual(d.get('c', 3), 3)
+ self.assertEqual(d.get('a'), 1)
+ self.assertEqual(d.get('a', 3), 1)
+
+ def test_setdefault(self):
+ BasicTestMappingProtocol.test_setdefault(self)
+ d = self._empty_mapping()
+ self.assert_(d.setdefault('key0') is None)
+ d.setdefault('key0', [])
+ self.assert_(d.setdefault('key0') is None)
+ d.setdefault('key', []).append(3)
+ self.assertEqual(d['key'][0], 3)
+ d.setdefault('key', []).append(4)
+ self.assertEqual(len(d['key']), 2)
+
def test_popitem(self):
    """popitem() must fully drain mappings of many sizes, built either
    item-by-item or via copy(), yielding matching (repr(i), i) pairs."""
    BasicTestMappingProtocol.test_popitem(self)
    for copymode in -1, +1:
        # -1: b has same structure as a
        # +1: b is a.copy()
        for log2size in range(12):
            size = 2**log2size
            a = self._empty_mapping()
            b = self._empty_mapping()
            for i in range(size):
                a[repr(i)] = i
                if copymode < 0:
                    b[repr(i)] = i
            if copymode > 0:
                b = a.copy()
            for i in range(size):
                # Each popped value must equal the int form of its key.
                ka, va = ta = a.popitem()
                self.assertEqual(va, int(ka))
                kb, vb = tb = b.popitem()
                self.assertEqual(vb, int(kb))
                # Structurally identical mappings must pop identical items.
                self.assert_(not(copymode < 0 and ta != tb))
            self.assert_(not a)
            self.assert_(not b)
+
+ def test_pop(self):
+ BasicTestMappingProtocol.test_pop(self)
+
+ # Tests for pop with specified key
+ d = self._empty_mapping()
+ k, v = 'abc', 'def'
+
+ # verify longs/ints get same value when key > 32 bits (for 64-bit archs)
+ # see SF bug #689659
+ x = 4503599627370496
+ y = 4503599627370496
+ h = self._full_mapping({x: 'anything', y: 'something else'})
+ self.assertEqual(h[x], h[y])
+
+ self.assertEqual(d.pop(k, v), v)
+ d[k] = v
+ self.assertEqual(d.pop(k, 1), v)
+
+
+class TestHashMappingProtocol(TestMappingProtocol):
+
+ def test_getitem(self):
+ TestMappingProtocol.test_getitem(self)
+ class Exc(Exception): pass
+
+ class BadEq(object):
+ def __eq__(self, other): # pragma: no cover
+ raise Exc()
+ def __hash__(self):
+ return 24
+
+ d = self._empty_mapping()
+ d[BadEq()] = 42
+ self.assertRaises(KeyError, d.__getitem__, 23)
+
+ class BadHash(object):
+ fail = False
+ def __hash__(self):
+ if self.fail:
+ raise Exc()
+ else:
+ return 42
+
+ d = self._empty_mapping()
+ x = BadHash()
+ d[x] = 42
+ x.fail = True
+ self.assertRaises(Exc, d.__getitem__, x)
+
+ def test_fromkeys(self):
+ TestMappingProtocol.test_fromkeys(self)
+ class mydict(self.type2test):
+ def __new__(cls):
+ return UserDict()
+ ud = mydict.fromkeys('ab')
+ self.assertEqual(ud, {'a':None, 'b':None})
+ self.assert_(isinstance(ud, UserDict))
+
+ def test_pop(self):
+ TestMappingProtocol.test_pop(self)
+
+ class Exc(Exception): pass
+
+ class BadHash(object):
+ fail = False
+ def __hash__(self):
+ if self.fail:
+ raise Exc()
+ else:
+ return 42
+
+ d = self._empty_mapping()
+ x = BadHash()
+ d[x] = 42
+ x.fail = True
+ self.assertRaises(Exc, d.pop, x)
+
+ def test_mutatingiteration(self): # pragma: no cover
+ d = self._empty_mapping()
+ d[1] = 1
+ try:
+ for i in d:
+ d[i+1] = 1
+ except RuntimeError:
+ pass
+ else:
+ self.fail("changing dict size during iteration doesn't raise Error")
+
+ def test_repr(self): # pragma: no cover
+ d = self._empty_mapping()
+ self.assertEqual(repr(d), '{}')
+ d[1] = 2
+ self.assertEqual(repr(d), '{1: 2}')
+ d = self._empty_mapping()
+ d[1] = d
+ self.assertEqual(repr(d), '{1: {...}}')
+
+ class Exc(Exception): pass
+
+ class BadRepr(object):
+ def __repr__(self):
+ raise Exc()
+
+ d = self._full_mapping({1: BadRepr()})
+ self.assertRaises(Exc, repr, d)
+
+ def test_eq(self):
+ self.assertEqual(self._empty_mapping(), self._empty_mapping())
+ self.assertEqual(self._full_mapping({1: 2}),
+ self._full_mapping({1: 2}))
+
+ class Exc(Exception): pass
+
+ class BadCmp(object):
+ def __eq__(self, other):
+ raise Exc()
+ def __hash__(self):
+ return 1
+
+ d1 = self._full_mapping({BadCmp(): 1})
+ d2 = self._full_mapping({1: 1})
+ self.assertRaises(Exc, lambda: BadCmp()==1)
+ #self.assertRaises(Exc, lambda: d1==d2)
+
+ def test_setdefault(self):
+ TestMappingProtocol.test_setdefault(self)
+
+ class Exc(Exception): pass
+
+ class BadHash(object):
+ fail = False
+ def __hash__(self):
+ if self.fail:
+ raise Exc()
+ else:
+ return 42
+
+ d = self._empty_mapping()
+ x = BadHash()
+ d[x] = 42
+ x.fail = True
+ self.assertRaises(Exc, d.setdefault, x, [])
diff --git a/test/seq_tests.py b/test/seq_tests.py
new file mode 100644
index 0000000..3d87126
--- /dev/null
+++ b/test/seq_tests.py
@@ -0,0 +1,332 @@
+# This file taken from Python, licensed under the Python License Agreement
+
+from __future__ import print_function
+"""
+Tests common to tuple, list and UserList.UserList
+"""
+
+from . import unittest
+from test import test_support
+import sys
+
+# Various iterables
+# This is used for checking the constructor (here and in test_deque.py)
+def iterfunc(seqn):
+ 'Regular generator'
+ for i in seqn:
+ yield i
+
+class Sequence:
+ 'Sequence using __getitem__'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ def __getitem__(self, i):
+ return self.seqn[i]
+
+class IterFunc:
+ 'Sequence using iterator protocol'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ self.i = 0
+ def __iter__(self):
+ return self
+ def __next__(self):
+ if self.i >= len(self.seqn): raise StopIteration
+ v = self.seqn[self.i]
+ self.i += 1
+ return v
+ next = __next__
+
+class IterGen:
+ 'Sequence using iterator protocol defined with a generator'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ self.i = 0
+ def __iter__(self):
+ for val in self.seqn:
+ yield val
+
+class IterNextOnly:
+ 'Missing __getitem__ and __iter__'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ self.i = 0
+ def __next__(self): # pragma: no cover
+ if self.i >= len(self.seqn): raise StopIteration
+ v = self.seqn[self.i]
+ self.i += 1
+ return v
+ next = __next__
+
+class IterNoNext:
+ 'Iterator missing next()'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ self.i = 0
+ def __iter__(self):
+ return self
+
+class IterGenExc:
+ 'Test propagation of exceptions'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ self.i = 0
+ def __iter__(self):
+ return self
+ def __next__(self):
+ 3 // 0
+ next = __next__
+
+class IterFuncStop:
+ 'Test immediate stop'
+ def __init__(self, seqn):
+ pass
+ def __iter__(self):
+ return self
+ def __next__(self):
+ raise StopIteration
+ next = __next__
+
+from itertools import chain
+def itermulti(seqn):
+ 'Test multiple tiers of iterators'
+ return chain(map(lambda x:x, iterfunc(IterGen(Sequence(seqn)))))
+
+class CommonTest(unittest.TestCase):
+ # The type to be tested
+ type2test = None
+
+ def test_constructors(self):
+ l0 = []
+ l1 = [0]
+ l2 = [0, 1]
+
+ u = self.type2test()
+ u0 = self.type2test(l0)
+ u1 = self.type2test(l1)
+ u2 = self.type2test(l2)
+
+ uu = self.type2test(u)
+ uu0 = self.type2test(u0)
+ uu1 = self.type2test(u1)
+ uu2 = self.type2test(u2)
+
+ v = self.type2test(tuple(u))
+ class OtherSeq:
+ def __init__(self, initseq):
+ self.__data = initseq
+ def __len__(self):
+ return len(self.__data)
+ def __getitem__(self, i):
+ return self.__data[i]
+ s = OtherSeq(u0)
+ v0 = self.type2test(s)
+ self.assertEqual(len(v0), len(s))
+
+ s = "this is also a sequence"
+ vv = self.type2test(s)
+ self.assertEqual(len(vv), len(s))
+
+ # Create from various iteratables
+ for s in ("123", "", list(range(1000)), ('do', 1.2), range(2000,2200,5)):
+ for g in (Sequence, IterFunc, IterGen,
+ itermulti, iterfunc):
+ self.assertEqual(self.type2test(g(s)), self.type2test(s))
+ self.assertEqual(self.type2test(IterFuncStop(s)), self.type2test())
+ self.assertEqual(self.type2test(c for c in "123"), self.type2test("123"))
+ self.assertRaises(TypeError, self.type2test, IterNextOnly(s))
+ self.assertRaises(TypeError, self.type2test, IterNoNext(s))
+ self.assertRaises(ZeroDivisionError, self.type2test, IterGenExc(s))
+
+ def test_truth(self):
+ self.assert_(not self.type2test())
+ self.assert_(self.type2test([42]))
+
+ def test_getitem(self):
+ u = self.type2test([0, 1, 2, 3, 4])
+ for i in range(len(u)):
+ self.assertEqual(u[i], i)
+ self.assertEqual(u[int(i)], i)
+ for i in range(-len(u), -1):
+ self.assertEqual(u[i], len(u)+i)
+ self.assertEqual(u[int(i)], len(u)+i)
+ self.assertRaises(IndexError, u.__getitem__, -len(u)-1)
+ self.assertRaises(IndexError, u.__getitem__, len(u))
+ self.assertRaises(ValueError, u.__getitem__, slice(0,10,0))
+
+ u = self.type2test()
+ self.assertRaises(IndexError, u.__getitem__, 0)
+ self.assertRaises(IndexError, u.__getitem__, -1)
+
+ self.assertRaises(TypeError, u.__getitem__)
+
+ a = self.type2test([10, 11])
+ self.assertEqual(a[0], 10)
+ self.assertEqual(a[1], 11)
+ self.assertEqual(a[-2], 10)
+ self.assertEqual(a[-1], 11)
+ self.assertRaises(IndexError, a.__getitem__, -3)
+ self.assertRaises(IndexError, a.__getitem__, 3)
+
+ def test_getslice(self):
+ l = [0, 1, 2, 3, 4]
+ u = self.type2test(l)
+
+ self.assertEqual(u[0:0], self.type2test())
+ self.assertEqual(u[1:2], self.type2test([1]))
+ self.assertEqual(u[-2:-1], self.type2test([3]))
+ self.assertEqual(u[-1000:1000], u)
+ self.assertEqual(u[1000:-1000], self.type2test([]))
+ self.assertEqual(u[:], u)
+ self.assertEqual(u[1:None], self.type2test([1, 2, 3, 4]))
+ self.assertEqual(u[None:3], self.type2test([0, 1, 2]))
+
+ # Extended slices
+ self.assertEqual(u[::], u)
+ self.assertEqual(u[::2], self.type2test([0, 2, 4]))
+ self.assertEqual(u[1::2], self.type2test([1, 3]))
+ self.assertEqual(u[::-1], self.type2test([4, 3, 2, 1, 0]))
+ self.assertEqual(u[::-2], self.type2test([4, 2, 0]))
+ self.assertEqual(u[3::-2], self.type2test([3, 1]))
+ self.assertEqual(u[3:3:-2], self.type2test([]))
+ self.assertEqual(u[3:2:-2], self.type2test([3]))
+ self.assertEqual(u[3:1:-2], self.type2test([3]))
+ self.assertEqual(u[3:0:-2], self.type2test([3, 1]))
+ self.assertEqual(u[::-100], self.type2test([4]))
+ self.assertEqual(u[100:-100:], self.type2test([]))
+ self.assertEqual(u[-100:100:], u)
+ self.assertEqual(u[100:-100:-1], u[::-1])
+ self.assertEqual(u[-100:100:-1], self.type2test([]))
+ self.assertEqual(u[-100:100:2], self.type2test([0, 2, 4]))
+
+ # Test extreme cases with long ints
+ a = self.type2test([0,1,2,3,4])
+ self.assertEqual(a[ -pow(2,128): 3 ], self.type2test([0,1,2]))
+ self.assertEqual(a[ 3: pow(2,145) ], self.type2test([3,4]))
+
+ if sys.version_info[0] < 3:
+ self.assertRaises(TypeError, u.__getslice__)
+
+ def test_contains(self):
+ u = self.type2test([0, 1, 2])
+ for i in u:
+ self.assert_(i in u)
+ for i in min(u)-1, max(u)+1:
+ self.assert_(i not in u)
+
+ self.assertRaises(TypeError, u.__contains__)
+
+ def test_contains_fake(self):
+ class AllEq:
+ # Sequences must use rich comparison against each item
+ # (unless "is" is true, or an earlier item answered)
+ # So instances of AllEq must be found in all non-empty sequences.
+ def __eq__(self, other):
+ return True
+ def __hash__(self): # pragma: no cover
+ raise NotImplemented
+ self.assert_(AllEq() not in self.type2test([]))
+ self.assert_(AllEq() in self.type2test([1]))
+
+ def test_contains_order(self):
+ # Sequences must test in-order. If a rich comparison has side
+ # effects, these will be visible to tests against later members.
+ # In this test, the "side effect" is a short-circuiting raise.
+ class DoNotTestEq(Exception):
+ pass
+ class StopCompares:
+ def __eq__(self, other):
+ raise DoNotTestEq
+
+ checkfirst = self.type2test([1, StopCompares()])
+ self.assert_(1 in checkfirst)
+ checklast = self.type2test([StopCompares(), 1])
+ self.assertRaises(DoNotTestEq, checklast.__contains__, 1)
+
+ def test_len(self):
+ self.assertEqual(len(self.type2test()), 0)
+ self.assertEqual(len(self.type2test([])), 0)
+ self.assertEqual(len(self.type2test([0])), 1)
+ self.assertEqual(len(self.type2test([0, 1, 2])), 3)
+
+ def test_minmax(self):
+ u = self.type2test([0, 1, 2])
+ self.assertEqual(min(u), 0)
+ self.assertEqual(max(u), 2)
+
+ def test_addmul(self):
+ u1 = self.type2test([0])
+ u2 = self.type2test([0, 1])
+ self.assertEqual(u1, u1 + self.type2test())
+ self.assertEqual(u1, self.type2test() + u1)
+ self.assertEqual(u1 + self.type2test([1]), u2)
+ self.assertEqual(self.type2test([-1]) + u1, self.type2test([-1, 0]))
+ self.assertEqual(self.type2test(), u2*0)
+ self.assertEqual(self.type2test(), 0*u2)
+ self.assertEqual(self.type2test(), u2*0)
+ self.assertEqual(self.type2test(), 0*u2)
+ self.assertEqual(u2, u2*1)
+ self.assertEqual(u2, 1*u2)
+ self.assertEqual(u2, u2*1)
+ self.assertEqual(u2, 1*u2)
+ self.assertEqual(u2+u2, u2*2)
+ self.assertEqual(u2+u2, 2*u2)
+ self.assertEqual(u2+u2, u2*2)
+ self.assertEqual(u2+u2, 2*u2)
+ self.assertEqual(u2+u2+u2, u2*3)
+ self.assertEqual(u2+u2+u2, 3*u2)
+
+ class subclass(self.type2test):
+ pass
+ u3 = subclass([0, 1])
+ self.assertEqual(u3, u3*1)
+ self.assert_(u3 is not u3*1)
+
+ def test_iadd(self):
+ u = self.type2test([0, 1])
+ u += self.type2test()
+ self.assertEqual(u, self.type2test([0, 1]))
+ u += self.type2test([2, 3])
+ self.assertEqual(u, self.type2test([0, 1, 2, 3]))
+ u += self.type2test([4, 5])
+ self.assertEqual(u, self.type2test([0, 1, 2, 3, 4, 5]))
+
+ u = self.type2test("spam")
+ u += self.type2test("eggs")
+ self.assertEqual(u, self.type2test("spameggs"))
+
+ def test_imul(self):
+ u = self.type2test([0, 1])
+ u *= 3
+ self.assertEqual(u, self.type2test([0, 1, 0, 1, 0, 1]))
+
+ def test_getitemoverwriteiter(self):
+ # Verify that __getitem__ overrides are not recognized by __iter__
+ class T(self.type2test):
+ def __getitem__(self, key): # pragma: no cover
+ return str(key) + '!!!'
+ self.assertEqual(next(iter(T((1,2)))), 1)
+
+ def test_repeat(self):
+ for m in range(4):
+ s = tuple(range(m))
+ for n in range(-3, 5):
+ self.assertEqual(self.type2test(s*n), self.type2test(s)*n)
+ self.assertEqual(self.type2test(s)*(-4), self.type2test([]))
+ self.assertEqual(id(s), id(s*1))
+
+ def test_subscript(self):
+ a = self.type2test([10, 11])
+ self.assertEqual(a.__getitem__(0), 10)
+ self.assertEqual(a.__getitem__(1), 11)
+ self.assertEqual(a.__getitem__(-2), 10)
+ self.assertEqual(a.__getitem__(-1), 11)
+ self.assertRaises(IndexError, a.__getitem__, -3)
+ self.assertRaises(IndexError, a.__getitem__, 3)
+ self.assertEqual(a.__getitem__(slice(0,1)), self.type2test([10]))
+ self.assertEqual(a.__getitem__(slice(1,2)), self.type2test([11]))
+ self.assertEqual(a.__getitem__(slice(0,2)), self.type2test([10, 11]))
+ self.assertEqual(a.__getitem__(slice(0,3)), self.type2test([10, 11]))
+ self.assertEqual(a.__getitem__(slice(3,5)), self.type2test([]))
+ self.assertRaises(ValueError, a.__getitem__, slice(0, 10, 0))
+ self.assertRaises(TypeError, a.__getitem__, 'x')
diff --git a/test/sorteddict_tests.py b/test/sorteddict_tests.py
new file mode 100644
index 0000000..c81436b
--- /dev/null
+++ b/test/sorteddict_tests.py
@@ -0,0 +1,81 @@
+from . import mapping_tests
+import blist
+
def CmpToKey(mycmp):
    """Convert a cmp-style comparison function into a key= wrapper class.

    Instances compare via __lt__ only, which is all that sorting needs.
    """
    class K(object):
        def __init__(self, obj):
            self.obj = obj

        def __lt__(self, other):
            # mycmp returns -1/0/1; "less than" is exactly -1.
            return mycmp(self.obj, other.obj) == -1

    return K
+
+class sorteddict_test(mapping_tests.TestHashMappingProtocol):
+ type2test = blist.sorteddict
+
+ def _reference(self):
+ """Return a dictionary of values which are invariant by storage
+ in the object under test."""
+ return {1:2, 3:4, 5:6}
+
+ def test_repr(self):
+ d = self._empty_mapping()
+ self.assertEqual(repr(d), 'sorteddict({})')
+ d[1] = 2
+ self.assertEqual(repr(d), 'sorteddict({1: 2})')
+ d = self._empty_mapping()
+ d[1] = d
+ self.assertEqual(repr(d), 'sorteddict({1: sorteddict({...})})')
+
+ class Exc(Exception): pass
+
+ class BadRepr(object):
+ def __repr__(self):
+ raise Exc()
+
+ d = self._full_mapping({1: BadRepr()})
+ self.assertRaises(Exc, repr, d)
+
+ def test_mutatingiteration(self):
+ pass
+
+ def test_sort(self):
+ u = self.type2test.fromkeys([1, 0])
+ self.assertEqual(list(u.keys()), [0, 1])
+
+ u = self.type2test.fromkeys([2,1,0,-1,-2])
+ self.assertEqual(u, self.type2test.fromkeys([-2,-1,0,1,2]))
+ self.assertEqual(list(u.keys()), [-2,-1,0,1,2])
+
+ a = self.type2test.fromkeys(reversed(list(range(512))))
+ self.assertEqual(list(a.keys()), list(range(512)))
+
+ def revcmp(a, b): # pragma: no cover
+ if a == b:
+ return 0
+ elif a < b:
+ return 1
+ else: # a > b
+ return -1
+ u = self.type2test.fromkeys([2,1,0,-1,-2], key=CmpToKey(revcmp))
+ self.assertEqual(list(u.keys()), [2,1,0,-1,-2])
+
+ # The following dumps core in unpatched Python 1.5:
+ def myComparison(x,y):
+ xmod, ymod = x%3, y%7
+ if xmod == ymod:
+ return 0
+ elif xmod < ymod:
+ return -1
+ else: # xmod > ymod
+ return 1
+
+ self.type2test.fromkeys(list(range(12)), key=CmpToKey(myComparison))
+
+ #def selfmodifyingComparison(x,y):
+ # z[x+y] = None
+ # return cmp(x, y)
+ #z = self.type2test(CmpToKey(selfmodifyingComparison))
+ #self.assertRaises(ValueError, z.update, [(i,i) for i in range(12)])
+
+ self.assertRaises(TypeError, self.type2test.fromkeys, 42, 42, 42, 42)
diff --git a/test/sortedlist_tests.py b/test/sortedlist_tests.py
new file mode 100644
index 0000000..7618b4d
--- /dev/null
+++ b/test/sortedlist_tests.py
@@ -0,0 +1,614 @@
+# This file based loosely on Python's list_tests.py.
+
+import unittest, collections, operator
+import sys
+from . import list_tests, seq_tests
+import blist
+import random
+import gc
+
def CmpToKey(mycmp):
    """Adapt an old-style three-way comparison function to a key= class.

    The returned class wraps a value and implements only __lt__ (all
    that sorting requires): ``x < y`` holds exactly when *mycmp*
    reports -1 for the wrapped values.
    """
    class Wrapper(object):
        def __init__(self, wrapped):
            self.obj = wrapped

        def __lt__(self, other):
            return mycmp(self.obj, other.obj) == -1

    return Wrapper
+
class SortedBase(object):
    """Tests shared by all of blist's sorted containers.

    Subclasses supply ``type2test`` and may override the item factories
    below (e.g. to wrap items for weak-reference containers).
    """

    def build_items(self, n):
        # n distinct items in increasing order.
        return list(range(n))

    def build_item(self, x):
        return x

    def test_empty_repr(self):
        self.assertEqual('%s()' % self.type2test.__name__,
                         repr(self.type2test()))

    def validate_comparison(self, instance):
        # Verify that comparing/combining `instance` with an unknown
        # right-hand type falls back to the right operand's reflected
        # method (Other records that it was consulted).
        # NOTE(review): collections.Set is the pre-3.3 alias of
        # collections.abc.Set and was removed in Python 3.10 -- confirm
        # the supported Python range before relying on it.
        if sys.version_info[0] < 3 and isinstance(instance, collections.Set):
            ops = ['ne', 'or', 'and', 'xor', 'sub']
        else:
            ops = ['lt', 'gt', 'le', 'ge', 'ne', 'or', 'and', 'xor', 'sub']
        operators = {}
        for op in ops:
            name = '__'+op+'__'
            operators['__'+op+'__'] = getattr(operator, name)

        class Other(object):
            def __init__(self):
                self.right_side = False
            def __eq__(self, other):
                self.right_side = True
                return True
            __lt__ = __eq__
            __gt__ = __eq__
            __le__ = __eq__
            __ge__ = __eq__
            __ne__ = __eq__
            __ror__ = __eq__
            __rand__ = __eq__
            __rxor__ = __eq__
            __rsub__ = __eq__

        for name, op in operators.items():
            if not hasattr(instance, name): continue
            other = Other()
            op(instance, other)
            self.assertTrue(other.right_side,'Right side not called for %s.%s'
                            % (type(instance), name))

    def test_right_side(self):
        self.validate_comparison(self.type2test())

    def test_delitem(self):
        # Deletion by positive and negative index, plus IndexError on
        # out-of-range indices and TypeError with no index at all.
        items = self.build_items(2)
        a = self.type2test(items)
        del a[1]
        self.assertEqual(a, self.type2test(items[:1]))
        del a[0]
        self.assertEqual(a, self.type2test([]))

        a = self.type2test(items)
        del a[-2]
        self.assertEqual(a, self.type2test(items[1:]))
        del a[-1]
        self.assertEqual(a, self.type2test([]))

        a = self.type2test(items)
        self.assertRaises(IndexError, a.__delitem__, -3)
        self.assertRaises(IndexError, a.__delitem__, 2)

        a = self.type2test([])
        self.assertRaises(IndexError, a.__delitem__, 0)

        self.assertRaises(TypeError, a.__delitem__)

    def test_delslice(self):
        # Slice deletion with positive, negative, open-ended and full
        # slices; each variant is exercised twice.
        items = self.build_items(2)
        a = self.type2test(items)
        del a[1:2]
        del a[0:1]
        self.assertEqual(a, self.type2test([]))

        a = self.type2test(items)
        del a[1:2]
        del a[0:1]
        self.assertEqual(a, self.type2test([]))

        a = self.type2test(items)
        del a[-2:-1]
        self.assertEqual(a, self.type2test(items[1:]))

        a = self.type2test(items)
        del a[-2:-1]
        self.assertEqual(a, self.type2test(items[1:]))

        a = self.type2test(items)
        del a[1:]
        del a[:1]
        self.assertEqual(a, self.type2test([]))

        a = self.type2test(items)
        del a[1:]
        del a[:1]
        self.assertEqual(a, self.type2test([]))

        a = self.type2test(items)
        del a[-1:]
        self.assertEqual(a, self.type2test(items[:1]))

        a = self.type2test(items)
        del a[-1:]
        self.assertEqual(a, self.type2test(items[:1]))

        a = self.type2test(items)
        del a[:]
        self.assertEqual(a, self.type2test([]))

    def test_out_of_range(self):
        # Both read and delete on an empty container raise IndexError.
        u = self.type2test()
        def del_test():
            del u[0]
        self.assertRaises(IndexError, lambda: u[0])
        self.assertRaises(IndexError, del_test)

    def test_bad_mul(self):
        # Multiplying by a non-integer must raise TypeError for * and *=.
        u = self.type2test()
        self.assertRaises(TypeError, lambda: u * 'q')
        def imul_test():
            u = self.type2test()
            u *= 'q'
        self.assertRaises(TypeError, imul_test)

    def test_pop(self):
        # pop(i) must return the i-th smallest item even after shuffling
        # the insertion order; popping from the back keeps indices valid.
        lst = self.build_items(20)
        random.shuffle(lst)
        u = self.type2test(lst)
        for i in range(20-1,-1,-1):
            x = u.pop(i)
            self.assertEqual(x, i)
        self.assertEqual(0, len(u))

    def test_reversed(self):
        lst = list(range(20))
        a = self.type2test(lst)
        r = reversed(a)
        self.assertEqual(list(r), list(range(19, -1, -1)))
        # Exhausted iterator keeps raising StopIteration (py3 __next__
        # vs py2 next spelling).
        if hasattr(r, '__next__'): # pragma: no cover
            self.assertRaises(StopIteration, r.__next__)
        else: # pragma: no cover
            self.assertRaises(StopIteration, r.next)
        self.assertEqual(self.type2test(reversed(self.type2test())),
                         self.type2test())

    def test_mismatched_types(self):
        # An item that refuses ordering comparisons must be rejected by
        # add() but safely reported absent by the query methods.
        class NotComparable:
            def __lt__(self, other): # pragma: no cover
                raise TypeError
            def __cmp__(self, other): # pragma: no cover
                raise TypeError
        NotComparable = NotComparable()

        item = self.build_item(5)
        sl = self.type2test()
        sl.add(item)
        self.assertRaises(TypeError, sl.add, NotComparable)
        self.assertFalse(NotComparable in sl)
        self.assertEqual(sl.count(NotComparable), 0)
        sl.discard(NotComparable)
        self.assertRaises(ValueError, sl.index, NotComparable)

    def test_order(self):
        # Bulk construction and incremental add() must both yield sorted
        # iteration order; discard() keeps the remainder sorted.
        # NOTE(review): see the collections.Set alias caveat above.
        stuff = [self.build_item(random.randrange(1000000))
                 for i in range(1000)]
        if issubclass(self.type2test, collections.Set):
            stuff = set(stuff)
        sorted_stuff = list(sorted(stuff))
        u = self.type2test

        self.assertEqual(sorted_stuff, list(u(stuff)))
        sl = u()
        for x in stuff:
            sl.add(x)
        self.assertEqual(sorted_stuff, list(sl))
        x = sorted_stuff.pop(len(stuff)//2)
        sl.discard(x)
        self.assertEqual(sorted_stuff, list(sl))

    def test_constructors(self):
        # Based on the seq_test, but without adding incomparable types
        # to the list.

        l0 = self.build_items(0)
        l1 = self.build_items(1)
        l2 = self.build_items(2)

        u = self.type2test()
        u0 = self.type2test(l0)
        u1 = self.type2test(l1)
        u2 = self.type2test(l2)

        uu = self.type2test(u)
        uu0 = self.type2test(u0)
        uu1 = self.type2test(u1)
        uu2 = self.type2test(u2)

        v = self.type2test(tuple(u))
        # A duck-typed sequence (len + getitem only) must also work.
        class OtherSeq:
            def __init__(self, initseq):
                self.__data = initseq
            def __len__(self):
                return len(self.__data)
            def __getitem__(self, i):
                return self.__data[i]
        s = OtherSeq(u0)
        v0 = self.type2test(s)
        self.assertEqual(len(v0), len(s))

    def test_sort(self):
        # based on list_tests.py
        lst = [1, 0]
        lst = [self.build_item(x) for x in lst]
        u = self.type2test(lst)
        self.assertEqual(list(u), [0, 1])

        lst = [2,1,0,-1,-2]
        lst = [self.build_item(x) for x in lst]
        u = self.type2test(lst)
        self.assertEqual(list(u), [-2,-1,0,1,2])

        lst = list(range(512))
        lst = [self.build_item(x) for x in lst]
        a = self.type2test(reversed(lst))
        self.assertEqual(list(a), lst)

        # Re-sorting the same elements under a reversing key= function.
        def revcmp(a, b): # pragma: no cover
            if a == b:
                return 0
            elif a < b:
                return 1
            else: # a > b
                return -1
        u = self.type2test(u, key=CmpToKey(revcmp))
        self.assertEqual(list(u), [2,1,0,-1,-2])

        # The following dumps core in unpatched Python 1.5:
        def myComparison(x,y):
            xmod, ymod = x%3, y%7
            if xmod == ymod:
                return 0
            elif xmod < ymod:
                return -1
            else: # xmod > ymod
                return 1
        z = self.type2test(list(range(12)), key=CmpToKey(myComparison))

        self.assertRaises(TypeError, self.type2test, 42, 42, 42, 42)
+
class StrongSortedBase(SortedBase, seq_tests.CommonTest):
    """SortedBase plus the generic sequence tests, for containers that
    hold strong references.

    The inherited sequence tests for concatenation, repetition and
    positional semantics don't apply to sorted containers, so they are
    overridden with a no-op.
    """
    def not_applicable(self):
        pass
    test_repeat = not_applicable
    test_imul = not_applicable
    test_addmul = not_applicable
    test_iadd = not_applicable
    test_getslice = not_applicable
    test_contains_order = not_applicable
    test_contains_fake = not_applicable

    def test_constructors2(self):
        # Construction from strings, sequences, and the iterator helper
        # classes from seq_tests; broken iterables raise the expected
        # exception types.
        s = "a seq"
        vv = self.type2test(s)
        self.assertEqual(len(vv), len(s))

        # Create from various iteratables
        for s in ("123", "", list(range(1000)), (1.5, 1.2), range(2000,2200,5)):
            for g in (seq_tests.Sequence, seq_tests.IterFunc,
                      seq_tests.IterGen, seq_tests.itermulti,
                      seq_tests.iterfunc):
                self.assertEqual(self.type2test(g(s)), self.type2test(s))
            self.assertEqual(self.type2test(seq_tests.IterFuncStop(s)),
                             self.type2test())
            self.assertEqual(self.type2test(c for c in "123"),
                             self.type2test("123"))
            self.assertRaises(TypeError, self.type2test,
                              seq_tests.IterNextOnly(s))
            self.assertRaises(TypeError, self.type2test,
                              seq_tests.IterNoNext(s))
            self.assertRaises(ZeroDivisionError, self.type2test,
                              seq_tests.IterGenExc(s))
+
class weak_int:
    """An integer wrapper that supports weak references.

    Comparison operators and ``%`` accept either another weak_int or a
    plain number on the right-hand side, so instances interoperate with
    raw ints throughout the tests.
    """
    def __init__(self, v):
        self.value = v

    def unwrap(self, other):
        # Accept a weak_int or a bare value on the right-hand side.
        return other.value if isinstance(other, weak_int) else other

    def __hash__(self):
        # Hash like the underlying int so equal values collide correctly.
        return hash(self.value)

    def __repr__(self): # pragma: no cover
        return repr(self.value)

    def __lt__(self, other):
        return self.value < self.unwrap(other)

    def __le__(self, other):
        return self.value <= self.unwrap(other)

    def __gt__(self, other):
        return self.value > self.unwrap(other)

    def __ge__(self, other):
        return self.value >= self.unwrap(other)

    def __eq__(self, other):
        return self.value == self.unwrap(other)

    def __ne__(self, other):
        return self.value != self.unwrap(other)

    def __mod__(self, other):
        return self.value % self.unwrap(other)

    def __neg__(self):
        return weak_int(-self.value)
+
class weak_manager():
    """Context manager supplying weak_int fixtures for weak-container tests.

    ``all`` holds ten wrapped ints (shuffled); ``live`` is a random
    subsample of them, built before the shuffle so it stays in ascending
    order.  On exit the strong references in ``all`` are dropped and a
    collection is forced, so only items also referenced via ``live``
    survive in a weak container under test.
    """
    def __init__(self):
        wrapped = [weak_int(i) for i in range(10)]
        self.all = wrapped
        # Keep each item with probability 1/2.
        self.live = [w for w in wrapped if random.randrange(2)]
        random.shuffle(self.all)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Drop the only strong references and collect immediately so the
        # weak container sheds the dead entries before assertions run.
        del self.all
        gc.collect()
+
class WeakSortedBase(SortedBase, unittest.TestCase):
    """SortedBase specialized for weak-reference containers: items are
    weak_int wrappers, and most tests verify that entries vanish once
    their last strong reference is dropped."""

    def build_items(self, n):
        return [weak_int(i) for i in range(n)]

    def build_item(self, x):
        return weak_int(x)

    def test_collapse(self):
        # Once the only strong references die, the container empties.
        items = self.build_items(10)
        u = self.type2test(items)
        del items
        gc.collect()
        self.assertEqual(list(u), [])

    def test_sort(self):
        # based on list_tests.py
        x = [weak_int(i) for i in [1, 0]]
        u = self.type2test(x)
        self.assertEqual(list(u), list(reversed(x)))

        x = [weak_int(i) for i in [2,1,0,-1,-2]]
        u = self.type2test(x)
        self.assertEqual(list(u), list(reversed(x)))

        #y = [weak_int(i) for i in reversed(list(range(512)))]
        #a = self.type2test(y)
        #self.assertEqual(list(a), list(reversed(y)))

        # Reversing key= should restore the original (descending) order.
        def revcmp(a, b): # pragma: no cover
            if a == b:
                return 0
            elif a < b:
                return 1
            else: # a > b
                return -1
        u = self.type2test(u, key=CmpToKey(revcmp))
        self.assertEqual(list(u), x)

        # The following dumps core in unpatched Python 1.5:
        def myComparison(x,y):
            xmod, ymod = x%3, y%7
            if xmod == ymod:
                return 0
            elif xmod < ymod:
                return -1
            else: # xmod > ymod
                return 1
        x = [weak_int(i) for i in range(12)]
        z = self.type2test(x, key=CmpToKey(myComparison))

        self.assertRaises(TypeError, self.type2test, 42, 42, 42, 42)

    def test_constructor(self):
        # After the managed references die, only m.live remains.
        with weak_manager() as m:
            wsl = self.type2test(m.all)
        self.assertEqual(list(wsl), m.live)

    def test_add(self):
        with weak_manager() as m:
            wsl = self.type2test()
            for x in m.all:
                wsl.add(x)
            del x
        self.assertEqual(list(wsl), m.live)

    def test_discard(self):
        with weak_manager() as m:
            wsl = self.type2test(m.all)
            x = m.live.pop(len(m.live)//2)
            wsl.discard(x)
        self.assertEqual(list(wsl), m.live)

    def test_contains(self):
        with weak_manager() as m:
            wsl = self.type2test(m.all)
            for x in m.live:
                self.assertTrue(x in wsl)
        self.assertFalse(weak_int(-1) in wsl)

    def test_iter(self):
        with weak_manager() as m:
            wsl = self.type2test(m.all)
            for i, x in enumerate(wsl):
                self.assertEqual(x, m.live[i])

    def test_getitem(self):
        with weak_manager() as m:
            wsl = self.type2test(m.all)
        for i in range(len(m.live)):
            self.assertEqual(wsl[i], m.live[i])

    def test_reversed(self):
        with weak_manager() as m:
            wsl = self.type2test(m.all)
            r1 = list(reversed(wsl))
        r2 = list(reversed(m.live))
        self.assertEqual(r1, r2)

        # A dead entry must also be skipped during reverse iteration.
        # NOTE(review): relies on CPython's immediate refcount collection
        # after `del all[-1]` (no gc.collect() here) -- confirm if other
        # interpreters are supported.
        all = [weak_int(i) for i in range(6)]
        wsl = self.type2test(all)
        del all[-1]
        self.assertEqual(list(reversed(wsl)), list(reversed(all)))

    def test_index(self):
        with weak_manager() as m:
            wsl = self.type2test(m.all)
            for x in m.live:
                self.assertEqual(wsl[wsl.index(x)], x)
            self.assertRaises(ValueError, wsl.index, weak_int(-1))

    def test_count(self):
        with weak_manager() as m:
            wsl = self.type2test(m.all)
            for x in m.live:
                self.assertEqual(wsl.count(x), 1)
            self.assertEqual(wsl.count(weak_int(-1)), 0)

    def test_getslice(self):
        with weak_manager() as m:
            wsl = self.type2test(m.all)
            self.assertEqual(m.live, list(wsl[:]))
+
class SortedListMixin:
    """Tests specific to sorted *list* types (duplicates allowed).

    Mixed into StrongSortedBase/WeakSortedBase subclasses; relies on
    ``self.type2test`` and the ``build_items()``/``build_item()``
    helpers those bases provide.

    The deprecated ``TestCase.assert_`` alias (removed in Python 3.12)
    has been replaced with ``assertTrue`` throughout.
    """
    def test_eq(self):
        items = self.build_items(20)
        u = self.type2test(items)
        v = self.type2test(items, key=lambda x: -x)
        # Same elements but different sort orders: lists compare unequal.
        self.assertNotEqual(u, v)

    def test_cmp(self):
        # Lexicographic ordering between two sorted lists.
        items = self.build_items(20)
        u = self.type2test(items)
        low = u[:10]
        high = u[10:]
        self.assertTrue(low != high)
        self.assertTrue(low == u[:10])
        self.assertTrue(low < high)
        self.assertTrue(low <= high, str((low, high)))
        self.assertTrue(high > low)
        self.assertTrue(high >= low)
        self.assertFalse(low == high)
        self.assertFalse(high < low)
        self.assertFalse(high <= low)
        self.assertFalse(low > high)
        self.assertFalse(low >= high)

        low = u[:5]
        self.assertTrue(low != high)
        self.assertFalse(low == high)

    def test_update(self):
        items = self.build_items(20)
        u = self.type2test()
        u.update(items)
        self.assertEqual(u, self.type2test(items))

    def test_remove(self):
        # remove() deletes one occurrence; a second remove of the same
        # value raises ValueError (list semantics, unlike set's KeyError).
        items = self.build_items(20)
        u = self.type2test(items)
        u.remove(items[-1])
        self.assertEqual(u, self.type2test(items[:19]))
        self.assertRaises(ValueError, u.remove, items[-1])

    def test_mul(self):
        # Multiplication repeats elements; 1*u returns an equal copy,
        # never the same object.
        items = self.build_items(2)
        u1 = self.type2test(items[:1])
        u2 = self.type2test(items)
        self.assertEqual(self.type2test(), u2*0)
        self.assertEqual(self.type2test(), 0*u2)
        self.assertEqual(self.type2test(), u2*0)
        self.assertEqual(self.type2test(), 0*u2)
        self.assertEqual(u2, u2*1)
        self.assertEqual(u2, 1*u2)
        self.assertEqual(u2, u2*1)
        self.assertEqual(u2, 1*u2)
        self.assertEqual(self.type2test(items + items), u2*2)
        self.assertEqual(self.type2test(items + items), 2*u2)
        self.assertEqual(self.type2test(items + items + items), 3*u2)
        self.assertEqual(self.type2test(items + items + items), u2*3)

        class subclass(self.type2test):
            pass
        u3 = subclass(items)
        self.assertEqual(u3, u3*1)
        self.assertTrue(u3 is not u3*1)

    def test_imul(self):
        # In-place multiplication; *= on an empty container keeps the
        # same object identity.
        items = self.build_items(2)
        items6 = items[:1]*3 + items[1:]*3
        u = self.type2test(items)
        u *= 3
        self.assertEqual(u, self.type2test(items6))
        u *= 0
        self.assertEqual(u, self.type2test([]))
        s = self.type2test([])
        oldid = id(s)
        s *= 10
        self.assertEqual(id(s), oldid)

    def test_repr(self):
        # '<name>([...])' form, with '...' for self-reference.
        name = self.type2test.__name__
        u = self.type2test()
        self.assertEqual(repr(u), '%s()' % name)
        items = self.build_items(3)
        u.update(items)
        self.assertEqual(repr(u), '%s([0, 1, 2])' % name)
        u = self.type2test()
        u.update([u])
        self.assertEqual(repr(u), '%s([%s(...)])' % (name, name))

    def test_bisect(self):
        # Bisection respects the container's key= ordering.
        items = self.build_items(5)
        del items[0]
        del items[2] # We end up with [1, 2, 4]
        u = self.type2test(items, key=lambda x: -x) # We end up with [4, 2, 1]
        self.assertEqual(u.bisect_left(3), 1)
        self.assertEqual(u.bisect(2), 2) # bisect == bisect_right
        self.assertEqual(u.bisect_right(2), 2)
+
class SortedSetMixin:
    """Tests specific to sorted *set* types (duplicate adds collapse)."""

    def test_duplicates(self):
        # Adding every item twice leaves each exactly once.
        # NOTE(review): randrange(100000) can, rarely, produce duplicate
        # values in `stuff`, which would make this test flaky -- confirm
        # whether that is acceptable here.
        u = self.type2test
        ss = u()
        stuff = [weak_int(random.randrange(100000)) for i in range(10)]
        sorted_stuff = list(sorted(stuff))
        for x in stuff:
            ss.add(x)
        for x in stuff:
            ss.add(x)
        self.assertEqual(sorted_stuff, list(ss))
        x = sorted_stuff.pop(len(stuff)//2)
        ss.discard(x)
        self.assertEqual(sorted_stuff, list(ss))

    def test_eq(self):
        # Unlike lists, sets compare by membership, so the sort key does
        # not affect equality.
        items = self.build_items(20)
        u = self.type2test(items)
        v = self.type2test(items, key=lambda x: -x)
        self.assertEqual(u, v)

    def test_remove(self):
        # Set semantics: removing a missing element raises KeyError.
        items = self.build_items(20)
        u = self.type2test(items)
        u.remove(items[-1])
        self.assertEqual(u, self.type2test(items[:19]))
        self.assertRaises(KeyError, u.remove, items[-1])
+
class SortedListTest(StrongSortedBase, SortedListMixin):
    # blist.sortedlist over strongly-referenced integer items.
    type2test = blist.sortedlist
+
class WeakSortedListTest(WeakSortedBase, SortedListMixin):
    # blist.weaksortedlist over weakly-referenced weak_int items.
    type2test = blist.weaksortedlist

    def test_advance(self):
        # When the first of two equal entries dies, count() must skip
        # the dead reference and still find the surviving duplicate.
        items = [weak_int(0), weak_int(0)]
        u = self.type2test(items)
        del items[0]
        gc.collect()
        self.assertEqual(u.count(items[0]), 1)
+
class SortedSetTest(StrongSortedBase, SortedSetMixin):
    # blist.sortedset over strongly-referenced items.
    type2test = blist.sortedset
+
class WeakSortedSetTest(WeakSortedBase, SortedSetMixin):
    # blist.weaksortedset over weakly-referenced weak_int items.
    type2test = blist.weaksortedset

    def test_repr(self):
        # weaksortedset reprs like 'weaksortedset([...])' with the items
        # in sorted order (build_items returns them pre-sorted).
        items = self.build_items(20)
        u = self.type2test(items)
        self.assertEqual(repr(u), 'weaksortedset(%s)' % repr(items))
diff --git a/test/test_list.py b/test/test_list.py
new file mode 100644
index 0000000..51c22e4
--- /dev/null
+++ b/test/test_list.py
@@ -0,0 +1,39 @@
+from __future__ import print_function
+from . import unittest
+from test import test_support, list_tests
+
class ListTest(list_tests.CommonTest):
    """Run the generic mutable-sequence tests against the builtin list.

    The deprecated ``TestCase.assert_`` alias (removed in Python 3.12)
    has been replaced with ``assertTrue``.
    """
    type2test = list

    def test_truth(self):
        super(ListTest, self).test_truth()
        self.assertTrue(not [])
        self.assertTrue([42])

    def test_identity(self):
        # Two empty-list literals are distinct objects.
        self.assertTrue([] is not [])

    def test_len(self):
        super(ListTest, self).test_len()
        self.assertEqual(len([]), 0)
        self.assertEqual(len([0]), 1)
        self.assertEqual(len([0, 1, 2]), 3)
+
def test_main(verbose=None):
    """Entry point in the style of CPython's regrtest drivers."""
    test_support.run_unittest(ListTest)

    # verify reference counting
    # NOTE(review): the gettotalrefcount() line below is commented out
    # (it needs a --with-pydebug build), so this loop only re-runs the
    # suite with GC stats enabled and prints a list of Nones.
    import sys
    if verbose:
        import gc
        counts = [None] * 5
        for i in range(len(counts)):
            test_support.run_unittest(ListTest)
            gc.set_debug(gc.DEBUG_STATS)
            gc.collect()
            #counts[i] = sys.gettotalrefcount()
        print(counts)
+
+
# Allow running this test module directly as a script.
if __name__ == "__main__":
    test_main(verbose=False)
diff --git a/test/test_set.py b/test/test_set.py
new file mode 100644
index 0000000..a27d01e
--- /dev/null
+++ b/test/test_set.py
@@ -0,0 +1,1536 @@
+# This file taken from Python, licensed under the Python License Agreement
+
+from __future__ import print_function
+
+import unittest
+from . import test_support as support
+import gc
+import weakref
+import operator
+import copy
+import pickle
+from random import randrange, shuffle
+import sys
+import warnings
+import collections
+
+from blist import sortedset as set
+
class PassThru(Exception):
    # Sentinel exception used to check that exceptions raised inside an
    # input iterable propagate out of set operations unchanged.
    pass
+
def check_pass_thru():
    # A generator that raises as soon as it is iterated; the unreachable
    # `yield` below is what makes this function a generator at all.
    raise PassThru
    yield 1 # pragma: no cover
+
class BadCmp: # pragma: no cover
    # Hashable but blows up on any comparison; used to verify that
    # comparison errors surface from insertion/lookup paths.
    def __hash__(self):
        return 1
    def __lt__(self, other):
        raise RuntimeError
    def __eq__(self, other):
        raise RuntimeError
+
class ReprWrapper:
    'Used to test self-referential repr() calls'
    # Tests assign .value (typically the containing set) before calling
    # repr() on the container.
    def __repr__(self):
        return repr(self.value)
+
class TestJointOps(unittest.TestCase):
    """Tests common to both set and frozenset.

    Note that in this module `set` is rebound to blist.sortedset by the
    import at the top of the file; `self.thetype` is supplied by
    subclasses.

    Fix: `test_intersection` previously compared `self.thetype` against
    `frozenset()` (an *instance*), which is always False, making the
    identity branch unreachable; it now compares against the `frozenset`
    type as intended.
    """

    def setUp(self):
        self.word = word = 'simsalabim'
        self.otherword = 'madagascar'
        self.letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
        self.s = self.thetype(word)
        self.d = dict.fromkeys(word)

    def test_new_or_init(self):
        self.assertRaises(TypeError, self.thetype, [], 2)
        self.assertRaises(TypeError, set().__init__, a=1)

    def test_uniquification(self):
        actual = sorted(self.s)
        expected = sorted(self.d)
        self.assertEqual(actual, expected)
        self.assertRaises(PassThru, self.thetype, check_pass_thru())

    def test_len(self):
        self.assertEqual(len(self.s), len(self.d))

    def test_contains(self):
        for c in self.letters:
            self.assertEqual(c in self.s, c in self.d)
        s = self.thetype([frozenset(self.letters)])
        # Issue 8752
        #self.assertIn(self.thetype(self.letters), s)

    def test_union(self):
        u = self.s.union(self.otherword)
        for c in self.letters:
            self.assertEqual(c in u, c in self.d or c in self.otherword)
        self.assertEqual(self.s, self.thetype(self.word))
        self.assertRaises(PassThru, self.s.union, check_pass_thru())
        for C in set, frozenset, dict.fromkeys, str, list, tuple:
            self.assertEqual(self.thetype('abcba').union(C('cdc')), set('abcd'))
            self.assertEqual(self.thetype('abcba').union(C('efgfe')), set('abcefg'))
            self.assertEqual(self.thetype('abcba').union(C('ccb')), set('abc'))
            self.assertEqual(self.thetype('abcba').union(C('ef')), set('abcef'))
            self.assertEqual(self.thetype('abcba').union(C('ef'), C('fg')), set('abcefg'))

        # Issue #6573
        x = self.thetype()
        self.assertEqual(x.union(set([1]), x, set([2])), self.thetype([1, 2]))

    def test_or(self):
        i = self.s.union(self.otherword)
        self.assertEqual(self.s | set(self.otherword), i)
        self.assertEqual(self.s | frozenset(self.otherword), i)
        self.assertEqual(self.s | self.otherword, i)

    def test_intersection(self):
        i = self.s.intersection(self.otherword)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c in self.otherword)
        self.assertEqual(self.s, self.thetype(self.word))
        self.assertRaises(PassThru, self.s.intersection, check_pass_thru())
        for C in set, frozenset, dict.fromkeys, str, list, tuple:
            self.assertEqual(self.thetype('abcba').intersection(C('cdc')), set('cc'))
            self.assertEqual(self.thetype('abcba').intersection(C('efgfe')), set(''))
            self.assertEqual(self.thetype('abcba').intersection(C('ccb')), set('bc'))
            self.assertEqual(self.thetype('abcba').intersection(C('ef')), set(''))
            self.assertEqual(self.thetype('abcba').intersection(C('cbcf'), C('bag')), set('b'))
        s = self.thetype('abcba')
        z = s.intersection()
        # Compare against the frozenset *type* (the original compared
        # against an empty frozenset instance, which is always False).
        if self.thetype == frozenset: # pragma: no cover
            self.assertEqual(id(s), id(z))
        else:
            self.assertNotEqual(id(s), id(z))

    def test_isdisjoint(self):
        def f(s1, s2):
            'Pure python equivalent of isdisjoint()'
            return not set(s1).intersection(s2)
        for larg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef':
            s1 = self.thetype(larg)
            for rarg in '', 'a', 'ab', 'abc', 'ababac', 'cdc', 'cc', 'efgfe', 'ccb', 'ef':
                for C in set, frozenset, dict.fromkeys, str, list, tuple:
                    s2 = C(rarg)
                    actual = s1.isdisjoint(s2)
                    expected = f(s1, s2)
                    self.assertEqual(actual, expected)
                    self.assertTrue(actual is True or actual is False)

    def test_and(self):
        i = self.s.intersection(self.otherword)
        self.assertEqual(self.s & set(self.otherword), i)
        self.assertEqual(self.s & frozenset(self.otherword), i)
        self.assertEqual(self.s & self.otherword, i)

    def test_difference(self):
        i = self.s.difference(self.otherword)
        for c in self.letters:
            self.assertEqual(c in i, c in self.d and c not in self.otherword)
        self.assertEqual(self.s, self.thetype(self.word))
        self.assertRaises(PassThru, self.s.difference, check_pass_thru())
        for C in set, frozenset, dict.fromkeys, str, list, tuple:
            self.assertEqual(self.thetype('abcba').difference(C('cdc')), set('ab'))
            self.assertEqual(self.thetype('abcba').difference(C('efgfe')), set('abc'))
            self.assertEqual(self.thetype('abcba').difference(C('ccb')), set('a'))
            self.assertEqual(self.thetype('abcba').difference(C('ef')), set('abc'))
            self.assertEqual(self.thetype('abcba').difference(), set('abc'))
            self.assertEqual(self.thetype('abcba').difference(C('a'), C('b')), set('c'))

    def test_sub(self):
        i = self.s.difference(self.otherword)
        self.assertEqual(self.s - set(self.otherword), i)
        self.assertEqual(self.s - frozenset(self.otherword), i)
        self.assertEqual(self.s - self.otherword, i)

    def test_symmetric_difference(self):
        i = self.s.symmetric_difference(self.otherword)
        for c in self.letters:
            self.assertEqual(c in i, (c in self.d) ^ (c in self.otherword))
        self.assertEqual(self.s, self.thetype(self.word))
        self.assertRaises(PassThru, self.s.symmetric_difference, check_pass_thru())
        for C in set, frozenset, dict.fromkeys, str, list, tuple:
            self.assertEqual(self.thetype('abcba').symmetric_difference(C('cdc')), set('abd'))
            self.assertEqual(self.thetype('abcba').symmetric_difference(C('efgfe')), set('abcefg'))
            self.assertEqual(self.thetype('abcba').symmetric_difference(C('ccb')), set('a'))
            self.assertEqual(self.thetype('abcba').symmetric_difference(C('ef')), set('abcef'))

    def test_xor(self):
        i = self.s.symmetric_difference(self.otherword)
        self.assertEqual(self.s ^ set(self.otherword), i)
        self.assertEqual(self.s ^ frozenset(self.otherword), i)
        self.assertEqual(self.s ^ self.otherword, i)

    def test_equality(self):
        # Sets compare equal to sets with the same members, never to a
        # plain string of the same characters.
        self.assertEqual(self.s, set(self.word))
        self.assertEqual(self.s, frozenset(self.word))
        self.assertEqual(self.s == self.word, False)
        self.assertNotEqual(self.s, set(self.otherword))
        self.assertNotEqual(self.s, frozenset(self.otherword))
        self.assertEqual(self.s != self.word, True)

    def test_setOfFrozensets(self):
        # Equal frozensets collapse to a single member.
        t = map(frozenset, ['abcdef', 'bcd', 'bdcb', 'fed', 'fedccba'])
        s = self.thetype(t)
        self.assertEqual(len(s), 3)

    def test_sub_and_super(self):
        p, q, r = map(self.thetype, ['ab', 'abcde', 'def'])
        self.assertTrue(p < q)
        self.assertTrue(p <= q)
        self.assertTrue(q <= q)
        self.assertTrue(q > p)
        self.assertTrue(q >= p)
        self.assertFalse(q < r)
        self.assertFalse(q <= r)
        self.assertFalse(q > r)
        self.assertFalse(q >= r)
        self.assertTrue(set('a').issubset('abc'))
        self.assertTrue(set('abc').issuperset('a'))
        self.assertFalse(set('a').issubset('cbs'))
        self.assertFalse(set('cbs').issuperset('a'))

    def test_pickling(self):
        # Round-trip through every pickle protocol; instance attributes
        # must survive too for non-builtin set types.
        for i in range(pickle.HIGHEST_PROTOCOL + 1):
            p = pickle.dumps(self.s, i)
            dup = pickle.loads(p)
            self.assertEqual(self.s, dup, "%s != %s" % (self.s, dup))
            if type(self.s) not in (set, frozenset):
                self.s.x = 10
                p = pickle.dumps(self.s)
                dup = pickle.loads(p)
                self.assertEqual(self.s.x, dup.x)

    def test_deepcopy(self):
        # deepcopy must deep-copy the elements (Tracer increments its
        # value on each copy, so the copy is observable).
        class Tracer:
            def __init__(self, value):
                self.value = value
            def __hash__(self): # pragma: no cover
                return self.value
            def __deepcopy__(self, memo=None):
                return Tracer(self.value + 1)
        t = Tracer(10)
        s = self.thetype([t])
        dup = copy.deepcopy(s)
        self.assertNotEqual(id(s), id(dup))
        for elem in dup:
            newt = elem
        self.assertNotEqual(id(t), id(newt))
        self.assertEqual(t.value + 1, newt.value)

    def test_gc(self):
        # Create a nest of cycles to exercise overall ref count check
        class A:
            def __lt__(self, other):
                return id(self) < id(other)
            def __gt__(self, other):
                return id(self) > id(other)
        s = set(A() for i in range(1000))
        for elem in s:
            elem.cycle = s
            elem.sub = elem
            elem.set = set([elem])

    def test_badcmp(self):
        s = self.thetype([BadCmp()])
        # Detect comparison errors during insertion and lookup
        self.assertRaises(RuntimeError, self.thetype, [BadCmp(), BadCmp()])
        self.assertRaises(RuntimeError, s.__contains__, BadCmp())
        # Detect errors during mutating operations
        if hasattr(s, 'add'):
            self.assertRaises(RuntimeError, s.add, BadCmp())
            self.assertRaises(RuntimeError, s.discard, BadCmp())
            self.assertRaises(RuntimeError, s.remove, BadCmp())

    def test_cyclical_repr(self):
        w = ReprWrapper()
        s = self.thetype([w])
        w.value = s
        if self.thetype == set:
            self.assertEqual(repr(s), 'sortedset([sortedset(...)])')
        else:
            name = repr(s).partition('(')[0] # strip class name
            self.assertEqual(repr(s), '%s([%s(...)])' % (name, name))

    def test_cyclical_print(self):
        # str() of a self-referential set must match its repr().
        w = ReprWrapper()
        s = self.thetype([w])
        w.value = s
        fo = open(support.TESTFN, "w")
        try:
            fo.write(str(s))
            fo.close()
            fo = open(support.TESTFN, "r")
            self.assertEqual(fo.read(), repr(s))
        finally:
            fo.close()
            support.unlink(support.TESTFN)

    def test_container_iterator(self):
        # Bug #3680: tp_traverse was not implemented for set iterator object
        class C(object):
            def __lt__(self, other):
                return id(self) < id(other)
            def __gt__(self, other):
                return id(self) > id(other)
        obj = C()
        ref = weakref.ref(obj)
        container = set([obj, 1])
        obj.x = iter(container)
        del obj, container
        gc.collect()
        self.assertTrue(ref() is None, "Cycle was not collected")
+
+class TestSet(TestJointOps):
+ thetype = set
+ basetype = set
+
    def test_init(self):
        # __init__ must reinitialize an existing set in place and reject
        # extra positional or keyword arguments.
        s = self.thetype()
        s.__init__(self.word)
        self.assertEqual(s, set(self.word))
        s.__init__(self.otherword)
        self.assertEqual(s, set(self.otherword))
        self.assertRaises(TypeError, s.__init__, s, 2);
        self.assertRaises(TypeError, s.__init__, 1);
+
    def test_constructor_identity(self):
        # Constructing from an existing set copies it, never aliases it.
        s = self.thetype(range(3))
        t = self.thetype(s)
        self.assertNotEqual(id(s), id(t))
+
    def test_hash(self):
        # The mutable set type is unhashable.
        self.assertRaises(TypeError, hash, self.s)
+
    def test_clear(self):
        # clear() empties the set in place.
        self.s.clear()
        self.assertEqual(self.s, set())
        self.assertEqual(len(self.s), 0)
+
    def test_copy(self):
        # copy() returns an equal but distinct object.
        dup = self.s.copy()
        self.assertEqual(self.s, dup)
        self.assertNotEqual(id(self.s), id(dup))
        #self.assertEqual(type(dup), self.basetype)
+
    def test_add(self):
        # add() inserts; adding an element already present is a no-op.
        self.s.add('Q')
        self.assertIn('Q', self.s)
        dup = self.s.copy()
        self.s.add('Q')
        self.assertEqual(self.s, dup)
        #self.assertRaises(TypeError, self.s.add, [])
+
    def test_remove(self):
        # remove() deletes a present element; a missing element raises
        # KeyError.
        self.s.remove('a')
        self.assertNotIn('a', self.s)
        self.assertRaises(KeyError, self.s.remove, 'Q')
        s = self.thetype([frozenset(self.word)])
        #self.assertIn(self.thetype(self.word), s)
        #s.remove(self.thetype(self.word))
        #self.assertNotIn(self.thetype(self.word), s)
        #self.assertRaises(KeyError, self.s.remove, self.thetype(self.word))
+
    def test_remove_keyerror_unpacking(self):
        # bug: www.python.org/sf/1576657
        # KeyError.args[0] must be the missing key itself, even when the
        # key is a tuple (which would otherwise be unpacked into args).
        for v1 in ['Q', (1,)]:
            try:
                self.s.remove(v1)
            except KeyError as e:
                v2 = e.args[0]
                self.assertEqual(v1, v2)
            else: # pragma: no cover
                self.fail()
+
    def test_remove_keyerror_set(self):
        # The KeyError for a missing set-valued key must carry that exact
        # key object (identity, not just equality).
        key = self.thetype([3, 4])
        try:
            self.s.remove(key)
        except KeyError as e:
            self.assertTrue(e.args[0] is key,
                            "KeyError should be {0}, not {1}".format(key,
                                                                     e.args[0]))
        else: # pragma: no cover
            self.fail()
+
    def test_discard(self):
        # discard() removes if present and silently ignores missing keys.
        self.s.discard('a')
        self.assertNotIn('a', self.s)
        self.s.discard('Q')
        #self.assertRaises(TypeError, self.s.discard, [])
        s = self.thetype([frozenset(self.word)])
        #self.assertIn(self.thetype(self.word), s)
        #s.discard(self.thetype(self.word))
        #self.assertNotIn(self.thetype(self.word), s)
        #s.discard(self.thetype(self.word))
+
    def test_pop(self):
        # pop() removes and returns elements until empty, then raises
        # IndexError (builtin set raises KeyError here instead).
        for i in range(len(self.s)):
            elem = self.s.pop()
            self.assertNotIn(elem, self.s)
        self.assertRaises(IndexError, self.s.pop)
+
    def test_update(self):
        # update() accepts one or more arbitrary iterables, returns None,
        # and propagates exceptions raised mid-iteration.
        retval = self.s.update(self.otherword)
        self.assertEqual(retval, None)
        for c in (self.word + self.otherword):
            self.assertIn(c, self.s)
        self.assertRaises(PassThru, self.s.update, check_pass_thru())
        self.assertRaises(TypeError, self.s.update, 6)
        for p, q in (('cdc', 'abcd'), ('efgfe', 'abcefg'), ('ccb', 'abc'), ('ef', 'abcef')):
            for C in set, frozenset, dict.fromkeys, str, list, tuple:
                s = self.thetype('abcba')
                self.assertEqual(s.update(C(p)), None)
                self.assertEqual(s, set(q))
        for p in ('cdc', 'efgfe', 'ccb', 'ef', 'abcda'):
            q = 'ahi'
            for C in set, frozenset, dict.fromkeys, str, list, tuple:
                s = self.thetype('abcba')
                self.assertEqual(s.update(C(p), C(q)), None)
                self.assertEqual(s, set(s) | set(p) | set(q))
+
    def test_ior(self):
        # |= is in-place union.
        self.s |= set(self.otherword)
        for c in (self.word + self.otherword):
            self.assertIn(c, self.s)
+
    def test_intersection_update(self):
        # intersection_update() keeps only common elements, returns None,
        # and accepts multiple iterables of any type.
        retval = self.s.intersection_update(self.otherword)
        self.assertEqual(retval, None)
        for c in (self.word + self.otherword):
            if c in self.otherword and c in self.word:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(PassThru, self.s.intersection_update, check_pass_thru())
        self.assertRaises(TypeError, self.s.intersection_update, 6)
        for p, q in (('cdc', 'c'), ('efgfe', ''), ('ccb', 'bc'), ('ef', '')):
            for C in set, frozenset, dict.fromkeys, str, list, tuple:
                s = self.thetype('abcba')
                self.assertEqual(s.intersection_update(C(p)), None)
                self.assertEqual(s, set(q))
                ss = 'abcba'
                s = self.thetype(ss)
                t = 'cbc'
                self.assertEqual(s.intersection_update(C(p), C(t)), None)
                self.assertEqual(s, set('abcba')&set(p)&set(t))
+
    def test_iand(self):
        # &= is in-place intersection.
        self.s &= set(self.otherword)
        for c in (self.word + self.otherword):
            if c in self.otherword and c in self.word:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
+
    def test_difference_update(self):
        # difference_update() removes the given elements, returns None,
        # and accepts zero or more iterables of any type.
        retval = self.s.difference_update(self.otherword)
        self.assertEqual(retval, None)
        for c in (self.word + self.otherword):
            if c in self.word and c not in self.otherword:
                self.assertIn(c, self.s)
            else:
                self.assertNotIn(c, self.s)
        self.assertRaises(PassThru, self.s.difference_update, check_pass_thru())
        self.assertRaises(TypeError, self.s.difference_update, 6)
        self.assertRaises(TypeError, self.s.symmetric_difference_update, 6)
        for p, q in (('cdc', 'ab'), ('efgfe', 'abc'), ('ccb', 'a'), ('ef', 'abc')):
            for C in set, frozenset, dict.fromkeys, str, list, tuple:
                s = self.thetype('abcba')
                self.assertEqual(s.difference_update(C(p)), None)
                self.assertEqual(s, set(q))

            # NOTE(review): `C` below is the leftover binding from the
            # inner loop above (i.e. tuple) -- confirm this use of the
            # leaked loop variable is intentional.
            s = self.thetype('abcdefghih')
            s.difference_update()
            self.assertEqual(s, self.thetype('abcdefghih'))

            s = self.thetype('abcdefghih')
            s.difference_update(C('aba'))
            self.assertEqual(s, self.thetype('cdefghih'))

            s = self.thetype('abcdefghih')
            s.difference_update(C('cdc'), C('aba'))
            self.assertEqual(s, self.thetype('efghih'))
+
+ def test_isub(self):
+ self.s -= set(self.otherword)
+ for c in (self.word + self.otherword):
+ if c in self.word and c not in self.otherword:
+ self.assertIn(c, self.s)
+ else:
+ self.assertNotIn(c, self.s)
+
+ def test_symmetric_difference_update(self):
+ retval = self.s.symmetric_difference_update(self.otherword)
+ self.assertEqual(retval, None)
+ for c in (self.word + self.otherword):
+ if (c in self.word) ^ (c in self.otherword):
+ self.assertIn(c, self.s)
+ else:
+ self.assertNotIn(c, self.s)
+ self.assertRaises(PassThru, self.s.symmetric_difference_update, check_pass_thru())
+ self.assertRaises(TypeError, self.s.symmetric_difference_update, 6)
+ for p, q in (('cdc', 'abd'), ('efgfe', 'abcefg'), ('ccb', 'a'), ('ef', 'abcef')):
+ for C in set, frozenset, dict.fromkeys, str, list, tuple:
+ s = self.thetype('abcba')
+ self.assertEqual(s.symmetric_difference_update(C(p)), None)
+ self.assertEqual(s, set(q))
+
+ def test_ixor(self):
+ self.s ^= set(self.otherword)
+ for c in (self.word + self.otherword):
+ if (c in self.word) ^ (c in self.otherword):
+ self.assertIn(c, self.s)
+ else:
+ self.assertNotIn(c, self.s)
+
+ def test_inplace_on_self(self):
+ t = self.s.copy()
+ t |= t
+ self.assertEqual(t, self.s)
+ t &= t
+ self.assertEqual(t, self.s)
+ t -= t
+ self.assertEqual(t, self.thetype())
+ t = self.s.copy()
+ t ^= t
+ self.assertEqual(t, self.thetype())
+
+ def test_weakref(self):
+ s = self.thetype('gallahad')
+ p = weakref.proxy(s)
+ self.assertEqual(str(p), str(s))
+ s = None
+ self.assertRaises(ReferenceError, str, p)
+
+ def test_rich_compare(self): # pragma: no cover
+ if sys.version_info[0] < 3:
+ return
+
+ class TestRichSetCompare:
+ def __gt__(self, some_set):
+ self.gt_called = True
+ return False
+ def __lt__(self, some_set):
+ self.lt_called = True
+ return False
+ def __ge__(self, some_set):
+ self.ge_called = True
+ return False
+ def __le__(self, some_set):
+ self.le_called = True
+ return False
+
+ # This first tries the built-in rich set comparison, which doesn't know
+ # how to handle the custom object. Upon returning NotImplemented, the
+ # corresponding comparison on the right object is invoked.
+ myset = set((1, 2, 3))
+
+ myobj = TestRichSetCompare()
+ myset < myobj
+ self.assertTrue(myobj.gt_called)
+
+ myobj = TestRichSetCompare()
+ myset > myobj
+ self.assertTrue(myobj.lt_called)
+
+ myobj = TestRichSetCompare()
+ myset <= myobj
+ self.assertTrue(myobj.ge_called)
+
+ myobj = TestRichSetCompare()
+ myset >= myobj
+ self.assertTrue(myobj.le_called)
+
+class SetSubclass(set):
+ pass
+
+class TestSetSubclass(TestSet):
+ thetype = SetSubclass
+ basetype = set
+
+class SetSubclassWithKeywordArgs(set):
+ def __init__(self, iterable=[], newarg=None):
+ set.__init__(self, iterable)
+
+class TestSetSubclassWithKeywordArgs(TestSet):
+
+ def test_keywords_in_subclass(self):
+ 'SF bug #1486663 -- this used to erroneously raise a TypeError'
+ SetSubclassWithKeywordArgs(newarg=1)
+
+# Tests taken from test_sets.py =============================================
+
+empty_set = set()
+
+#==============================================================================
+
+class TestBasicOps(unittest.TestCase):
+
+ def test_repr(self):
+ if self.repr is not None:
+ self.assertEqual(repr(self.set), self.repr)
+
+ def test_print(self):
+ try:
+ fo = open(support.TESTFN, "w")
+ fo.write(str(self.set))
+ fo.close()
+ fo = open(support.TESTFN, "r")
+ self.assertEqual(fo.read(), repr(self.set))
+ finally:
+ fo.close()
+ support.unlink(support.TESTFN)
+
+ def test_length(self):
+ self.assertEqual(len(self.set), self.length)
+
+ def test_self_equality(self):
+ self.assertEqual(self.set, self.set)
+
+ def test_equivalent_equality(self):
+ self.assertEqual(self.set, self.dup)
+
+ def test_copy(self):
+ self.assertEqual(self.set.copy(), self.dup)
+
+ def test_self_union(self):
+ result = self.set | self.set
+ self.assertEqual(result, self.dup)
+
+ def test_empty_union(self):
+ result = self.set | empty_set
+ self.assertEqual(result, self.dup)
+
+ def test_union_empty(self):
+ result = empty_set | self.set
+ self.assertEqual(result, self.dup)
+
+ def test_self_intersection(self):
+ result = self.set & self.set
+ self.assertEqual(result, self.dup)
+
+ def test_empty_intersection(self):
+ result = self.set & empty_set
+ self.assertEqual(result, empty_set)
+
+ def test_intersection_empty(self):
+ result = empty_set & self.set
+ self.assertEqual(result, empty_set)
+
+ def test_self_isdisjoint(self):
+ result = self.set.isdisjoint(self.set)
+ self.assertEqual(result, not self.set)
+
+ def test_empty_isdisjoint(self):
+ result = self.set.isdisjoint(empty_set)
+ self.assertEqual(result, True)
+
+ def test_isdisjoint_empty(self):
+ result = empty_set.isdisjoint(self.set)
+ self.assertEqual(result, True)
+
+ def test_self_symmetric_difference(self):
+ result = self.set ^ self.set
+ self.assertEqual(result, empty_set)
+
+ def test_checkempty_symmetric_difference(self):
+ result = self.set ^ empty_set
+ self.assertEqual(result, self.set)
+
+ def test_self_difference(self):
+ result = self.set - self.set
+ self.assertEqual(result, empty_set)
+
+ def test_empty_difference(self):
+ result = self.set - empty_set
+ self.assertEqual(result, self.dup)
+
+ def test_empty_difference_rev(self):
+ result = empty_set - self.set
+ self.assertEqual(result, empty_set)
+
+ def test_iteration(self):
+ for v in self.set:
+ self.assertIn(v, self.values)
+ setiter = iter(self.set)
+ # note: __length_hint__ is an internal undocumented API,
+ # don't rely on it in your own programs
+ #self.assertEqual(setiter.__length_hint__(), len(self.set))
+
+ def test_pickling(self):
+ p = pickle.dumps(self.set)
+ copy = pickle.loads(p)
+ self.assertEqual(self.set, copy,
+ "%s != %s" % (self.set, copy))
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsEmpty(TestBasicOps):
+ def setUp(self):
+ self.case = "empty set"
+ self.values = []
+ self.set = set(self.values)
+ self.dup = set(self.values)
+ self.length = 0
+ self.repr = "sortedset()"
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsSingleton(TestBasicOps):
+ def setUp(self):
+ self.case = "unit set (number)"
+ self.values = [3]
+ self.set = set(self.values)
+ self.dup = set(self.values)
+ self.length = 1
+ self.repr = "sortedset([3])"
+
+ def test_in(self):
+ self.assertIn(3, self.set)
+
+ def test_not_in(self):
+ self.assertNotIn(2, self.set)
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsTuple(TestBasicOps):
+ def setUp(self):
+ self.case = "unit set (tuple)"
+ self.values = [(0, 1)]
+ self.set = set(self.values)
+ self.dup = set(self.values)
+ self.length = 1
+ self.repr = "sortedset([(0, 1)])"
+
+ def test_in(self):
+ self.assertIn((0, 1), self.set)
+
+ def test_not_in(self):
+ self.assertNotIn(9, self.set)
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsTriple(TestBasicOps):
+ def setUp(self):
+ self.case = "triple set"
+ self.values = [0, 1, 2]
+ self.set = set(self.values)
+ self.dup = set(self.values)
+ self.length = 3
+ self.repr = None
+
+#------------------------------------------------------------------------------
+
+class TestBasicOpsString(TestBasicOps):
+ def setUp(self):
+ self.case = "string set"
+ self.values = ["a", "b", "c"]
+ self.set = set(self.values)
+ self.dup = set(self.values)
+ self.length = 3
+ self.repr = "sortedset(['a', 'b', 'c'])"
+
+#------------------------------------------------------------------------------
+
+def baditer():
+ raise TypeError
+ yield True # pragma: no cover
+
+def gooditer():
+ yield True
+
+class TestExceptionPropagation(unittest.TestCase):
+ """SF 628246: Set constructor should not trap iterator TypeErrors"""
+
+ def test_instanceWithException(self):
+ self.assertRaises(TypeError, set, baditer())
+
+ def test_instancesWithoutException(self):
+ # All of these iterables should load without exception.
+ set([1,2,3])
+ set((1,2,3))
+ set({'one':1, 'two':2, 'three':3})
+ set(range(3))
+ set('abc')
+ set(gooditer())
+
+ def test_changingSizeWhileIterating(self):
+ s = set([1,2,3])
+ try:
+ for i in s:
+ s.update([4])
+ except RuntimeError:
+ pass
+ else: # pragma: no cover
+ self.fail("no exception when changing size during iteration")
+
+#==============================================================================
+
+class TestSetOfSets(unittest.TestCase):
+ def test_constructor(self):
+ inner = frozenset([1])
+ outer = set([inner])
+ element = outer.pop()
+ self.assertEqual(type(element), frozenset)
+ outer.add(inner) # Rebuild set of sets with .add method
+ outer.remove(inner)
+ self.assertEqual(outer, set()) # Verify that remove worked
+ outer.discard(inner) # Absence of KeyError indicates working fine
+
+#==============================================================================
+
+class TestBinaryOps(unittest.TestCase):
+ def setUp(self):
+ self.set = set((2, 4, 6))
+
+ def test_eq(self): # SF bug 643115
+ self.assertEqual(self.set, set({2:1,4:3,6:5}))
+
+ def test_union_subset(self):
+ result = self.set | set([2])
+ self.assertEqual(result, set((2, 4, 6)))
+
+ def test_union_superset(self):
+ result = self.set | set([2, 4, 6, 8])
+ self.assertEqual(result, set([2, 4, 6, 8]))
+
+ def test_union_overlap(self):
+ result = self.set | set([3, 4, 5])
+ self.assertEqual(result, set([2, 3, 4, 5, 6]))
+
+ def test_union_non_overlap(self):
+ result = self.set | set([8])
+ self.assertEqual(result, set([2, 4, 6, 8]))
+
+ def test_intersection_subset(self):
+ result = self.set & set((2, 4))
+ self.assertEqual(result, set((2, 4)))
+
+ def test_intersection_superset(self):
+ result = self.set & set([2, 4, 6, 8])
+ self.assertEqual(result, set([2, 4, 6]))
+
+ def test_intersection_overlap(self):
+ result = self.set & set([3, 4, 5])
+ self.assertEqual(result, set([4]))
+
+ def test_intersection_non_overlap(self):
+ result = self.set & set([8])
+ self.assertEqual(result, empty_set)
+
+ def test_isdisjoint_subset(self):
+ result = self.set.isdisjoint(set((2, 4)))
+ self.assertEqual(result, False)
+
+ def test_isdisjoint_superset(self):
+ result = self.set.isdisjoint(set([2, 4, 6, 8]))
+ self.assertEqual(result, False)
+
+ def test_isdisjoint_overlap(self):
+ result = self.set.isdisjoint(set([3, 4, 5]))
+ self.assertEqual(result, False)
+
+ def test_isdisjoint_non_overlap(self):
+ result = self.set.isdisjoint(set([8]))
+ self.assertEqual(result, True)
+
+ def test_sym_difference_subset(self):
+ result = self.set ^ set((2, 4))
+ self.assertEqual(result, set([6]))
+
+ def test_sym_difference_superset(self):
+ result = self.set ^ set((2, 4, 6, 8))
+ self.assertEqual(result, set([8]))
+
+ def test_sym_difference_overlap(self):
+ result = self.set ^ set((3, 4, 5))
+ self.assertEqual(result, set([2, 3, 5, 6]))
+
+ def test_sym_difference_non_overlap(self):
+ result = self.set ^ set([8])
+ self.assertEqual(result, set([2, 4, 6, 8]))
+
+#==============================================================================
+
+class TestUpdateOps(unittest.TestCase):
+ def setUp(self):
+ self.set = set((2, 4, 6))
+
+ def test_union_subset(self):
+ self.set |= set([2])
+ self.assertEqual(self.set, set((2, 4, 6)))
+
+ def test_union_superset(self):
+ self.set |= set([2, 4, 6, 8])
+ self.assertEqual(self.set, set([2, 4, 6, 8]))
+
+ def test_union_overlap(self):
+ self.set |= set([3, 4, 5])
+ self.assertEqual(self.set, set([2, 3, 4, 5, 6]))
+
+ def test_union_non_overlap(self):
+ self.set |= set([8])
+ self.assertEqual(self.set, set([2, 4, 6, 8]))
+
+ def test_union_method_call(self):
+ self.set.update(set([3, 4, 5]))
+ self.assertEqual(self.set, set([2, 3, 4, 5, 6]))
+
+ def test_intersection_subset(self):
+ self.set &= set((2, 4))
+ self.assertEqual(self.set, set((2, 4)))
+
+ def test_intersection_superset(self):
+ self.set &= set([2, 4, 6, 8])
+ self.assertEqual(self.set, set([2, 4, 6]))
+
+ def test_intersection_overlap(self):
+ self.set &= set([3, 4, 5])
+ self.assertEqual(self.set, set([4]))
+
+ def test_intersection_non_overlap(self):
+ self.set &= set([8])
+ self.assertEqual(self.set, empty_set)
+
+ def test_intersection_method_call(self):
+ self.set.intersection_update(set([3, 4, 5]))
+ self.assertEqual(self.set, set([4]))
+
+ def test_sym_difference_subset(self):
+ self.set ^= set((2, 4))
+ self.assertEqual(self.set, set([6]))
+
+ def test_sym_difference_superset(self):
+ self.set ^= set((2, 4, 6, 8))
+ self.assertEqual(self.set, set([8]))
+
+ def test_sym_difference_overlap(self):
+ self.set ^= set((3, 4, 5))
+ self.assertEqual(self.set, set([2, 3, 5, 6]))
+
+ def test_sym_difference_non_overlap(self):
+ self.set ^= set([8])
+ self.assertEqual(self.set, set([2, 4, 6, 8]))
+
+ def test_sym_difference_method_call(self):
+ self.set.symmetric_difference_update(set([3, 4, 5]))
+ self.assertEqual(self.set, set([2, 3, 5, 6]))
+
+ def test_difference_subset(self):
+ self.set -= set((2, 4))
+ self.assertEqual(self.set, set([6]))
+
+ def test_difference_superset(self):
+ self.set -= set((2, 4, 6, 8))
+ self.assertEqual(self.set, set([]))
+
+ def test_difference_overlap(self):
+ self.set -= set((3, 4, 5))
+ self.assertEqual(self.set, set([2, 6]))
+
+ def test_difference_non_overlap(self):
+ self.set -= set([8])
+ self.assertEqual(self.set, set([2, 4, 6]))
+
+ def test_difference_method_call(self):
+ self.set.difference_update(set([3, 4, 5]))
+ self.assertEqual(self.set, set([2, 6]))
+
+#==============================================================================
+
+class TestMutate(unittest.TestCase):
+ def setUp(self):
+ self.values = ["a", "b", "c"]
+ self.set = set(self.values)
+
+ def test_add_present(self):
+ self.set.add("c")
+ self.assertEqual(self.set, set("abc"))
+
+ def test_add_absent(self):
+ self.set.add("d")
+ self.assertEqual(self.set, set("abcd"))
+
+ def test_add_until_full(self):
+ tmp = set()
+ expected_len = 0
+ for v in self.values:
+ tmp.add(v)
+ expected_len += 1
+ self.assertEqual(len(tmp), expected_len)
+ self.assertEqual(tmp, self.set)
+
+ def test_remove_present(self):
+ self.set.remove("b")
+ self.assertEqual(self.set, set("ac"))
+
+ def test_remove_absent(self):
+ try:
+ self.set.remove("d")
+ self.fail("Removing missing element should have raised LookupError") # pragma: no cover
+ except LookupError:
+ pass
+
+ def test_remove_until_empty(self):
+ expected_len = len(self.set)
+ for v in self.values:
+ self.set.remove(v)
+ expected_len -= 1
+ self.assertEqual(len(self.set), expected_len)
+
+ def test_discard_present(self):
+ self.set.discard("c")
+ self.assertEqual(self.set, set("ab"))
+
+ def test_discard_absent(self):
+ self.set.discard("d")
+ self.assertEqual(self.set, set("abc"))
+
+ def test_clear(self):
+ self.set.clear()
+ self.assertEqual(len(self.set), 0)
+
+ def test_pop(self):
+ popped = {}
+ while self.set:
+ popped[self.set.pop()] = None
+ self.assertEqual(len(popped), len(self.values))
+ for v in self.values:
+ self.assertIn(v, popped)
+
+ def test_update_empty_tuple(self):
+ self.set.update(())
+ self.assertEqual(self.set, set(self.values))
+
+ def test_update_unit_tuple_overlap(self):
+ self.set.update(("a",))
+ self.assertEqual(self.set, set(self.values))
+
+ def test_update_unit_tuple_non_overlap(self):
+ self.set.update(("a", "z"))
+ self.assertEqual(self.set, set(self.values + ["z"]))
+
+#==============================================================================
+
+class TestSubsets(unittest.TestCase):
+
+ case2method = {"<=": "issubset",
+ ">=": "issuperset",
+ }
+
+ reverse = {"==": "==",
+ "!=": "!=",
+ "<": ">",
+ ">": "<",
+ "<=": ">=",
+ ">=": "<=",
+ }
+
+ def test_issubset(self):
+ x = self.left
+ y = self.right
+ for case in "!=", "==", "<", "<=", ">", ">=":
+ expected = case in self.cases
+ # Test the binary infix spelling.
+ result = eval("x" + case + "y", locals())
+ self.assertEqual(result, expected)
+ # Test the "friendly" method-name spelling, if one exists.
+ if case in TestSubsets.case2method:
+ method = getattr(x, TestSubsets.case2method[case])
+ result = method(y)
+ self.assertEqual(result, expected)
+
+ # Now do the same for the operands reversed.
+ rcase = TestSubsets.reverse[case]
+ result = eval("y" + rcase + "x", locals())
+ self.assertEqual(result, expected)
+ if rcase in TestSubsets.case2method:
+ method = getattr(y, TestSubsets.case2method[rcase])
+ result = method(x)
+ self.assertEqual(result, expected)
+#------------------------------------------------------------------------------
+
+class TestSubsetEqualEmpty(TestSubsets):
+ left = set()
+ right = set()
+ name = "both empty"
+ cases = "==", "<=", ">="
+
+#------------------------------------------------------------------------------
+
+class TestSubsetEqualNonEmpty(TestSubsets):
+ left = set([1, 2])
+ right = set([1, 2])
+ name = "equal pair"
+ cases = "==", "<=", ">="
+
+#------------------------------------------------------------------------------
+
+class TestSubsetEmptyNonEmpty(TestSubsets):
+ left = set()
+ right = set([1, 2])
+ name = "one empty, one non-empty"
+ cases = "!=", "<", "<="
+
+#------------------------------------------------------------------------------
+
+class TestSubsetPartial(TestSubsets):
+ left = set([1])
+ right = set([1, 2])
+ name = "one a non-empty proper subset of other"
+ cases = "!=", "<", "<="
+
+#------------------------------------------------------------------------------
+
+class TestSubsetNonOverlap(TestSubsets):
+ left = set([1])
+ right = set([2])
+ name = "neither empty, neither contains"
+ cases = "!="
+
+#==============================================================================
+
+class TestOnlySetsInBinaryOps(unittest.TestCase):
+
+ def test_eq_ne(self):
+ # Unlike the others, this is testing that == and != *are* allowed.
+ self.assertEqual(self.other == self.set, False)
+ self.assertEqual(self.set == self.other, False)
+ self.assertEqual(self.other != self.set, True)
+ self.assertEqual(self.set != self.other, True)
+
+ def test_ge_gt_le_lt(self):
+ self.assertRaises(TypeError, lambda: self.set < self.other)
+ self.assertRaises(TypeError, lambda: self.set <= self.other)
+ self.assertRaises(TypeError, lambda: self.set > self.other)
+ self.assertRaises(TypeError, lambda: self.set >= self.other)
+
+ self.assertRaises(TypeError, lambda: self.other < self.set)
+ self.assertRaises(TypeError, lambda: self.other <= self.set)
+ self.assertRaises(TypeError, lambda: self.other > self.set)
+ self.assertRaises(TypeError, lambda: self.other >= self.set)
+
+ def test_update_operator(self):
+ if self.otherIsIterable:
+ self.set |= self.other
+ else:
+ try:
+ self.set |= self.other
+ except TypeError:
+ pass
+ else: # pragma: no cover
+ self.fail("expected TypeError")
+
+ def test_update(self):
+ if self.otherIsIterable:
+ self.set.update(self.other)
+ else:
+ self.assertRaises(TypeError, self.set.update, self.other)
+
+ def test_union(self):
+ if self.otherIsIterable:
+ self.set | self.other
+ self.other | self.set
+ self.set.union(self.other)
+ else:
+ self.assertRaises(TypeError, lambda: self.set | self.other)
+ self.assertRaises(TypeError, lambda: self.other | self.set)
+ self.assertRaises(TypeError, self.set.union, self.other)
+
+ def test_intersection_update_operator(self):
+ if self.otherIsIterable:
+ self.set &= self.other
+ else:
+ try:
+ self.set &= self.other
+ except TypeError:
+ pass
+ else: # pragma: no cover
+ self.fail("expected TypeError")
+
+ def test_intersection_update(self):
+ if self.otherIsIterable:
+ self.set.intersection_update(self.other)
+ else:
+ self.assertRaises(TypeError,
+ self.set.intersection_update,
+ self.other)
+
+ def test_intersection(self):
+ if self.otherIsIterable:
+ self.set & self.other
+ self.other & self.set
+ self.set.intersection(self.other)
+ else:
+ self.assertRaises(TypeError, lambda: self.set & self.other)
+ self.assertRaises(TypeError, lambda: self.other & self.set)
+ self.assertRaises(TypeError, self.set.intersection, self.other)
+
+ def test_sym_difference_update_operator(self):
+ if self.otherIsIterable:
+ self.set ^= self.other
+ else:
+ try:
+ self.set ^= self.other
+ except TypeError:
+ pass
+ else: # pragma: no cover
+ self.fail("expected TypeError")
+
+ def test_sym_difference_update(self):
+ if self.otherIsIterable:
+ self.set.symmetric_difference_update(self.other)
+ else:
+ self.assertRaises(TypeError,
+ self.set.symmetric_difference_update,
+ self.other)
+
+ def test_sym_difference(self):
+ if self.otherIsIterable:
+ self.set ^ self.other
+ self.other ^ self.set
+ self.set.symmetric_difference(self.other)
+ else:
+ self.assertRaises(TypeError, lambda: self.set ^ self.other)
+ self.assertRaises(TypeError, lambda: self.other ^ self.set)
+ self.assertRaises(TypeError, self.set.symmetric_difference, self.other)
+
+ def test_difference_update_operator(self):
+ if self.otherIsIterable:
+ self.set -= self.other
+ else:
+ try:
+ self.set -= self.other
+ except TypeError:
+ pass
+ else: # pragma: no cover
+ self.fail("expected TypeError")
+
+ def test_difference_update(self):
+ if self.otherIsIterable:
+ self.set.difference_update(self.other)
+ else:
+ self.assertRaises(TypeError,
+ self.set.difference_update,
+ self.other)
+
+ def test_difference(self):
+ if self.otherIsIterable:
+ self.set - self.other
+ self.other - self.set
+ self.set.difference(self.other)
+ else:
+ self.assertRaises(TypeError, lambda: self.set - self.other)
+ self.assertRaises(TypeError, lambda: self.other - self.set)
+ self.assertRaises(TypeError, self.set.difference, self.other)
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsNumeric(TestOnlySetsInBinaryOps):
+ def setUp(self):
+ self.set = set((1, 2, 3))
+ self.other = 19
+ self.otherIsIterable = False
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsDict(TestOnlySetsInBinaryOps):
+ def setUp(self):
+ self.set = set((1, 2, 3))
+ self.other = {1:2, 3:4}
+ self.otherIsIterable = True
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsOperator(TestOnlySetsInBinaryOps):
+ def setUp(self):
+ self.set = set((1, 2, 3))
+ self.other = operator.add
+ self.otherIsIterable = False
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsTuple(TestOnlySetsInBinaryOps):
+ def setUp(self):
+ self.set = set((1, 2, 3))
+ self.other = (2, 4, 6)
+ self.otherIsIterable = True
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsString(TestOnlySetsInBinaryOps):
+ def setUp(self):
+ self.set = set('xyz')
+ self.other = 'abc'
+ self.otherIsIterable = True
+
+#------------------------------------------------------------------------------
+
+class TestOnlySetsGenerator(TestOnlySetsInBinaryOps):
+ def setUp(self):
+ def gen():
+ for i in range(0, 10, 2):
+ yield i
+ self.set = set((1, 2, 3))
+ self.other = gen()
+ self.otherIsIterable = True
+
+#==============================================================================
+
+class TestCopying(unittest.TestCase):
+
+ def test_copy(self):
+ dup = self.set.copy()
+ dup_list = sorted(dup, key=repr)
+ set_list = sorted(self.set, key=repr)
+ self.assertEqual(len(dup_list), len(set_list))
+ for i in range(len(dup_list)):
+ self.assertTrue(dup_list[i] is set_list[i])
+
+ def test_deep_copy(self):
+ dup = copy.deepcopy(self.set)
+ ##print type(dup), repr(dup)
+ dup_list = sorted(dup, key=repr)
+ set_list = sorted(self.set, key=repr)
+ self.assertEqual(len(dup_list), len(set_list))
+ for i in range(len(dup_list)):
+ self.assertEqual(dup_list[i], set_list[i])
+
+#------------------------------------------------------------------------------
+
+class TestCopyingEmpty(TestCopying):
+ def setUp(self):
+ self.set = set()
+
+#------------------------------------------------------------------------------
+
+class TestCopyingSingleton(TestCopying):
+ def setUp(self):
+ self.set = set(["hello"])
+
+#------------------------------------------------------------------------------
+
+class TestCopyingTriple(TestCopying):
+ def setUp(self):
+ self.set = set([-1, 0, 1])
+
+#------------------------------------------------------------------------------
+
+class TestCopyingTuple(TestCopying):
+ def setUp(self):
+ self.set = set([(1, 2)])
+
+#------------------------------------------------------------------------------
+
+class TestCopyingNested(TestCopying):
+ def setUp(self):
+ self.set = set([((1, 2), (3, 4))])
+
+#==============================================================================
+
+class TestIdentities(unittest.TestCase):
+ def setUp(self):
+ self.a = set('abracadabra')
+ self.b = set('alacazam')
+
+ def test_binopsVsSubsets(self):
+ a, b = self.a, self.b
+ self.assertTrue(a - b < a)
+ self.assertTrue(b - a < b)
+ self.assertTrue(a & b < a)
+ self.assertTrue(a & b < b)
+ self.assertTrue(a | b > a)
+ self.assertTrue(a | b > b)
+ self.assertTrue(a ^ b < a | b)
+
+ def test_commutativity(self):
+ a, b = self.a, self.b
+ self.assertEqual(a&b, b&a)
+ self.assertEqual(a|b, b|a)
+ self.assertEqual(a^b, b^a)
+ if a != b:
+ self.assertNotEqual(a-b, b-a)
+
+ def test_summations(self):
+ # check that sums of parts equal the whole
+ a, b = self.a, self.b
+ self.assertEqual((a-b)|(a&b)|(b-a), a|b)
+ self.assertEqual((a&b)|(a^b), a|b)
+ self.assertEqual(a|(b-a), a|b)
+ self.assertEqual((a-b)|b, a|b)
+ self.assertEqual((a-b)|(a&b), a)
+ self.assertEqual((b-a)|(a&b), b)
+ self.assertEqual((a-b)|(b-a), a^b)
+
+ def test_exclusion(self):
+ # check that inverse operations show non-overlap
+ a, b, zero = self.a, self.b, set()
+ self.assertEqual((a-b)&b, zero)
+ self.assertEqual((b-a)&a, zero)
+ self.assertEqual((a&b)&(a^b), zero)
+
+# Tests derived from test_itertools.py =======================================
+
+def R(seqn):
+ 'Regular generator'
+ for i in seqn:
+ yield i
+
+class G:
+ 'Sequence using __getitem__'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ def __getitem__(self, i):
+ return self.seqn[i]
+
+class I:
+ 'Sequence using iterator protocol'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ self.i = 0
+ def __iter__(self):
+ return self
+ def __next__(self):
+ if self.i >= len(self.seqn): raise StopIteration
+ v = self.seqn[self.i]
+ self.i += 1
+ return v
+ next = __next__
+
+class Ig:
+ 'Sequence using iterator protocol defined with a generator'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ self.i = 0
+ def __iter__(self):
+ for val in self.seqn:
+ yield val
+
+class X:
+ 'Missing __getitem__ and __iter__'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ self.i = 0
+ def __next__(self): # pragma: no cover
+ if self.i >= len(self.seqn): raise StopIteration
+ v = self.seqn[self.i]
+ self.i += 1
+ return v
+ next = __next__
+
+class N:
+ 'Iterator missing __next__()'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ self.i = 0
+ def __iter__(self):
+ return self
+
+class E:
+ 'Test propagation of exceptions'
+ def __init__(self, seqn):
+ self.seqn = seqn
+ self.i = 0
+ def __iter__(self):
+ return self
+ def __next__(self):
+ 3 // 0
+ next = __next__
+
+class S:
+ 'Test immediate stop'
+ def __init__(self, seqn):
+ pass
+ def __iter__(self):
+ return self
+ def __next__(self):
+ raise StopIteration
+ next = __next__
+
+from itertools import chain
+def L(seqn):
+ 'Test multiple tiers of iterators'
+ return chain(map(lambda x:x, R(Ig(G(seqn)))))
+
+class TestVariousIteratorArgs(unittest.TestCase):
+
+ def test_constructor(self):
+ for cons in (set,):
+ for s in (range(3), (), range(1000), (1.1, 1.2), range(2000,2200,5), range(10)):
+ for g in (G, I, Ig, S, L, R):
+ self.assertEqual(sorted(cons(g(s)), key=repr), sorted(g(s), key=repr))
+ self.assertRaises(TypeError, cons , X(s))
+ self.assertRaises(TypeError, cons , N(s))
+ self.assertRaises(ZeroDivisionError, cons , E(s))
+
+ def test_inline_methods(self):
+ s = set([-1,-2,-3])
+ for data in (range(3), (), range(1000), (1.1, 1.2), range(2000,2200,5), range(10)):
+ for meth in (s.union, s.intersection, s.difference, s.symmetric_difference, s.isdisjoint):
+ for g in (G, I, Ig, L, R):
+ expected = meth(data)
+ actual = meth(G(data))
+ if isinstance(expected, bool):
+ self.assertEqual(actual, expected)
+ else:
+ self.assertEqual(sorted(actual, key=repr), sorted(expected, key=repr))
+ self.assertRaises(TypeError, meth, X(s))
+ self.assertRaises(TypeError, meth, N(s))
+ self.assertRaises(ZeroDivisionError, meth, E(s))
+
+ def test_inplace_methods(self):
+ for data in (range(3), (), range(1000), (1.1, 1.2), range(2000,2200,5), range(10)):
+ for methname in ('update', 'intersection_update',
+ 'difference_update', 'symmetric_difference_update'):
+ for g in (G, I, Ig, S, L, R):
+ s = set([1,2,3])
+ t = s.copy()
+ getattr(s, methname)(list(g(data)))
+ getattr(t, methname)(g(data))
+ self.assertEqual(sorted(s, key=repr), sorted(t, key=repr))
+
+ self.assertRaises(TypeError, getattr(set([1,2,3]), methname), X(data))
+ self.assertRaises(TypeError, getattr(set([1,2,3]), methname), N(data))
+ self.assertRaises(ZeroDivisionError, getattr(set([1,2,3]), methname), E(data))
+
+#==============================================================================
+
+test_classes = (
+ TestSet,
+ TestSetSubclass,
+ TestSetSubclassWithKeywordArgs,
+ TestSetOfSets,
+ TestExceptionPropagation,
+ TestBasicOpsEmpty,
+ TestBasicOpsSingleton,
+ TestBasicOpsTuple,
+ TestBasicOpsTriple,
+ TestBasicOpsString,
+ TestBinaryOps,
+ TestUpdateOps,
+ TestMutate,
+ TestSubsetEqualEmpty,
+ TestSubsetEqualNonEmpty,
+ TestSubsetEmptyNonEmpty,
+ TestSubsetPartial,
+ TestSubsetNonOverlap,
+ TestOnlySetsNumeric,
+ TestOnlySetsDict,
+ TestOnlySetsOperator,
+ TestOnlySetsTuple,
+ TestOnlySetsString,
+ TestOnlySetsGenerator,
+ TestCopyingEmpty,
+ TestCopyingSingleton,
+ TestCopyingTriple,
+ TestCopyingTuple,
+ TestCopyingNested,
+ TestIdentities,
+ TestVariousIteratorArgs,
+ )
diff --git a/test/test_support.py b/test/test_support.py
new file mode 100644
index 0000000..d632ba4
--- /dev/null
+++ b/test/test_support.py
@@ -0,0 +1,415 @@
+# This file taken from Python, licensed under the Python License Agreement
+
+from __future__ import print_function
+"""Supporting definitions for the Python regression tests."""
+
+if __name__ != 'test.test_support':
+ raise ImportError('test_support must be imported from the test package')
+
+import sys
+
+class Error(Exception):
+ """Base class for regression test exceptions."""
+
+class TestFailed(Error):
+ """Test failed."""
+
+class TestSkipped(Error):
+ """Test skipped.
+
+ This can be raised to indicate that a test was deliberatly
+ skipped, but not because a feature wasn't available. For
+ example, if some resource can't be used, such as the network
+ appears to be unavailable, this should be raised instead of
+ TestFailed.
+ """
+
+class ResourceDenied(TestSkipped):
+ """Test skipped because it requested a disallowed resource.
+
+ This is raised when a test calls requires() for a resource that
+ has not be enabled. It is used to distinguish between expected
+ and unexpected skips.
+ """
+
+verbose = 1              # Flag set to 0 by regrtest.py
+use_resources = None     # Flag set to [] by regrtest.py
+max_memuse = 0           # Disable bigmem tests (they will still be run with
+                         # small sizes, to make sure they work.)
+
+# _original_stdout is meant to hold stdout at the time regrtest began.
+# This may be "the real" stdout, or IDLE's emulation of stdout, or whatever.
+# The point is to have some flavor of stdout the user can actually see.
+_original_stdout = None
+def record_original_stdout(stdout):
+    """Remember *stdout* so later output can still reach the user's console."""
+    global _original_stdout
+    _original_stdout = stdout
+
+def get_original_stdout():
+    """Return the remembered stdout, falling back to the current sys.stdout."""
+    return _original_stdout or sys.stdout
+
+def unload(name):
+    """Drop module *name* from sys.modules; a no-op if it is not loaded."""
+    try:
+        del sys.modules[name]
+    except KeyError:
+        pass
+
+def unlink(filename):
+    """Delete *filename*, silently ignoring OSError (e.g. file already gone)."""
+    import os
+    try:
+        os.unlink(filename)
+    except OSError:
+        pass
+
+def forget(modname):
+ '''"Forget" a module was ever imported by removing it from sys.modules and
+ deleting any .pyc and .pyo files.'''
+ unload(modname)
+ import os
+ for dirname in sys.path:
+ unlink(os.path.join(dirname, modname + os.extsep + 'pyc'))
+ # Deleting the .pyo file cannot be within the 'try' for the .pyc since
+ # the chance exists that there is no .pyc (and thus the 'try' statement
+ # is exited) but there is a .pyo file.
+ unlink(os.path.join(dirname, modname + os.extsep + 'pyo'))
+
+def is_resource_enabled(resource):
+ """Test whether a resource is enabled. Known resources are set by
+ regrtest.py."""
+ return use_resources is not None and resource in use_resources
+
+def requires(resource, msg=None):
+ """Raise ResourceDenied if the specified resource is not available.
+
+ If the caller's module is __main__ then automatically return True. The
+ possibility of False being returned occurs when regrtest.py is executing."""
+ # see if the caller's module is __main__ - if so, treat as if
+ # the resource was set
+ if sys._getframe().f_back.f_globals.get("__name__") == "__main__":
+ return
+ if not is_resource_enabled(resource):
+ if msg is None:
+ msg = "Use of the `%s' resource not enabled" % resource
+ raise ResourceDenied(msg)
+
+FUZZ = 1e-6
+
+def fcmp(x, y): # fuzzy comparison function
+ if type(x) == type(0.0) or type(y) == type(0.0):
+ try:
+ x, y = coerce(x, y)
+ fuzz = (abs(x) + abs(y)) * FUZZ
+ if abs(x-y) <= fuzz:
+ return 0
+ except:
+ pass
+ elif type(x) == type(y) and type(x) in (type(()), type([])):
+ for i in range(min(len(x), len(y))):
+ outcome = fcmp(x[i], y[i])
+ if outcome != 0:
+ return outcome
+ return cmp(len(x), len(y))
+ return cmp(x, y)
+
+# NOTE(review): the Python 2 original tested the name 'unicode' here; the
+# ported code tests 'str', which always exists, so have_unicode is always 1.
+try:
+    str
+    have_unicode = 1
+except NameError:
+    have_unicode = 0
+
+# True when running under Jython (Java-based Python).
+is_jython = sys.platform.startswith('java')
+
+import os
+# Filename used for testing
+if os.name == 'java':
+    # Jython disallows @ in module names
+    TESTFN = '$test'
+elif os.name == 'riscos':
+    TESTFN = 'testfile'
+else:
+    TESTFN = '@test'
+    # Unicode name only used if TEST_FN_ENCODING exists for the platform.
+    if have_unicode:
+        # Assuming sys.getfilesystemencoding()!=sys.getdefaultencoding()
+        # TESTFN_UNICODE is a filename that can be encoded using the
+        # file system encoding, but *not* with the default (ascii) encoding
+        # NOTE(review): on Python 3 isinstance('', str) is always true, so
+        # the latin-1 decoding branch below is unreachable there.
+        if isinstance('', str):
+            # python -U
+            # XXX perhaps unicode() should accept Unicode strings?
+            TESTFN_UNICODE = "@test-\xe0\xf2"
+        else:
+            # 2 latin characters.
+            TESTFN_UNICODE = str("@test-\xe0\xf2", "latin-1")
+        TESTFN_ENCODING = sys.getfilesystemencoding()
+
+# Make sure we can write to TESTFN, try in /tmp if we can't
+fp = None
+try:
+    fp = open(TESTFN, 'w+')
+except IOError:
+    TMP_TESTFN = os.path.join('/tmp', TESTFN)
+    try:
+        fp = open(TMP_TESTFN, 'w+')
+        TESTFN = TMP_TESTFN
+        del TMP_TESTFN
+    except IOError:
+        print(('WARNING: tests will fail, unable to write to: %s or %s' %
+               (TESTFN, TMP_TESTFN)))
+# Clean up the probe file and the temporary names used above.
+if fp is not None:
+    fp.close()
+    unlink(TESTFN)
+del os, fp
+
+def findfile(file, here=__file__):
+    """Try to find a file on sys.path and the working directory.  If it is not
+    found the argument passed to the function is returned (this does not
+    necessarily signal failure; could still be the legitimate path)."""
+    import os
+    if os.path.isabs(file):
+        return file
+    path = sys.path
+    # Search the directory containing this module first.
+    path = [os.path.dirname(here)] + path
+    for dn in path:
+        fn = os.path.join(dn, file)
+        if os.path.exists(fn): return fn
+    return file
+
+def verify(condition, reason='test failed'):
+ """Verify that condition is true. If not, raise TestFailed.
+
+ The optional argument reason can be given to provide
+ a better error text.
+ """
+
+ if not condition:
+ raise TestFailed(reason)
+
+def vereq(a, b):
+ """Raise TestFailed if a == b is false.
+
+ This is better than verify(a == b) because, in case of failure, the
+ error message incorporates repr(a) and repr(b) so you can see the
+ inputs.
+
+ Note that "not (a == b)" isn't necessarily the same as "a != b"; the
+ former is tested.
+ """
+
+ if not (a == b):
+ raise TestFailed("%r == %r" % (a, b))
+
+def sortdict(dict):
+    "Like repr(dict), but in sorted order."
+    # NOTE(review): the parameter shadows the dict builtin; kept as-is for
+    # compatibility with existing callers.
+    items = list(dict.items())
+    items.sort()
+    reprpairs = ["%r: %r" % pair for pair in items]
+    withcommas = ", ".join(reprpairs)
+    return "{%s}" % withcommas
+
+def check_syntax(statement):
+    """Verify that *statement* is rejected by the compiler with SyntaxError."""
+    try:
+        compile(statement, '<string>', 'exec')
+    except SyntaxError:
+        pass
+    else:
+        print('Missing SyntaxError: "%s"' % statement)
+
+def open_urlresource(url):
+ import urllib.request, urllib.parse, urllib.error, urllib.parse
+ import os.path
+
+ filename = urllib.parse.urlparse(url)[2].split('/')[-1] # '/': it's URL!
+
+ for path in [os.path.curdir, os.path.pardir]:
+ fn = os.path.join(path, filename)
+ if os.path.exists(fn):
+ return open(fn)
+
+ requires('urlfetch')
+ print('\tfetching %s ...' % url, file=get_original_stdout())
+ fn, _ = urllib.request.urlretrieve(url, filename)
+ return open(fn)
+
+#=======================================================================
+# Decorator for running a function in a different locale, correctly resetting
+# it afterwards.
+
+def run_with_locale(catstr, *locales):
+    """Decorator: run the wrapped callable under the first locale in
+    *locales* that can be set for category *catstr* (e.g. 'LC_NUMERIC'),
+    restoring the original locale afterwards."""
+    def decorator(func):
+        def inner(*args, **kwds):
+            try:
+                import locale
+                category = getattr(locale, catstr)
+                orig_locale = locale.setlocale(category)
+            except AttributeError:
+                # if the test author gives us an invalid category string
+                raise
+            except:
+                # cannot retrieve original locale, so do nothing
+                locale = orig_locale = None
+            else:
+                # Try the candidate locales in order; first settable one wins.
+                for loc in locales:
+                    try:
+                        locale.setlocale(category, loc)
+                        break
+                    except:
+                        pass
+
+            # now run the function, resetting the locale on exceptions
+            try:
+                return func(*args, **kwds)
+            finally:
+                if locale and orig_locale:
+                    locale.setlocale(category, orig_locale)
+        # Copy metadata by hand (this code predates functools.wraps usage).
+        inner.__name__ = func.__name__
+        inner.__doc__ = func.__doc__
+        return inner
+    return decorator
+
+#=======================================================================
+# Big-memory-test support. Separate from 'resources' because memory use should be configurable.
+
+# Some handy shorthands. Note that these are used for byte-limits as well
+# as size-limits, in the various bigmem tests
+_1M = 1024*1024
+_1G = 1024 * _1M
+_2G = 2 * _1G
+
+def set_memlimit(limit):
+    """Parse a human-readable size such as '2.5Gb' and store the byte
+    count in the module-global max_memuse.  Raises ValueError for an
+    unparsable or uselessly small (< 2.5G) limit."""
+    import re
+    global max_memuse
+    sizes = {
+        'k': 1024,
+        'm': _1M,
+        'g': _1G,
+        't': 1024*_1G,
+    }
+    m = re.match(r'(\d+(\.\d+)?) (K|M|G|T)b?$', limit,
+                 re.IGNORECASE | re.VERBOSE)
+    if m is None:
+        raise ValueError('Invalid memory limit %r' % (limit,))
+    memlimit = int(float(m.group(1)) * sizes[m.group(3).lower()])
+    if memlimit < 2.5*_1G:
+        raise ValueError('Memory limit %r too low to be useful' % (limit,))
+    max_memuse = memlimit
+
+def bigmemtest(minsize, memuse, overhead=5*_1M):
+ """Decorator for bigmem tests.
+
+ 'minsize' is the minimum useful size for the test (in arbitrary,
+ test-interpreted units.) 'memuse' is the number of 'bytes per size' for
+ the test, or a good estimate of it. 'overhead' specifies fixed overhead,
+ independant of the testsize, and defaults to 5Mb.
+
+ The decorator tries to guess a good value for 'size' and passes it to
+ the decorated test function. If minsize * memuse is more than the
+ allowed memory use (as defined by max_memuse), the test is skipped.
+ Otherwise, minsize is adjusted upward to use up to max_memuse.
+ """
+ def decorator(f):
+ def wrapper(self):
+ if not max_memuse:
+ # If max_memuse is 0 (the default),
+ # we still want to run the tests with size set to a few kb,
+ # to make sure they work. We still want to avoid using
+ # too much memory, though, but we do that noisily.
+ maxsize = 5147
+ self.failIf(maxsize * memuse + overhead > 20 * _1M)
+ else:
+ maxsize = int((max_memuse - overhead) / memuse)
+ if maxsize < minsize:
+ # Really ought to print 'test skipped' or something
+ if verbose:
+ sys.stderr.write("Skipping %s because of memory "
+ "constraint\n" % (f.__name__,))
+ return
+ # Try to keep some breathing room in memory use
+ maxsize = max(maxsize - 50 * _1M, minsize)
+ return f(self, maxsize)
+ wrapper.minsize = minsize
+ wrapper.memuse = memuse
+ wrapper.overhead = overhead
+ return wrapper
+ return decorator
+
+#=======================================================================
+# Preliminary PyUNIT integration.
+
+from . import unittest
+
+
+class BasicTestRunner:
+    """Minimal runner: collect results into a plain TestResult, no output."""
+    def run(self, test):
+        result = unittest.TestResult()
+        test(result)
+        return result
+
+def run_suite(suite, testclass=None):
+    """Run tests from a unittest.TestSuite-derived class."""
+    if verbose:
+        runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
+    else:
+        runner = BasicTestRunner()
+
+    result = runner.run(suite)
+    if not result.wasSuccessful():
+        # With exactly one failure or error, raise its traceback text;
+        # otherwise raise a summary message pointing at the test class.
+        if len(result.errors) == 1 and not result.failures:
+            err = result.errors[0][1]
+        elif len(result.failures) == 1 and not result.errors:
+            err = result.failures[0][1]
+        else:
+            if testclass is None:
+                msg = "errors occurred; run in verbose mode for details"
+            else:
+                msg = "errors occurred in %s.%s" \
+                      % (testclass.__module__, testclass.__name__)
+            raise TestFailed(msg)
+        raise TestFailed(err)
+
+
+def run_unittest(*classes):
+    """Run tests from unittest.TestCase-derived classes."""
+    suite = unittest.TestSuite()
+    for cls in classes:
+        # Suites and already-built cases are added as-is; bare TestCase
+        # classes are expanded into a suite of their test methods.
+        if isinstance(cls, (unittest.TestSuite, unittest.TestCase)):
+            suite.addTest(cls)
+        else:
+            suite.addTest(unittest.makeSuite(cls))
+    if len(classes)==1:
+        testclass = classes[0]
+    else:
+        testclass = None
+    run_suite(suite, testclass)
+
+
+#=======================================================================
+# doctest driver.
+
+def run_doctest(module, verbosity=None):
+    """Run doctest on the given module.  Return (#failures, #tests).
+
+    If optional argument verbosity is not specified (or is None), pass
+    test_support's belief about verbosity on to doctest.  Else doctest's
+    usual behavior is used (it searches sys.argv for -v).
+    """
+
+    import doctest
+
+    if verbosity is None:
+        verbosity = verbose
+    else:
+        # NOTE(review): an explicit verbosity argument is deliberately
+        # discarded so doctest falls back to scanning sys.argv for -v,
+        # as described in the docstring above.
+        verbosity = None
+
+    # Direct doctest output (normally just errors) to real stdout; doctest
+    # output shouldn't be compared by regrtest.
+    save_stdout = sys.stdout
+    sys.stdout = get_original_stdout()
+    try:
+        f, t = doctest.testmod(module, verbose=verbosity)
+        if f:
+            raise TestFailed("%d of %d doctests failed" % (f, t))
+    finally:
+        sys.stdout = save_stdout
+    if verbose:
+        print('doctest (%s) ... %d tests with zero failures' % (module.__name__, t))
+    return f, t
diff --git a/test/unittest.py b/test/unittest.py
new file mode 100644
index 0000000..2ff3e2d
--- /dev/null
+++ b/test/unittest.py
@@ -0,0 +1,840 @@
+#! /usr/bin/python2.5
+from __future__ import print_function
+'''
+Python unit testing framework, based on Erich Gamma's JUnit and Kent Beck's
+Smalltalk testing framework.
+
+This module contains the core framework classes that form the basis of
+specific test cases and suites (TestCase, TestSuite etc.), and also a
+text-based utility class for running the tests and reporting the results
+ (TextTestRunner).
+
+Simple usage:
+
+ import unittest
+
+ class IntegerArithmenticTestCase(unittest.TestCase):
+ def testAdd(self): ## test method names begin 'test*'
+ self.assertEquals((1 + 2), 3)
+ self.assertEquals(0 + 1, 1)
+ def testMultiply(self):
+ self.assertEquals((0 * 10), 0)
+ self.assertEquals((5 * 8), 40)
+
+ if __name__ == '__main__':
+ unittest.main()
+
+Further information is available in the bundled documentation, and from
+
+ http://pyunit.sourceforge.net/
+
+Copyright (c) 1999-2003 Steve Purcell
+This module is free software, and you may redistribute it and/or modify
+it under the same terms as Python itself, so long as this copyright message
+and disclaimer are retained in their original form.
+
+IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
+SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
+THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
+
+THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
+AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
+SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+'''
+
+__author__ = "Steve Purcell"
+__email__ = "stephen_purcell at yahoo dot com"
+__version__ = "#Revision: 1.63 $"[11:-2]
+
+import time
+import sys
+import traceback
+import os
+import types
+
+class ReferenceLeak(Exception):
+    """Raised by the memcheck test pass when a test leaks objects or
+    references; *s* describes the kind of leak and *n* the count."""
+    def __init__(self, s, n):
+        self.n = n
+        self.s = s
+
+    def __str__(self):
+        return '%s: %d' % (self.s, self.n)
+
+##############################################################################
+# Exported classes and functions
+##############################################################################
+__all__ = ['TestResult', 'TestCase', 'TestSuite', 'TextTestRunner',
+ 'TestLoader', 'FunctionTestCase', 'main', 'defaultTestLoader']
+
+# Expose obsolete functions for backwards compatibility
+__all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
+
+
+##############################################################################
+# Test framework core
+##############################################################################
+
+# All classes defined herein are 'new-style' classes, allowing use of 'super()'
+__metaclass__ = type
+
+def _strclass(cls):
+    """Return the dotted 'module.ClassName' name of *cls*."""
+    return "%s.%s" % (cls.__module__, cls.__name__)
+
+# Marker scanned by TestResult._is_relevant_tb_level() to prune frames
+# belonging to this framework from reported tracebacks.
+__unittest = 1
+
+class TestResult:
+ """Holder for test result information.
+
+ Test results are automatically managed by the TestCase and TestSuite
+ classes, and do not need to be explicitly manipulated by writers of tests.
+
+ Each instance holds the total number of tests run, and collections of
+ failures and errors that occurred among those test runs. The collections
+ contain tuples of (testcase, exceptioninfo), where exceptioninfo is the
+ formatted traceback of the error that occurred.
+ """
+ def __init__(self):
+ self.failures = []
+ self.errors = []
+ self.testsRun = 0
+ self.shouldStop = 0
+
+ def startTest(self, test):
+ "Called when the given test is about to be run"
+ self.testsRun = self.testsRun + 1
+
+ def stopTest(self, test):
+ "Called when the given test has been run"
+ pass
+
+ def addError(self, test, err):
+ """Called when an error has occurred. 'err' is a tuple of values as
+ returned by sys.exc_info().
+ """
+ self.errors.append((test, self._exc_info_to_string(err, test)))
+
+ def addFailure(self, test, err):
+ """Called when an error has occurred. 'err' is a tuple of values as
+ returned by sys.exc_info()."""
+ self.failures.append((test, self._exc_info_to_string(err, test)))
+
+ def addSuccess(self, test):
+ "Called when a test has completed successfully"
+ pass
+
+ def wasSuccessful(self):
+ "Tells whether or not this result was a success"
+ return len(self.failures) == len(self.errors) == 0
+
+ def stop(self):
+ "Indicates that the tests should be aborted"
+ self.shouldStop = True
+
+ def _exc_info_to_string(self, err, test):
+ """Converts a sys.exc_info()-style tuple of values into a string."""
+ exctype, value, tb = err
+ # Skip test runner traceback levels
+ while tb and self._is_relevant_tb_level(tb):
+ tb = tb.tb_next
+ if exctype is test.failureException:
+ # Skip assert*() traceback levels
+ length = self._count_relevant_tb_levels(tb)
+ return ''.join(traceback.format_exception(exctype, value, tb, length))
+ return ''.join(traceback.format_exception(exctype, value, tb))
+
+ def _is_relevant_tb_level(self, tb):
+ return '__unittest' in tb.tb_frame.f_globals
+
+ def _count_relevant_tb_levels(self, tb):
+ length = 0
+ while tb and not self._is_relevant_tb_level(tb):
+ length += 1
+ tb = tb.tb_next
+ return length
+
+ def __repr__(self):
+ return "<%s run=%i errors=%i failures=%i>" % \
+ (_strclass(self.__class__), self.testsRun, len(self.errors),
+ len(self.failures))
+
+class TestCase:
+ """A class whose instances are single test cases.
+
+ By default, the test code itself should be placed in a method named
+ 'runTest'.
+
+ If the fixture may be used for many test cases, create as
+ many test methods as are needed. When instantiating such a TestCase
+ subclass, specify in the constructor arguments the name of the test method
+ that the instance is to execute.
+
+ Test authors should subclass TestCase for their own tests. Construction
+ and deconstruction of the test's environment ('fixture') can be
+ implemented by overriding the 'setUp' and 'tearDown' methods respectively.
+
+ If it is necessary to override the __init__ method, the base class
+ __init__ method must always be called. It is important that subclasses
+ should not change the signature of their __init__ method, since instances
+ of the classes are instantiated automatically by parts of the framework
+ in order to be run.
+ """
+
+ # This attribute determines which exception will be raised when
+ # the instance's assertion methods fail; test methods raising this
+ # exception will be deemed to have 'failed' rather than 'errored'
+
+ failureException = AssertionError
+
+ def __init__(self, methodName='runTest'):
+ """Create an instance of the class that will use the named test
+ method when executed. Raises a ValueError if the instance does
+ not have a method with the specified name.
+ """
+ try:
+ self._testMethodName = methodName
+ testMethod = getattr(self, methodName)
+ self._testMethodDoc = testMethod.__doc__
+ except AttributeError:
+ raise ValueError("no such test method in %s: %s" % \
+ (self.__class__, methodName))
+
+ def assertIn(self, member, container, msg=None):
+ """Just like self.assertTrue(a in b), but with a nicer default message."""
+ if member not in container:
+ standardMsg = '%s not found in %s' % (repr(member),
+ repr(container))
+ self.fail(str((msg, standardMsg)))
+
+ def assertNotIn(self, member, container, msg=None):
+ """Just like self.assertTrue(a not in b), but with a nicer default message."""
+ if member in container:
+ standardMsg = '%s unexpectedly found in %s' % (repr(member),
+ repr(container))
+ self.fail(str((msg, standardMsg)))
+
+
+ def setUp(self):
+ "Hook method for setting up the test fixture before exercising it."
+ pass
+
+ def tearDown(self):
+ "Hook method for deconstructing the test fixture after testing it."
+ pass
+
+ def countTestCases(self):
+ return 1
+
+ def defaultTestResult(self):
+ return TestResult()
+
+ def shortDescription(self):
+ """Returns a one-line description of the test, or None if no
+ description has been provided.
+
+ The default implementation of this method returns the first line of
+ the specified test method's docstring.
+ """
+ doc = self._testMethodDoc
+ return doc and doc.split("\n")[0].strip() or None
+
+ def id(self):
+ return "%s.%s" % (_strclass(self.__class__), self._testMethodName)
+
+ def __str__(self):
+ return "%s (%s)" % (self._testMethodName, _strclass(self.__class__))
+
+ def __repr__(self):
+ return "<%s testMethod=%s>" % \
+ (_strclass(self.__class__), self._testMethodName)
+
+ def run(self, result=None):
+ self.run_(result, False)
+ try:
+ self.run_(result, True)
+ except AttributeError:
+ pass
+
+ def run_(self, result, memcheck):
+ if result is None: result = self.defaultTestResult()
+ if memcheck and not hasattr(sys, 'gettotalrefcount'):
+ return
+ result.startTest(self)
+ testMethod = getattr(self, self._testMethodName)
+ try:
+ import gc
+
+ total_start = 0
+ total_finish = 0
+ ok = True
+ gc.collect()
+ ob_start = set(id(x) for x in gc.get_objects())
+ try:
+ total_start = sys.gettotalrefcount()
+ except AttributeError:
+ pass
+
+ try:
+ self.setUp()
+ except KeyboardInterrupt:
+ raise
+ except:
+ result.addError(self, self._exc_info())
+ return
+
+ ok = False
+ try:
+ testMethod()
+ ok = True
+ except self.failureException:
+ result.addFailure(self, self._exc_info())
+ except KeyboardInterrupt:
+ raise
+ except:
+ result.addError(self, self._exc_info())
+
+ try:
+ self.tearDown()
+ if ok and memcheck:
+ gc.collect()
+ gc.collect()
+ total_finish = sys.gettotalrefcount()
+ ob_finish = gc.get_objects()
+ if len(ob_start) != len(ob_finish):
+ #for ob in ob_finish:
+ # if id(ob) not in ob_start and ob is not ob_start:
+ # print ob
+ raise ReferenceLeak('more objects', len(ob_finish)-len(ob_start))
+ if total_finish != total_start:
+ print(total_start, total_finish)
+ raise ReferenceLeak('more references', total_finish - total_start)
+ except KeyboardInterrupt:
+ raise
+ except:
+ result.addError(self, self._exc_info())
+ ok = False
+ if ok: result.addSuccess(self)
+ finally:
+ result.stopTest(self)
+
+ def __call__(self, *args, **kwds):
+ return self.run(*args, **kwds)
+
+ def debug(self):
+ """Run the test without collecting errors in a TestResult"""
+ self.setUp()
+ getattr(self, self._testMethodName)()
+ self.tearDown()
+
+ def _exc_info(self):
+ """Return a version of sys.exc_info() with the traceback frame
+ minimised; usually the top level of the traceback frame is not
+ needed.
+ """
+ exctype, excvalue, tb = sys.exc_info()
+ if sys.platform[:4] == 'java': ## tracebacks look different in Jython
+ return (exctype, excvalue, tb)
+ return (exctype, excvalue, tb)
+
+ def fail(self, msg=None):
+ """Fail immediately, with the given message."""
+ raise self.failureException(msg)
+
+ def failIf(self, expr, msg=None):
+ "Fail the test if the expression is true."
+ if expr: raise self.failureException(msg)
+
+ def failUnless(self, expr, msg=None):
+ """Fail the test unless the expression is true."""
+ if not expr: raise self.failureException(msg)
+
+ def failUnlessRaises(self, excClass, callableObj, *args, **kwargs):
+ """Fail unless an exception of class excClass is thrown
+ by callableObj when invoked with arguments args and keyword
+ arguments kwargs. If a different type of exception is
+ thrown, it will not be caught, and the test case will be
+ deemed to have suffered an error, exactly as for an
+ unexpected exception.
+ """
+ try:
+ callableObj(*args, **kwargs)
+ except excClass:
+ return
+ else:
+ if hasattr(excClass,'__name__'): excName = excClass.__name__
+ else: excName = str(excClass)
+ raise self.failureException("%s not raised" % excName)
+
+ def failUnlessEqual(self, first, second, msg=None):
+ """Fail if the two objects are unequal as determined by the '=='
+ operator.
+ """
+ if not first == second:
+ raise self.failureException(msg or '%r != %r' % (first, second))
+
+ def failIfEqual(self, first, second, msg=None):
+ """Fail if the two objects are equal as determined by the '=='
+ operator.
+ """
+ if first == second:
+ raise self.failureException(msg or '%r == %r' % (first, second))
+
+ def failUnlessAlmostEqual(self, first, second, places=7, msg=None):
+ """Fail if the two objects are unequal as determined by their
+ difference rounded to the given number of decimal places
+ (default 7) and comparing to zero.
+
+ Note that decimal places (from zero) are usually not the same
+ as significant digits (measured from the most signficant digit).
+ """
+ if round(second-first, places) != 0:
+ raise self.failureException(msg or '%r != %r within %r places' % (first, second, places))
+
+ def failIfAlmostEqual(self, first, second, places=7, msg=None):
+ """Fail if the two objects are equal as determined by their
+ difference rounded to the given number of decimal places
+ (default 7) and comparing to zero.
+
+ Note that decimal places (from zero) are usually not the same
+ as significant digits (measured from the most signficant digit).
+ """
+ if round(second-first, places) == 0:
+ raise self.failureException(msg or '%r == %r within %r places' % (first, second, places))
+
+ # Synonyms for assertion methods
+
+ assertEqual = assertEquals = failUnlessEqual
+
+ assertNotEqual = assertNotEquals = failIfEqual
+
+ assertAlmostEqual = assertAlmostEquals = failUnlessAlmostEqual
+
+ assertNotAlmostEqual = assertNotAlmostEquals = failIfAlmostEqual
+
+ assertRaises = failUnlessRaises
+
+ assert_ = assertTrue = failUnless
+
+ assertFalse = failIf
+
+
+
+class TestSuite:
+ """A test suite is a composite test consisting of a number of TestCases.
+
+ For use, create an instance of TestSuite, then add test case instances.
+ When all tests have been added, the suite can be passed to a test
+ runner, such as TextTestRunner. It will run the individual test cases
+ in the order in which they were added, aggregating the results. When
+ subclassing, do not forget to call the base class constructor.
+ """
+ def __init__(self, tests=()):
+ self._tests = []
+ self.addTests(tests)
+
+ def __repr__(self):
+ return "<%s tests=%s>" % (_strclass(self.__class__), self._tests)
+
+ __str__ = __repr__
+
+ def __iter__(self):
+ return iter(self._tests)
+
+ def countTestCases(self):
+ cases = 0
+ for test in self._tests:
+ cases += test.countTestCases()
+ return cases
+
+ def addTest(self, test):
+ self._tests.append(test)
+
+ def addTests(self, tests):
+ for test in tests:
+ self.addTest(test)
+
+ def run(self, result):
+ for test in self._tests:
+ if result.shouldStop:
+ break
+ test(result)
+ return result
+
+ def __call__(self, *args, **kwds):
+ return self.run(*args, **kwds)
+
+ def debug(self):
+ """Run the tests without collecting errors in a TestResult"""
+ for test in self._tests: test.debug()
+
+
+class FunctionTestCase(TestCase):
+ """A test case that wraps a test function.
+
+ This is useful for slipping pre-existing test functions into the
+ PyUnit framework. Optionally, set-up and tidy-up functions can be
+ supplied. As with TestCase, the tidy-up ('tearDown') function will
+ always be called if the set-up ('setUp') function ran successfully.
+ """
+
+ def __init__(self, testFunc, setUp=None, tearDown=None,
+ description=None):
+ TestCase.__init__(self)
+ self.__setUpFunc = setUp
+ self.__tearDownFunc = tearDown
+ self.__testFunc = testFunc
+ self.__description = description
+
+ def setUp(self):
+ if self.__setUpFunc is not None:
+ self.__setUpFunc()
+
+ def tearDown(self):
+ if self.__tearDownFunc is not None:
+ self.__tearDownFunc()
+
+ def runTest(self):
+ self.__testFunc()
+
+ def id(self):
+ return self.__testFunc.__name__
+
+ def __str__(self):
+ return "%s (%s)" % (_strclass(self.__class__), self.__testFunc.__name__)
+
+ def __repr__(self):
+ return "<%s testFunc=%s>" % (_strclass(self.__class__), self.__testFunc)
+
+ def shortDescription(self):
+ if self.__description is not None: return self.__description
+ doc = self.__testFunc.__doc__
+ return doc and doc.split("\n")[0].strip() or None
+
+
+
+##############################################################################
+# Locating and loading tests
+##############################################################################
+
+class TestLoader:
+ """This class is responsible for loading tests according to various
+ criteria and returning them wrapped in a Test
+ """
+ testMethodPrefix = 'test'
+ suiteClass = TestSuite
+
+ def loadTestsFromTestCase(self, testCaseClass):
+ """Return a suite of all tests cases contained in testCaseClass"""
+ if issubclass(testCaseClass, TestSuite):
+ raise TypeError("Test cases should not be derived from TestSuite. Maybe you meant to derive from TestCase?")
+ testCaseNames = self.getTestCaseNames(testCaseClass)
+ if not testCaseNames and hasattr(testCaseClass, 'runTest'):
+ testCaseNames = ['runTest']
+ return self.suiteClass(list(map(testCaseClass, testCaseNames)))
+
+ def loadTestsFromModule(self, module):
+ """Return a suite of all tests cases contained in the given module"""
+ tests = []
+ for name in dir(module):
+ obj = getattr(module, name)
+ if (isinstance(obj, type) and
+ issubclass(obj, TestCase)):
+ tests.append(self.loadTestsFromTestCase(obj))
+ return self.suiteClass(tests)
+
+ def loadTestsFromName(self, name, module=None):
+ """Return a suite of all tests cases given a string specifier.
+
+ The name may resolve either to a module, a test case class, a
+ test method within a test case class, or a callable object which
+ returns a TestCase or TestSuite instance.
+
+ The method optionally resolves the names relative to a given module.
+ """
+ parts = name.split('.')
+ if module is None:
+ parts_copy = parts[:]
+ while parts_copy:
+ try:
+ module = __import__('.'.join(parts_copy))
+ break
+ except ImportError:
+ del parts_copy[-1]
+ if not parts_copy: raise
+ parts = parts[1:]
+ obj = module
+ for part in parts:
+ parent, obj = obj, getattr(obj, part)
+
+ if type(obj) == types.ModuleType:
+ return self.loadTestsFromModule(obj)
+ elif (isinstance(obj, type) and
+ issubclass(obj, TestCase)):
+ return self.loadTestsFromTestCase(obj)
+ elif type(obj) == types.UnboundMethodType:
+ return parent(obj.__name__)
+ elif isinstance(obj, TestSuite):
+ return obj
+ elif hasattr(obj, '__call__'):
+ test = obj()
+ if not isinstance(test, (TestCase, TestSuite)):
+ raise ValueError("calling %s returned %s, not a test" % (obj,test))
+ return test
+ else:
+ raise ValueError("don't know how to make test from: %s" % obj)
+
+ def loadTestsFromNames(self, names, module=None):
+ """Return a suite of all tests cases found using the given sequence
+ of string specifiers. See 'loadTestsFromName()'.
+ """
+ suites = [self.loadTestsFromName(name, module) for name in names]
+ return self.suiteClass(suites)
+
+ def getTestCaseNames(self, testCaseClass):
+ """Return a sorted sequence of method names found within testCaseClass
+ """
+ def isTestMethod(attrname, testCaseClass=testCaseClass, prefix=self.testMethodPrefix):
+ return attrname.startswith(prefix) and hasattr(getattr(testCaseClass, attrname), '__call__')
+ testFnNames = list(filter(isTestMethod, dir(testCaseClass)))
+ for baseclass in testCaseClass.__bases__:
+ for testFnName in self.getTestCaseNames(baseclass):
+ if testFnName not in testFnNames: # handle overridden methods
+ testFnNames.append(testFnName)
+ return testFnNames
+
+
+
+defaultTestLoader = TestLoader()
+
+
+##############################################################################
+# Patches for old functions: these functions should be considered obsolete
+##############################################################################
+
+def _makeLoader(prefix, suiteClass=None):
+ loader = TestLoader()
+ loader.testMethodPrefix = prefix
+ if suiteClass: loader.suiteClass = suiteClass
+ return loader
+
+def getTestCaseNames(testCaseClass, prefix):
+    # Obsolete module-level helper; prefer TestLoader.getTestCaseNames().
+    return _makeLoader(prefix).getTestCaseNames(testCaseClass)
+
+def makeSuite(testCaseClass, prefix='test', suiteClass=TestSuite):
+    # Obsolete module-level helper; prefer TestLoader.loadTestsFromTestCase().
+    return _makeLoader(prefix, suiteClass).loadTestsFromTestCase(testCaseClass)
+
+def findTestCases(module, prefix='test', suiteClass=TestSuite):
+    # Obsolete module-level helper; prefer TestLoader.loadTestsFromModule().
+    return _makeLoader(prefix, suiteClass).loadTestsFromModule(module)
+
+
+##############################################################################
+# Text UI
+##############################################################################
+
+class _WritelnDecorator:
+ """Used to decorate file-like objects with a handy 'writeln' method"""
+ def __init__(self,stream):
+ self.stream = stream
+
+ def __getattr__(self, attr):
+ return getattr(self.stream,attr)
+
+ def writeln(self, arg=None):
+ if arg: self.write(arg)
+ self.write('\n') # text-mode streams translate to \r\n if needed
+
+
+class _TextTestResult(TestResult):
+    """A test result class that can print formatted text results to a stream.
+
+    Used by TextTestRunner.
+    """
+    separator1 = '=' * 70
+    separator2 = '-' * 70
+
+    def __init__(self, stream, descriptions, verbosity):
+        # verbosity > 1: one labelled line per test; verbosity == 1: dot mode.
+        TestResult.__init__(self)
+        self.stream = stream
+        self.showAll = verbosity > 1
+        self.dots = verbosity == 1
+        self.descriptions = descriptions
+
+    def getDescription(self, test):
+        # Prefer the test's short description when descriptions is truthy.
+        if self.descriptions:
+            return test.shortDescription() or str(test)
+        else:
+            return str(test)
+
+    def startTest(self, test):
+        TestResult.startTest(self, test)
+        if self.showAll:
+            self.stream.write(self.getDescription(test))
+            self.stream.write(" ... ")
+
+    def addSuccess(self, test):
+        TestResult.addSuccess(self, test)
+        if self.showAll:
+            self.stream.writeln("ok")
+        elif self.dots:
+            self.stream.write('.')
+
+    def addError(self, test, err):
+        TestResult.addError(self, test, err)
+        if self.showAll:
+            self.stream.writeln("ERROR")
+        elif self.dots:
+            self.stream.write('E')
+
+    def addFailure(self, test, err):
+        TestResult.addFailure(self, test, err)
+        if self.showAll:
+            self.stream.writeln("FAIL")
+        elif self.dots:
+            self.stream.write('F')
+
+    def printErrors(self):
+        # Terminate the dot/verbose progress line before the detail blocks.
+        if self.dots or self.showAll:
+            self.stream.writeln()
+        self.printErrorList('ERROR', self.errors)
+        self.printErrorList('FAIL', self.failures)
+
+    def printErrorList(self, flavour, errors):
+        # errors: list of (test, formatted-traceback-string) pairs.
+        for test, err in errors:
+            self.stream.writeln(self.separator1)
+            self.stream.writeln("%s: %s" % (flavour,self.getDescription(test)))
+            self.stream.writeln(self.separator2)
+            self.stream.writeln("%s" % err)
+
+
+class TextTestRunner:
+    """A test runner class that displays results in textual form.
+
+    It prints out the names of tests as they are run, errors as they
+    occur, and a summary of the results at the end of the test run.
+    """
+    def __init__(self, stream=sys.stderr, descriptions=1, verbosity=1):
+        self.stream = _WritelnDecorator(stream)
+        self.descriptions = descriptions
+        self.verbosity = verbosity
+
+    def _makeResult(self):
+        # Hook so subclasses can substitute a different result class.
+        return _TextTestResult(self.stream, self.descriptions, self.verbosity)
+
+    def run(self, test):
+        "Run the given test case or test suite."
+        result = self._makeResult()
+        startTime = time.time()
+        test(result)
+        stopTime = time.time()
+        timeTaken = stopTime - startTime
+        result.printErrors()
+        self.stream.writeln(result.separator2)
+        run = result.testsRun
+        # "1 test" vs "N tests" pluralization via the old and/or idiom.
+        self.stream.writeln("Ran %d test%s in %.3fs" %
+                            (run, run != 1 and "s" or "", timeTaken))
+        self.stream.writeln()
+        if not result.wasSuccessful():
+            self.stream.write("FAILED (")
+            failed, errored = list(map(len, (result.failures, result.errors)))
+            if failed:
+                self.stream.write("failures=%d" % failed)
+            if errored:
+                if failed: self.stream.write(", ")
+                self.stream.write("errors=%d" % errored)
+            self.stream.writeln(")")
+        else:
+            self.stream.writeln("OK")
+        return result
+
+
+
+##############################################################################
+# Facilities for running tests from the command line
+##############################################################################
+
+class TestProgram:
+    """A command-line program that runs a set of tests; this is primarily
+    for making test modules conveniently executable.
+    """
+    USAGE = """\
+Usage: %(progName)s [options] [test] [...]
+
+Options:
+  -h, --help       Show this message
+  -v, --verbose    Verbose output
+  -q, --quiet      Minimal output
+
+Examples:
+  %(progName)s                               - run default set of tests
+  %(progName)s MyTestSuite                   - run suite 'MyTestSuite'
+  %(progName)s MyTestCase.testSomething      - run MyTestCase.testSomething
+  %(progName)s MyTestCase                    - run all 'test*' test methods
+                                               in MyTestCase
+"""
+    def __init__(self, module='__main__', defaultTest=None,
+                 argv=None, testRunner=None, testLoader=defaultTestLoader):
+        # A string module name is imported and each dotted component resolved.
+        if type(module) == type(''):
+            self.module = __import__(module)
+            for part in module.split('.')[1:]:
+                self.module = getattr(self.module, part)
+        else:
+            self.module = module
+        if argv is None:
+            argv = sys.argv
+        self.verbosity = 1
+        self.defaultTest = defaultTest
+        self.testRunner = testRunner
+        self.testLoader = testLoader
+        self.progName = os.path.basename(argv[0])
+        # NOTE: constructing a TestProgram parses argv and immediately runs
+        # the tests, then exits the process via runTests().
+        self.parseArgs(argv)
+        self.runTests()
+
+    def usageExit(self, msg=None):
+        # Print an optional error plus usage text, then exit with status 2.
+        if msg: print(msg)
+        print(self.USAGE % self.__dict__)
+        sys.exit(2)
+
+    def parseArgs(self, argv):
+        import getopt
+        try:
+            options, args = getopt.getopt(argv[1:], 'hHvq',
+                ['help','verbose','quiet'])
+            for opt, value in options:
+                if opt in ('-h','-H','--help'):
+                    self.usageExit()
+                if opt in ('-q','--quiet'):
+                    self.verbosity = 0
+                if opt in ('-v','--verbose'):
+                    self.verbosity = 2
+            if len(args) == 0 and self.defaultTest is None:
+                # No test names and no default: run the whole module.
+                self.test = self.testLoader.loadTestsFromModule(self.module)
+                return
+            if len(args) > 0:
+                self.testNames = args
+            else:
+                self.testNames = (self.defaultTest,)
+            self.createTests()
+        except getopt.error as msg:
+            self.usageExit(msg)
+
+    def createTests(self):
+        self.test = self.testLoader.loadTestsFromNames(self.testNames,
+                                                       self.module)
+
+    def runTests(self):
+        if self.testRunner is None:
+            self.testRunner = TextTestRunner(verbosity=self.verbosity)
+        result = self.testRunner.run(self.test)
+        # Exit status: 0 on success, 1 on any failure or error.
+        sys.exit(not result.wasSuccessful())
+
+main = TestProgram
+
+
+##############################################################################
+# Executing this module from the command line
+##############################################################################
+
+if __name__ == "__main__":
+ main(module=None)
diff --git a/test_blist.py b/test_blist.py
new file mode 100755
index 0000000..9a32a42
--- /dev/null
+++ b/test_blist.py
@@ -0,0 +1,368 @@
+#!/usr/bin/python
+from __future__ import print_function
+
+"""
+Copyright 2007-2010 Stutzbach Enterprises, LLC (daniel@stutzbachenterprises.com)
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ 1. Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ 2. Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ 3. The name of the author may not be used to endorse or promote
+ products derived from this software without specific prior written
+ permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
+IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
+INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGE.
+
+"""
+
+
+import sys
+import os
+
+import unittest, operator
+import blist, pickle, _blist
+#BList = list
+from test import test_support, list_tests, sortedlist_tests, btuple_tests
+from test import sorteddict_tests, test_set
+
+limit = _blist._limit
+n = 512//8 * limit
+
+class BListTest(list_tests.CommonTest):
+ type2test = blist.blist
+
+    def test_delmul(self):
+        # Alternate deleting the middle half with doubling the list:
+        # exercises slice deletion followed by in-place multiplication
+        # at a range of sizes.
+        x = self.type2test(list(range(10000)))
+        for i in range(100):
+            del x[len(x)//4:3*len(x)//4]
+            x *= 2
+
+    def test_truth(self):
+        # Truthiness: an empty blist is falsy, a non-empty one is truthy.
+        super(BListTest, self).test_truth()
+        self.assert_(not self.type2test())
+        self.assert_(self.type2test([42]))
+
+ def test_identity(self):
+ self.assert_(self.type2test([]) is not self.type2test([]))
+
+ def test_len(self):
+ super(BListTest, self).test_len()
+ self.assertEqual(len(self.type2test()), 0)
+ self.assertEqual(len(self.type2test([0])), 1)
+ self.assertEqual(len(self.type2test([0, 1, 2])), 3)
+
+ def test_append2(self):
+ lst = self.type2test()
+ t = tuple(range(n))
+ for i in range(n):
+ lst.append(i)
+ self.assertEqual(tuple(lst), t[:i+1])
+
+    def test_delstuff(self):
+        # A slice taken from a blist must be independent of its parent:
+        # appending to the slice, assigning into the parent, and deleting
+        # from the parent must each leave the other object unchanged.
+        lst = self.type2test(list(range(n)))
+        t = tuple(range(n))
+        x = lst[4:258]
+        self.assertEqual(tuple(x), tuple(t[4:258]))
+        x.append(-1)
+        self.assertEqual(tuple(x), tuple(t[4:258] + (-1,)))
+        self.assertEqual(tuple(lst), t)
+        lst[200] = 6
+        self.assertEqual(tuple(x), tuple(t[4:258] + (-1,)))
+        self.assertEqual(tuple(lst), tuple(t[0:200] + (6,) + t[201:]))
+        del lst[200]
+        self.assertEqual(tuple(lst), tuple(t[0:200] + t[201:]))
+
+ def test_del1(self):
+ lst2 = self.type2test(list(range(limit+1)))
+ self.assertEqual(tuple(lst2), tuple(range(limit+1)))
+ del lst2[1]
+ del lst2[-1]
+ self.assertEqual(tuple(lst2), (0,) + tuple(range(2,limit)))
+
+ def test_insert_and_del(self):
+ lst = self.type2test(list(range(n)))
+ t = tuple(range(n))
+ lst.insert(200, 0)
+ self.assertEqual(tuple(lst), (t[0:200] + (0,) + t[200:]))
+ del lst[200:]
+ self.assertEqual(tuple(lst), tuple(range(200)))
+
+ def test_mul3(self):
+ lst = self.type2test(list(range(3)))
+ self.assertEqual(tuple(lst*3), tuple(list(range(3))*3))
+
+ def test_mul(self):
+ x = self.type2test(list(range(limit**2)))
+ for i in range(10):
+ self.assertEqual(len(x*i), i*limit**2)
+
+ def test_extendspam(self):
+ a = self.type2test('spam')
+ a.extend('eggs')
+ self.assertEqual(list(a), list('spameggs'))
+
+ def test_bigmul1(self):
+ x = self.type2test([0])
+ for i in list(range(290)) + [1000, 10000, 100000, 1000000, 10000000, 2**29]:
+ self.assertEqual(len(x*i), i)
+
+ def test_badinit(self):
+ self.assertRaises(TypeError, self.type2test, 0, 0, 0)
+
+ def test_copyself(self):
+ x = self.type2test(list(range(n)))
+ x[:] = x
+
+ def test_nohash(self):
+ x = self.type2test()
+ d = {}
+ self.assertRaises(TypeError, d.__setitem__, x, 5)
+
+ def test_collapseboth(self):
+ x = self.type2test(list(range(512)))
+ del x[193:318]
+
+ def test_collapseright(self):
+ x = self.type2test(list(range(512)))
+ del x[248:318]
+
+ def test_badrepr(self):
+ class BadExc(Exception):
+ pass
+
+ class BadRepr:
+ def __repr__(self):
+ raise BadExc
+
+ x = self.type2test([BadRepr()])
+ self.assertRaises(BadExc, repr, x)
+ x = self.type2test(list(range(n)))
+ x.append(BadRepr())
+ self.assertRaises(BadExc, repr, x)
+
+ def test_slice0(self):
+ x = self.type2test(list(range(n)))
+ x[slice(5,3,1)] = []
+ self.assertEqual(x, list(range(n)))
+ x = self.type2test(list(range(n)))
+ self.assertRaises(ValueError, x.__setitem__, slice(5,3,1), [5,3,2])
+ del x[slice(5,3,1)]
+ self.assertEqual(x, list(range(n)))
+
+ def test_badindex(self):
+ x = self.type2test()
+ self.assertRaises(TypeError, x.__setitem__, 's', 5)
+
+ def test_comparelist(self):
+ x = self.type2test(list(range(n)))
+ y = list(range(n-1))
+ self.assert_(not (x == y))
+ self.assert_(x != y)
+ self.assert_(not (x < y))
+ self.assert_(not (x <= y))
+ self.assert_(x > y)
+ self.assert_(x >= y)
+
+ y = list(range(n))
+ self.assert_(x == y)
+ self.assert_(y == x)
+
+ y[100] = 6
+ self.assert_(not (x == y))
+ self.assert_(x != y)
+
+ def test_compareblist(self):
+ x = self.type2test(list(range(n)))
+ y = self.type2test(list(range(n-1)))
+ self.assert_(not (x == y))
+ self.assert_(x != y)
+ self.assert_(not (x < y))
+ self.assert_(not (x <= y))
+ self.assert_(x > y)
+ self.assert_(x >= y)
+
+ y[100] = 6
+ self.assert_(not (x == y))
+ self.assert_(x != y)
+
+ def test_comparetuple(self):
+ x = self.type2test(list(range(n)))
+ y = tuple(range(n))
+ self.assert_(x != y)
+
+ def test_indexempty(self):
+ x = self.type2test(list(range(10)))
+ self.assertRaises(ValueError, x.index, 'spam')
+
+ def test_indexargs(self):
+ x = self.type2test(list(range(10)))
+ self.assertEqual(x.index(5,1,-1), 5)
+ self.assertRaises(ValueError, x.index, 5, -1, -9)
+ self.assertRaises(ValueError, x.index, 8, 1, 4)
+ self.assertRaises(ValueError, x.index, 0, 1, 4)
+
+ def test_reversebig(self):
+ x = self.type2test(list(range(n)))
+ x.reverse()
+ self.assertEqual(x, list(range(n-1,-1,-1)))
+
+ def test_badconcat(self):
+ x = self.type2test()
+ y = 'foo'
+ self.assertRaises(TypeError, operator.add, x, y)
+
+ def test_bad_assign(self):
+ x = self.type2test(list(range(n)))
+ self.assertRaises(TypeError, x.__setitem__, slice(1,10,2), 5)
+
+ def sort_evil(self, after):
+ class EvilCompare:
+ count = 0
+ num_raises = 0
+ def __init__(self, x):
+ self.x = x
+ def __lt__(self, other):
+ EvilCompare.count += 1
+ if EvilCompare.count > after:
+ EvilCompare.num_raises += 1
+ raise ValueError
+ return self.x < other.x
+
+ x = self.type2test(EvilCompare(x) for x in range(n))
+ from random import shuffle
+ shuffle(x)
+ self.assertRaises(ValueError, x.sort)
+ self.assertEqual(EvilCompare.num_raises, 1)
+ x = [a.x for a in x]
+ x.sort()
+ self.assertEquals(x, list(range(n)))
+
+ def test_sort_evil_small(self):
+ self.sort_evil(limit * 5)
+
+ def test_sort_evil_big(self):
+ self.sort_evil(n + limit)
+
+ def test_big_extend(self):
+ x = self.type2test([1])
+ x.extend(range(n))
+ self.assertEqual(tuple(x), (1,) + tuple(range(n)))
+
+ def test_big_getslice(self):
+ x = self.type2test([0]) * 65536
+ self.assertEqual(len(x[256:512]), 256)
+
+ def test_modify_original(self):
+ x = self.type2test(list(range(1024)))
+ y = x[:]
+ x[5] = 'z'
+ self.assertEqual(tuple(y), tuple(range(1024)))
+ self.assertEqual(x[5], 'z')
+ self.assertEqual(tuple(x[:5]), tuple(range(5)))
+ self.assertEqual(tuple(x[6:]), tuple(range(6, 1024)))
+
+ def test_modify_copy(self):
+ x = self.type2test(list(range(1024)))
+ y = x[:]
+ y[5] = 'z'
+ self.assertEqual(tuple(x), tuple(range(1024)))
+ self.assertEqual(y[5], 'z')
+ self.assertEqual(tuple(y[:5]), tuple(range(5)))
+ self.assertEqual(tuple(y[6:]), tuple(range(6, 1024)))
+
+ def test_bigsort(self):
+ x = self.type2test(list(range(100000)))
+ x.sort()
+
+ def test_sort_twice(self):
+ y = blist.blist(list(range(limit+1)))
+ for i in range(2):
+ x = blist.blist(y)
+ x.sort()
+ self.assertEqual(tuple(x), tuple(range(limit+1)))
+
+ def test_LIFO(self):
+ x = blist.blist()
+ for i in range(1000):
+ x.append(i)
+ for j in range(1000-1,-1,-1):
+ self.assertEqual(x.pop(), j)
+
+ def pickle_test(self, pickler, x):
+ y = pickler.dumps(x)
+ z = pickler.loads(y)
+ self.assertEqual(x, z)
+ self.assertEqual(repr(x), repr(z))
+
+ def pickle_tests(self, pickler):
+ self.pickle_test(pickler, blist.blist())
+ self.pickle_test(pickler, blist.blist(list(range(limit))))
+ self.pickle_test(pickler, blist.blist(list(range(limit+1))))
+ self.pickle_test(pickler, blist.blist(list(range(n))))
+
+ x = blist.blist([0])
+ x *= n
+ self.pickle_test(pickler, x)
+ y = blist.blist(x)
+ y[5] = 'x'
+ self.pickle_test(pickler, x)
+ self.pickle_test(pickler, y)
+
+ def test_pickle(self):
+ self.pickle_tests(pickle)
+
+ def test_types(self):</