summary refs log tree commit diff stats
path: root/generic/tclListObj.c
diff options
context:
space:
mode:
Diffstat (limited to 'generic/tclListObj.c')
-rw-r--r--  generic/tclListObj.c  87
1 file changed, 43 insertions(+), 44 deletions(-)
diff --git a/generic/tclListObj.c b/generic/tclListObj.c
index 87530be..726b8dd 100644
--- a/generic/tclListObj.c
+++ b/generic/tclListObj.c
@@ -40,7 +40,7 @@
#ifdef ENABLE_LIST_ASSERTS
-#define LIST_ASSERT(cond_) assert(cond_) /* TODO - is there a Tcl-specific one? */
+#define LIST_ASSERT(cond_) assert(cond_)
/*
* LIST_INDEX_ASSERT is to catch errors with negative indices and counts
* being passed AFTER validation. On Tcl9 length types are unsigned hence
@@ -69,8 +69,7 @@
/* Checks for when caller should have already converted to internal list type */
#define LIST_ASSERT_TYPE(listObj_) \
- LIST_ASSERT((listObj_)->typePtr == &tclListType.objType);
-
+ LIST_ASSERT(TclHasInternalRep((listObj_), &tclListType.objType))
/*
* If ENABLE_LIST_INVARIANTS is enabled (-DENABLE_LIST_INVARIANTS from the
@@ -305,12 +304,12 @@ ListSpanMerited(
Tcl_Size allocatedStorageLength) /* Length of the currently allocation */
{
/*
- TODO
- - heuristics thresholds need to be determined
- - currently, information about the sharing (ref count) of existing
- storage is not passed. Perhaps it should be. For example if the
- existing storage has a "large" ref count, then it might make sense
- to do even a small span.
+ * Possible optimizations for future consideration
+ * - heuristic LIST_SPAN_THRESHOLD
+ * - currently, information about the sharing (ref count) of existing
+ * storage is not passed. Perhaps it should be. For example if the
+ * existing storage has a "large" ref count, then it might make sense
+ * to do even a small span.
*/
if (length < LIST_SPAN_THRESHOLD) {
@@ -771,14 +770,16 @@ ListStoreNew(
}
if (flags & LISTREP_SPACE_FLAGS) {
+ /* Caller requests extra space front, back or both */
capacity = ListStoreUpSize(objc);
} else {
capacity = objc;
}
storePtr = (ListStore *)Tcl_AttemptAlloc(LIST_SIZE(capacity));
- if (storePtr == NULL && capacity != objc) {
- capacity = objc; /* Try allocating exact size */
+ while (storePtr == NULL && (capacity > (objc+1))) {
+ /* Because of loop condition capacity won't overflow */
+ capacity = objc + ((capacity - objc) / 2);
storePtr = (ListStore *)Tcl_AttemptAlloc(LIST_SIZE(capacity));
}
if (storePtr == NULL) {
@@ -827,7 +828,8 @@ ListStoreNew(
*
* ListStoreReallocate --
*
- * Reallocates the memory for a ListStore.
+ * Reallocates the memory for a ListStore allocating extra for
+ * possible future growth.
*
* Results:
* Pointer to the ListStore which may be the same as storePtr or pointer
@@ -856,7 +858,7 @@ ListStoreReallocate (ListStore *storePtr, Tcl_Size numSlots)
* by half every time.
*/
while (newStorePtr == NULL && (newCapacity > (numSlots+1))) {
- /* Because of loop condition newCapacity can't overflow */
+ /* Because of loop condition newCapacity won't overflow */
newCapacity = numSlots + ((newCapacity - numSlots) / 2);
newStorePtr =
(ListStore *)Tcl_AttemptRealloc(storePtr, LIST_SIZE(newCapacity));
@@ -1960,19 +1962,18 @@ int
Tcl_ListObjIndex(
Tcl_Interp *interp, /* Used to report errors if not NULL. */
Tcl_Obj *listObj, /* List object to index into. */
- Tcl_Size index, /* Index of element to return. */
+ Tcl_Size index, /* Index of element to return. */
Tcl_Obj **objPtrPtr) /* The resulting Tcl_Obj* is stored here. */
{
Tcl_Obj **elemObjs;
Tcl_Size numElems;
- /*
- * TODO
- * Unlike the original list code, this does not optimize for lindex'ing
- * an empty string when the internal rep is not already a list. On the
- * other hand, this code will be faster for the case where the object
- * is currently a dict. Benchmark the two cases.
- */
+ /* Empty string => empty list. Avoid unnecessary shimmering */
+ if (listObj->bytes == &tclEmptyString) {
+ *objPtrPtr = NULL;
+ return TCL_OK;
+ }
+
if (TclListObjGetElementsM(interp, listObj, &numElems, &elemObjs)
!= TCL_OK) {
return TCL_ERROR;
@@ -2017,19 +2018,19 @@ Tcl_ListObjLength(
{
ListRep listRep;
+ /* Empty string => empty list. Avoid unnecessary shimmering */
+ if (listObj->bytes == &tclEmptyString) {
+ *lenPtr = 0;
+ return TCL_OK;
+ }
+
Tcl_Size (*lengthProc)(Tcl_Obj *obj) = ABSTRACTLIST_PROC(listObj, lengthProc);
if (lengthProc) {
*lenPtr = lengthProc(listObj);
return TCL_OK;
}
- /*
- * TODO
- * Unlike the original list code, this does not optimize for lindex'ing
- * an empty string when the internal rep is not already a list. On the
- * other hand, this code will be faster for the case where the object
- * is currently a dict. Benchmark the two cases.
- */
+
if (TclListObjGetRep(interp, listObj, &listRep) != TCL_OK) {
return TCL_ERROR;
}
@@ -2094,12 +2095,12 @@ Tcl_ListObjReplace(
{
ListRep listRep;
Tcl_Size origListLen;
- ptrdiff_t lenChange;
- ptrdiff_t leadSegmentLen;
- ptrdiff_t tailSegmentLen;
+ Tcl_Size lenChange;
+ Tcl_Size leadSegmentLen;
+ Tcl_Size tailSegmentLen;
Tcl_Size numFreeSlots;
- ptrdiff_t leadShift;
- ptrdiff_t tailShift;
+ Tcl_Size leadShift;
+ Tcl_Size tailShift;
Tcl_Obj **listObjs;
int favor;
@@ -2110,8 +2111,6 @@ Tcl_ListObjReplace(
if (TclListObjGetRep(interp, listObj, &listRep) != TCL_OK)
return TCL_ERROR; /* Cannot be converted to a list */
- /* TODO - will need modification if Tcl9 sticks to unsigned indices */
-
/* Make limits sane */
origListLen = ListRepLength(&listRep);
if (first < 0) {
@@ -2260,7 +2259,7 @@ Tcl_ListObjReplace(
* be an explicit alloc and memmove which would let us redistribute
* free space.
*/
- if ((ptrdiff_t)numFreeSlots < lenChange && !ListRepIsShared(&listRep)) {
+ if (numFreeSlots < lenChange && !ListRepIsShared(&listRep)) {
/* T:listrep-1.{1,3,14,18,21},3.{3,10,11,14,27,32,41} */
ListStore *newStorePtr =
ListStoreReallocate(listRep.storePtr, origListLen + lenChange);
@@ -2287,7 +2286,7 @@ Tcl_ListObjReplace(
* TODO - for unshared case ONLY, consider a "move" based implementation
*/
if (ListRepIsShared(&listRep) || /* 3a */
- (ptrdiff_t)numFreeSlots < lenChange || /* 3b */
+ numFreeSlots < lenChange || /* 3b */
(origListLen + lenChange) < (listRep.storePtr->numAllocated / 4) /* 3c */
) {
ListRep newRep;
@@ -2402,9 +2401,9 @@ Tcl_ListObjReplace(
* or need to shift both. In the former case, favor shifting the
* smaller segment.
*/
- ptrdiff_t leadSpace = ListRepNumFreeHead(&listRep);
- ptrdiff_t tailSpace = ListRepNumFreeTail(&listRep);
- ptrdiff_t finalFreeSpace = leadSpace + tailSpace - lenChange;
+ Tcl_Size leadSpace = ListRepNumFreeHead(&listRep);
+ Tcl_Size tailSpace = ListRepNumFreeTail(&listRep);
+ Tcl_Size finalFreeSpace = leadSpace + tailSpace - lenChange;
LIST_ASSERT((leadSpace + tailSpace) >= lenChange);
if (leadSpace >= lenChange
@@ -2421,7 +2420,7 @@ Tcl_ListObjReplace(
* insertions.
*/
if (finalFreeSpace > 1 && (tailSpace == 0 || tailSegmentLen == 0)) {
- ptrdiff_t postShiftLeadSpace = leadSpace - lenChange;
+ Tcl_Size postShiftLeadSpace = leadSpace - lenChange;
if (postShiftLeadSpace > (finalFreeSpace/2)) {
Tcl_Size extraShift = postShiftLeadSpace - (finalFreeSpace / 2);
leadShift -= extraShift;
@@ -2438,7 +2437,7 @@ Tcl_ListObjReplace(
* See comments above. This is analogous.
*/
if (finalFreeSpace > 1 && (leadSpace == 0 || leadSegmentLen == 0)) {
- ptrdiff_t postShiftTailSpace = tailSpace - lenChange;
+ Tcl_Size postShiftTailSpace = tailSpace - lenChange;
if (postShiftTailSpace > (finalFreeSpace/2)) {
/* T:listrep-1.{1,3,14,18,21},3.{2,3,26,27} */
Tcl_Size extraShift = postShiftTailSpace - (finalFreeSpace / 2);
@@ -2613,7 +2612,7 @@ TclLindexList(
/*
* The argument is neither an index nor a well-formed list.
* Report the error via TclLindexFlat.
- * TODO - This is as original. why not directly return an error?
+ * TODO - This is as original code. why not directly return an error?
*/
return TclLindexFlat(interp, listObj, 1, &argObj);
}
@@ -3557,7 +3556,7 @@ TclListTestObj(size_t length, size_t leadingSpace, size_t endSpace)
return NULL;
}
- ListRepInit(capacity, NULL, 0, &listRep);
+ ListRepInit(capacity, NULL, LISTREP_PANIC_ON_FAIL, &listRep);
ListStore *storePtr = listRep.storePtr;
size_t i;